import ctypes import faulthandler import io import itertools import logging import multiprocessing import os import pickle import sys import textwrap import unittest from importlib import import_module from io import StringIO from django.core.management import call_command from django.db import connections from django.test import SimpleTestCase, TestCase from django.test.utils import ( NullTimeKeeper, TimeKeeper, iter_test_cases, setup_databases as _setup_databases, setup_test_environment, teardown_databases as _teardown_databases, teardown_test_environment, ) from django.utils.datastructures import OrderedSet try: import ipdb as pdb except ImportError: import pdb try: import tblib.pickling_support except ImportError: tblib = None class DebugSQLTextTestResult(unittest.TextTestResult): def __init__(self, stream, descriptions, verbosity): self.logger = logging.getLogger('django.db.backends') self.logger.setLevel(logging.DEBUG) self.debug_sql_stream = None super().__init__(stream, descriptions, verbosity) def startTest(self, test): self.debug_sql_stream = StringIO() self.handler = logging.StreamHandler(self.debug_sql_stream) self.logger.addHandler(self.handler) super().startTest(test) def stopTest(self, test): super().stopTest(test) self.logger.removeHandler(self.handler) if self.showAll: self.debug_sql_stream.seek(0) self.stream.write(self.debug_sql_stream.read()) self.stream.writeln(self.separator2) def addError(self, test, err): super().addError(test, err) if self.debug_sql_stream is None: # Error before tests e.g. in setUpTestData(). sql = '' else: self.debug_sql_stream.seek(0) sql = self.debug_sql_stream.read() self.errors[-1] = self.errors[-1] + (sql,) def addFailure(self, test, err): super().addFailure(test, err) self.debug_sql_stream.seek(0) self.failures[-1] = self.failures[-1] + (self.debug_sql_stream.read(),) def addSubTest(self, test, subtest, err): super().addSubTest(test, subtest, err) if err is not None: self.debug_sql_stream.seek(0) errors = self.failures if issubclass(err[0], test.failureException) else self.errors errors[-1] = errors[-1] + (self.debug_sql_stream.read(),) def printErrorList(self, flavour, errors): for test, err, sql_debug in errors: self.stream.writeln(self.separator1) self.stream.writeln("%s: %s" % (flavour, self.getDescription(test))) self.stream.writeln(self.separator2) self.stream.writeln(err) self.stream.writeln(self.separator2) self.stream.writeln(sql_debug) class PDBDebugResult(unittest.TextTestResult): """ Custom result class that triggers a PDB session when an error or failure occurs. """ def addError(self, test, err): super().addError(test, err) self.debug(err) def addFailure(self, test, err): super().addFailure(test, err) self.debug(err) def debug(self, error): self._restoreStdout() self.buffer = False exc_type, exc_value, traceback = error print("\nOpening PDB: %r" % exc_value) pdb.post_mortem(traceback) class DummyList: """ Dummy list class for faking storage of results in unittest.TestResult. """ __slots__ = () def append(self, item): pass class RemoteTestResult(unittest.TestResult): """ Extend unittest.TestResult to record events in the child processes so they can be replayed in the parent process. Events include things like which tests succeeded or failed. """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # Fake storage of results to reduce memory usage. These are used by the # unittest default methods, but here 'events' is used instead. 
dummy_list = DummyList() self.failures = dummy_list self.errors = dummy_list self.skipped = dummy_list self.expectedFailures = dummy_list self.unexpectedSuccesses = dummy_list if tblib is not None: tblib.pickling_support.install() self.events = [] def __getstate__(self): # Make this class picklable by removing the file-like buffer # attributes. This is possible since they aren't used after unpickling # after being sent to ParallelTestSuite. state = self.__dict__.copy() state.pop('_stdout_buffer', None) state.pop('_stderr_buffer', None) state.pop('_original_stdout', None) state.pop('_original_stderr', None) return state @property def test_index(self): return self.testsRun - 1 def _confirm_picklable(self, obj): """ Confirm that obj can be pickled and unpickled as multiprocessing will need to pickle the exception in the child process and unpickle it in the parent process. Let the exception rise, if not. """ pickle.loads(pickle.dumps(obj)) def _print_unpicklable_subtest(self, test, subtest, pickle_exc): print(""" Subtest failed: test: {} subtest: {} Unfortunately, the subtest that failed cannot be pickled, so the parallel test runner cannot handle it cleanly. Here is the pickling error: > {} You should re-run this test with --parallel=1 to reproduce the failure with a cleaner failure message. """.format(test, subtest, pickle_exc)) def check_picklable(self, test, err): # Ensure that sys.exc_info() tuples are picklable. This displays a # clear multiprocessing.pool.RemoteTraceback generated in the child # process instead of a multiprocessing.pool.MaybeEncodingError, making # the root cause easier to figure out for users who aren't familiar # with the multiprocessing module. Since we're in a forked process, # our best chance to communicate with them is to print to stdout. try: self._confirm_picklable(err) except Exception as exc: original_exc_txt = repr(err[1]) original_exc_txt = textwrap.fill(original_exc_txt, 75, initial_indent=' ', subsequent_indent=' ') pickle_exc_txt = repr(exc) pickle_exc_txt = textwrap.fill(pickle_exc_txt, 75, initial_indent=' ', subsequent_indent=' ') if tblib is None: print(""" {} failed: {} Unfortunately, tracebacks cannot be pickled, making it impossible for the parallel test runner to handle this exception cleanly. In order to see the traceback, you should install tblib: python -m pip install tblib """.format(test, original_exc_txt)) else: print(""" {} failed: {} Unfortunately, the exception it raised cannot be pickled, making it impossible for the parallel test runner to handle it cleanly. Here's the error encountered while trying to pickle the exception: {} You should re-run this test with the --parallel=1 option to reproduce the failure and get a correct traceback. 
""".format(test, original_exc_txt, pickle_exc_txt)) raise def check_subtest_picklable(self, test, subtest): try: self._confirm_picklable(subtest) except Exception as exc: self._print_unpicklable_subtest(test, subtest, exc) raise def startTestRun(self): super().startTestRun() self.events.append(('startTestRun',)) def stopTestRun(self): super().stopTestRun() self.events.append(('stopTestRun',)) def startTest(self, test): super().startTest(test) self.events.append(('startTest', self.test_index)) def stopTest(self, test): super().stopTest(test) self.events.append(('stopTest', self.test_index)) def addError(self, test, err): self.check_picklable(test, err) self.events.append(('addError', self.test_index, err)) super().addError(test, err) def addFailure(self, test, err): self.check_picklable(test, err) self.events.append(('addFailure', self.test_index, err)) super().addFailure(test, err) def addSubTest(self, test, subtest, err): # Follow Python's implementation of unittest.TestResult.addSubTest() by # not doing anything when a subtest is successful. if err is not None: # Call check_picklable() before check_subtest_picklable() since # check_picklable() performs the tblib check. self.check_picklable(test, err) self.check_subtest_picklable(test, subtest) self.events.append(('addSubTest', self.test_index, subtest, err)) super().addSubTest(test, subtest, err) def addSuccess(self, test): self.events.append(('addSuccess', self.test_index)) super().addSuccess(test) def addSkip(self, test, reason): self.events.append(('addSkip', self.test_index, reason)) super().addSkip(test, reason) def addExpectedFailure(self, test, err): # If tblib isn't installed, pickling the traceback will always fail. # However we don't want tblib to be required for running the tests # when they pass or fail as expected. Drop the traceback when an # expected failure occurs. if tblib is None: err = err[0], err[1], None self.check_picklable(test, err) self.events.append(('addExpectedFailure', self.test_index, err)) super().addExpectedFailure(test, err) def addUnexpectedSuccess(self, test): self.events.append(('addUnexpectedSuccess', self.test_index)) super().addUnexpectedSuccess(test) def wasSuccessful(self): """Tells whether or not this result was a success.""" failure_types = {'addError', 'addFailure', 'addSubTest', 'addUnexpectedSuccess'} return all(e[0] not in failure_types for e in self.events) def _exc_info_to_string(self, err, test): # Make this method no-op. It only powers the default unittest behavior # for recording errors, but this class pickles errors into 'events' # instead. return '' class RemoteTestRunner: """ Run tests and record everything but don't display anything. The implementation matches the unpythonic coding style of unittest2. """ resultclass = RemoteTestResult def __init__(self, failfast=False, resultclass=None, buffer=False): self.failfast = failfast self.buffer = buffer if resultclass is not None: self.resultclass = resultclass def run(self, test): result = self.resultclass() unittest.registerResult(result) result.failfast = self.failfast result.buffer = self.buffer test(result) return result def default_test_processes(): """Default number of test processes when using the --parallel option.""" # The current implementation of the parallel test runner requires # multiprocessing to start subprocesses with fork(). 
if multiprocessing.get_start_method() != 'fork': return 1 try: return int(os.environ['DJANGO_TEST_PROCESSES']) except KeyError: return multiprocessing.cpu_count() _worker_id = 0 def _init_worker(counter): """ Switch to databases dedicated to this worker. This helper lives at module-level because of the multiprocessing module's requirements. """ global _worker_id with counter.get_lock(): counter.value += 1 _worker_id = counter.value for alias in connections: connection = connections[alias] settings_dict = connection.creation.get_test_db_clone_settings(str(_worker_id)) # connection.settings_dict must be updated in place for changes to be # reflected in django.db.connections. If the following line assigned # connection.settings_dict = settings_dict, new threads would connect # to the default database instead of the appropriate clone. connection.settings_dict.update(settings_dict) connection.close() def _run_subsuite(args): """ Run a suite of tests with a RemoteTestRunner and return a RemoteTestResult. This helper lives at module-level and its arguments are wrapped in a tuple because of the multiprocessing module's requirements. """ runner_class, subsuite_index, subsuite, failfast, buffer = args runner = runner_class(failfast=failfast, buffer=buffer) result = runner.run(subsuite) return subsuite_index, result.events class ParallelTestSuite(unittest.TestSuite): """ Run a series of tests in parallel in several processes. While the unittest module's documentation implies that orchestrating the execution of tests is the responsibility of the test runner, in practice, it appears that TestRunner classes are more concerned with formatting and displaying test results. Since there are fewer use cases for customizing TestSuite than TestRunner, implementing parallelization at the level of the TestSuite improves interoperability with existing custom test runners. A single instance of a test runner can still collect results from all tests without being aware that they have been run in parallel. """ # In case someone wants to modify these in a subclass. init_worker = _init_worker run_subsuite = _run_subsuite runner_class = RemoteTestRunner def __init__(self, suite, processes, failfast=False, buffer=False): self.subsuites = partition_suite_by_case(suite) self.processes = processes self.failfast = failfast self.buffer = buffer super().__init__() def run(self, result): """ Distribute test cases across workers. Return an identifier of each test case with its result in order to use imap_unordered to show results as soon as they're available. To minimize pickling errors when getting results from workers: - pass back numeric indexes in self.subsuites instead of tests - make tracebacks picklable with tblib, if available Even with tblib, errors may still occur for dynamically created exception classes which cannot be unpickled. 
""" counter = multiprocessing.Value(ctypes.c_int, 0) pool = multiprocessing.Pool( processes=self.processes, initializer=self.init_worker.__func__, initargs=[counter], ) args = [ (self.runner_class, index, subsuite, self.failfast, self.buffer) for index, subsuite in enumerate(self.subsuites) ] test_results = pool.imap_unordered(self.run_subsuite.__func__, args) while True: if result.shouldStop: pool.terminate() break try: subsuite_index, events = test_results.next(timeout=0.1) except multiprocessing.TimeoutError: continue except StopIteration: pool.close() break tests = list(self.subsuites[subsuite_index]) for event in events: event_name = event[0] handler = getattr(result, event_name, None) if handler is None: continue test = tests[event[1]] args = event[2:] handler(test, *args) pool.join() return result def __iter__(self): return iter(self.subsuites) class DiscoverRunner: """A Django test runner that uses unittest2 test discovery.""" test_suite = unittest.TestSuite parallel_test_suite = ParallelTestSuite test_runner = unittest.TextTestRunner test_loader = unittest.defaultTestLoader reorder_by = (TestCase, SimpleTestCase) def __init__(self, pattern=None, top_level=None, verbosity=1, interactive=True, failfast=False, keepdb=False, reverse=False, debug_mode=False, debug_sql=False, parallel=0, tags=None, exclude_tags=None, test_name_patterns=None, pdb=False, buffer=False, enable_faulthandler=True, timing=False, **kwargs): self.pattern = pattern self.top_level = top_level self.verbosity = verbosity self.interactive = interactive self.failfast = failfast self.keepdb = keepdb self.reverse = reverse self.debug_mode = debug_mode self.debug_sql = debug_sql self.parallel = parallel self.tags = set(tags or []) self.exclude_tags = set(exclude_tags or []) if not faulthandler.is_enabled() and enable_faulthandler: try: faulthandler.enable(file=sys.stderr.fileno()) except (AttributeError, io.UnsupportedOperation): faulthandler.enable(file=sys.__stderr__.fileno()) self.pdb = pdb if self.pdb and self.parallel > 1: raise ValueError('You cannot use --pdb with parallel tests; pass --parallel=1 to use it.') self.buffer = buffer self.test_name_patterns = None self.time_keeper = TimeKeeper() if timing else NullTimeKeeper() if test_name_patterns: # unittest does not export the _convert_select_pattern function # that converts command-line arguments to patterns. self.test_name_patterns = { pattern if '*' in pattern else '*%s*' % pattern for pattern in test_name_patterns } @classmethod def add_arguments(cls, parser): parser.add_argument( '-t', '--top-level-directory', dest='top_level', help='Top level of project for unittest discovery.', ) parser.add_argument( '-p', '--pattern', default="test*.py", help='The test matching pattern. Defaults to test*.py.', ) parser.add_argument( '--keepdb', action='store_true', help='Preserves the test DB between runs.' ) parser.add_argument( '-r', '--reverse', action='store_true', help='Reverses test cases order.', ) parser.add_argument( '--debug-mode', action='store_true', help='Sets settings.DEBUG to True.', ) parser.add_argument( '-d', '--debug-sql', action='store_true', help='Prints logged SQL queries on failure.', ) parser.add_argument( '--parallel', nargs='?', default=1, type=int, const=default_test_processes(), metavar='N', help='Run tests using up to N parallel processes.', ) parser.add_argument( '--tag', action='append', dest='tags', help='Run only tests with the specified tag. 
Can be used multiple times.', ) parser.add_argument( '--exclude-tag', action='append', dest='exclude_tags', help='Do not run tests with the specified tag. Can be used multiple times.', ) parser.add_argument( '--pdb', action='store_true', help='Runs a debugger (pdb, or ipdb if installed) on error or failure.' ) parser.add_argument( '-b', '--buffer', action='store_true', help='Discard output from passing tests.', ) parser.add_argument( '--no-faulthandler', action='store_false', dest='enable_faulthandler', help='Disables the Python faulthandler module during tests.', ) parser.add_argument( '--timing', action='store_true', help=( 'Output timings, including database set up and total run time.' ), ) parser.add_argument( '-k', action='append', dest='test_name_patterns', help=( 'Only run test methods and classes that match the pattern ' 'or substring. Can be used multiple times. Same as ' 'unittest -k option.' ), ) def setup_test_environment(self, **kwargs): setup_test_environment(debug=self.debug_mode) unittest.installHandler() def build_suite(self, test_labels=None, extra_tests=None, **kwargs): test_labels = test_labels or ['.'] extra_tests = extra_tests or [] self.test_loader.testNamePatterns = self.test_name_patterns discover_kwargs = {} if self.pattern is not None: discover_kwargs['pattern'] = self.pattern if self.top_level is not None: discover_kwargs['top_level_dir'] = self.top_level all_tests = [] for label in test_labels: label_as_path = os.path.abspath(label) tests = None # if a module, or "module.ClassName[.method_name]", just run those if not os.path.exists(label_as_path): tests = self.test_loader.loadTestsFromName(label) # Try discovery if "label" is a package or directory. if not (tests and tests.countTestCases()) and is_discoverable(label): kwargs = discover_kwargs.copy() if os.path.isdir(label_as_path) and not self.top_level: kwargs['top_level_dir'] = find_top_level(label_as_path) tests = self.test_loader.discover(start_dir=label, **kwargs) # Make unittest forget the top-level dir it calculated from this # run, to support running tests from two different top-levels. self.test_loader._top_level_dir = None all_tests.extend(iter_test_cases(tests)) all_tests.extend(iter_test_cases(extra_tests)) if self.tags or self.exclude_tags: if self.verbosity >= 2: if self.tags: print('Including test tag(s): %s.' % ', '.join(sorted(self.tags))) if self.exclude_tags: print('Excluding test tag(s): %s.' % ', '.join(sorted(self.exclude_tags))) all_tests = filter_tests_by_tags(all_tests, self.tags, self.exclude_tags) all_tests = reorder_tests(all_tests, self.reorder_by, self.reverse) suite = self.test_suite(all_tests) if self.parallel > 1: parallel_suite = self.parallel_test_suite( suite, self.parallel, self.failfast, self.buffer, ) # Since tests are distributed across processes on a per-TestCase # basis, there's no need for more processes than TestCases. parallel_units = len(parallel_suite.subsuites) self.parallel = min(self.parallel, parallel_units) # If there's only one TestCase, parallelization isn't needed. 
if self.parallel > 1: suite = parallel_suite return suite def setup_databases(self, **kwargs): return _setup_databases( self.verbosity, self.interactive, time_keeper=self.time_keeper, keepdb=self.keepdb, debug_sql=self.debug_sql, parallel=self.parallel, **kwargs ) def get_resultclass(self): if self.debug_sql: return DebugSQLTextTestResult elif self.pdb: return PDBDebugResult def get_test_runner_kwargs(self): return { 'failfast': self.failfast, 'resultclass': self.get_resultclass(), 'verbosity': self.verbosity, 'buffer': self.buffer, } def run_checks(self, databases): # Checks are run after database creation since some checks require # database access. call_command('check', verbosity=self.verbosity, databases=databases) def run_suite(self, suite, **kwargs): kwargs = self.get_test_runner_kwargs() runner = self.test_runner(**kwargs) return runner.run(suite) def teardown_databases(self, old_config, **kwargs): """Destroy all the non-mirror databases.""" _teardown_databases( old_config, verbosity=self.verbosity, parallel=self.parallel, keepdb=self.keepdb, ) def teardown_test_environment(self, **kwargs): unittest.removeHandler() teardown_test_environment() def suite_result(self, suite, result, **kwargs): return len(result.failures) + len(result.errors) def _get_databases(self, suite): databases = {} for test in iter_test_cases(suite): test_databases = getattr(test, 'databases', None) if test_databases == '__all__': test_databases = connections if test_databases: serialized_rollback = getattr(test, 'serialized_rollback', False) databases.update( (alias, serialized_rollback or databases.get(alias, False)) for alias in test_databases ) return databases def get_databases(self, suite): databases = self._get_databases(suite) if self.verbosity >= 2: unused_databases = [alias for alias in connections if alias not in databases] if unused_databases: print('Skipping setup of unused database(s): %s.' % ', '.join(sorted(unused_databases))) return databases def run_tests(self, test_labels, extra_tests=None, **kwargs): """ Run the unit tests for all the test labels in the provided list. Test labels should be dotted Python paths to test modules, test classes, or test methods. A list of 'extra' tests may also be provided; these tests will be added to the test suite. Return the number of tests that failed. """ self.setup_test_environment() suite = self.build_suite(test_labels, extra_tests) databases = self.get_databases(suite) serialized_aliases = set( alias for alias, serialize in databases.items() if serialize ) with self.time_keeper.timed('Total database setup'): old_config = self.setup_databases( aliases=databases, serialized_aliases=serialized_aliases, ) run_failed = False try: self.run_checks(databases) result = self.run_suite(suite) except Exception: run_failed = True raise finally: try: with self.time_keeper.timed('Total database teardown'): self.teardown_databases(old_config) self.teardown_test_environment() except Exception: # Silence teardown exceptions if an exception was raised during # runs to avoid shadowing it. if not run_failed: raise self.time_keeper.print_results() return self.suite_result(suite, result) def is_discoverable(label): """ Check if a test label points to a Python package or file directory. Relative labels like "." and ".." are seen as directories. 
""" try: mod = import_module(label) except (ImportError, TypeError): pass else: return hasattr(mod, '__path__') return os.path.isdir(os.path.abspath(label)) def find_top_level(top_level): # Try to be a bit smarter than unittest about finding the default top-level # for a given directory path, to avoid breaking relative imports. # (Unittest's default is to set top-level equal to the path, which means # relative imports will result in "Attempted relative import in # non-package."). # We'd be happy to skip this and require dotted module paths (which don't # cause this problem) instead of file paths (which do), but in the case of # a directory in the cwd, which would be equally valid if considered as a # top-level module or as a directory path, unittest unfortunately prefers # the latter. while True: init_py = os.path.join(top_level, '__init__.py') if not os.path.exists(init_py): break try_next = os.path.dirname(top_level) if try_next == top_level: # __init__.py all the way down? give up. break top_level = try_next return top_level def reorder_tests(tests, classes, reverse=False): """ Reorder an iterable of tests by test type, removing any duplicates. `classes` is a sequence of types. The result is returned as an iterator. All tests of type classes[0] are placed first, then tests of type classes[1], etc. Tests with no match in classes are placed last. If `reverse` is True, sort tests within classes in opposite order but don't reverse test classes. """ bins = [OrderedSet() for i in range(len(classes) + 1)] *class_bins, last_bin = bins for test in tests: for test_bin, test_class in zip(class_bins, classes): if isinstance(test, test_class): break else: test_bin = last_bin test_bin.add(test) if reverse: bins = (reversed(tests) for tests in bins) return itertools.chain(*bins) def partition_suite_by_case(suite): """Partition a test suite by test case, preserving the order of tests.""" suite_class = type(suite) all_tests = iter_test_cases(suite) return [ suite_class(tests) for _, tests in itertools.groupby(all_tests, type) ] def test_match_tags(test, tags, exclude_tags): test_tags = set(getattr(test, 'tags', [])) test_fn_name = getattr(test, '_testMethodName', str(test)) if hasattr(test, test_fn_name): test_fn = getattr(test, test_fn_name) test_fn_tags = list(getattr(test_fn, 'tags', [])) test_tags = test_tags.union(test_fn_tags) matched_tags = test_tags.intersection(tags) return (matched_tags or not tags) and not test_tags.intersection(exclude_tags) def filter_tests_by_tags(tests, tags, exclude_tags): """Return the matching tests as an iterator.""" return (test for test in tests if test_match_tags(test, tags, exclude_tags))
""" Cross Site Request Forgery Middleware. This module provides a middleware that implements protection against request forgeries from other sites. """ import logging import re import string from collections import defaultdict from urllib.parse import urlparse from django.conf import settings from django.core.exceptions import DisallowedHost, ImproperlyConfigured from django.urls import get_callable from django.utils.cache import patch_vary_headers from django.utils.crypto import constant_time_compare, get_random_string from django.utils.deprecation import MiddlewareMixin from django.utils.functional import cached_property from django.utils.http import is_same_domain from django.utils.log import log_response logger = logging.getLogger('django.security.csrf') REASON_BAD_ORIGIN = "Origin checking failed - %s does not match any trusted origins." REASON_NO_REFERER = "Referer checking failed - no Referer." REASON_BAD_REFERER = "Referer checking failed - %s does not match any trusted origins." REASON_NO_CSRF_COOKIE = "CSRF cookie not set." REASON_BAD_TOKEN = "CSRF token missing or incorrect." REASON_MALFORMED_REFERER = "Referer checking failed - Referer is malformed." REASON_INSECURE_REFERER = "Referer checking failed - Referer is insecure while host is secure." CSRF_SECRET_LENGTH = 32 CSRF_TOKEN_LENGTH = 2 * CSRF_SECRET_LENGTH CSRF_ALLOWED_CHARS = string.ascii_letters + string.digits CSRF_SESSION_KEY = '_csrftoken' def _get_failure_view(): """Return the view to be used for CSRF rejections.""" return get_callable(settings.CSRF_FAILURE_VIEW) def _get_new_csrf_string(): return get_random_string(CSRF_SECRET_LENGTH, allowed_chars=CSRF_ALLOWED_CHARS) def _mask_cipher_secret(secret): """ Given a secret (assumed to be a string of CSRF_ALLOWED_CHARS), generate a token by adding a mask and applying it to the secret. """ mask = _get_new_csrf_string() chars = CSRF_ALLOWED_CHARS pairs = zip((chars.index(x) for x in secret), (chars.index(x) for x in mask)) cipher = ''.join(chars[(x + y) % len(chars)] for x, y in pairs) return mask + cipher def _unmask_cipher_token(token): """ Given a token (assumed to be a string of CSRF_ALLOWED_CHARS, of length CSRF_TOKEN_LENGTH, and that its first half is a mask), use it to decrypt the second half to produce the original secret. """ mask = token[:CSRF_SECRET_LENGTH] token = token[CSRF_SECRET_LENGTH:] chars = CSRF_ALLOWED_CHARS pairs = zip((chars.index(x) for x in token), (chars.index(x) for x in mask)) return ''.join(chars[x - y] for x, y in pairs) # Note negative values are ok def _get_new_csrf_token(): return _mask_cipher_secret(_get_new_csrf_string()) def get_token(request): """ Return the CSRF token required for a POST form. The token is an alphanumeric value. A new token is created if one is not already set. A side effect of calling this function is to make the csrf_protect decorator and the CsrfViewMiddleware add a CSRF cookie and a 'Vary: Cookie' header to the outgoing response. For this reason, you may need to use this function lazily, as is done by the csrf context processor. """ if "CSRF_COOKIE" not in request.META: csrf_secret = _get_new_csrf_string() request.META["CSRF_COOKIE"] = _mask_cipher_secret(csrf_secret) else: csrf_secret = _unmask_cipher_token(request.META["CSRF_COOKIE"]) request.META["CSRF_COOKIE_USED"] = True return _mask_cipher_secret(csrf_secret) def rotate_token(request): """ Change the CSRF token in use for a request - should be done on login for security purposes. 
""" request.META.update({ "CSRF_COOKIE_USED": True, "CSRF_COOKIE": _get_new_csrf_token(), }) request.csrf_cookie_needs_reset = True def _sanitize_token(token): # Allow only ASCII alphanumerics if re.search('[^a-zA-Z0-9]', token): return _get_new_csrf_token() elif len(token) == CSRF_TOKEN_LENGTH: return token elif len(token) == CSRF_SECRET_LENGTH: # Older Django versions set cookies to values of CSRF_SECRET_LENGTH # alphanumeric characters. For backwards compatibility, accept # such values as unmasked secrets. # It's easier to mask here and be consistent later, rather than add # different code paths in the checks, although that might be a tad more # efficient. return _mask_cipher_secret(token) return _get_new_csrf_token() def _compare_masked_tokens(request_csrf_token, csrf_token): # Assume both arguments are sanitized -- that is, strings of # length CSRF_TOKEN_LENGTH, all CSRF_ALLOWED_CHARS. return constant_time_compare( _unmask_cipher_token(request_csrf_token), _unmask_cipher_token(csrf_token), ) class CsrfViewMiddleware(MiddlewareMixin): """ Require a present and correct csrfmiddlewaretoken for POST requests that have a CSRF cookie, and set an outgoing CSRF cookie. This middleware should be used in conjunction with the {% csrf_token %} template tag. """ @cached_property def csrf_trusted_origins_hosts(self): return [ urlparse(origin).netloc.lstrip('*') for origin in settings.CSRF_TRUSTED_ORIGINS ] @cached_property def allowed_origins_exact(self): return { origin for origin in settings.CSRF_TRUSTED_ORIGINS if '*' not in origin } @cached_property def allowed_origin_subdomains(self): """ A mapping of allowed schemes to list of allowed netlocs, where all subdomains of the netloc are allowed. """ allowed_origin_subdomains = defaultdict(list) for parsed in (urlparse(origin) for origin in settings.CSRF_TRUSTED_ORIGINS if '*' in origin): allowed_origin_subdomains[parsed.scheme].append(parsed.netloc.lstrip('*')) return allowed_origin_subdomains # The _accept and _reject methods currently only exist for the sake of the # requires_csrf_token decorator. def _accept(self, request): # Avoid checking the request twice by adding a custom attribute to # request. This will be relevant when both decorator and middleware # are used. request.csrf_processing_done = True return None def _reject(self, request, reason): response = _get_failure_view()(request, reason=reason) log_response( 'Forbidden (%s): %s', reason, request.path, response=response, request=request, logger=logger, ) return response def _get_token(self, request): if settings.CSRF_USE_SESSIONS: try: return request.session.get(CSRF_SESSION_KEY) except AttributeError: raise ImproperlyConfigured( 'CSRF_USE_SESSIONS is enabled, but request.session is not ' 'set. SessionMiddleware must appear before CsrfViewMiddleware ' 'in MIDDLEWARE.' ) else: try: cookie_token = request.COOKIES[settings.CSRF_COOKIE_NAME] except KeyError: return None csrf_token = _sanitize_token(cookie_token) if csrf_token != cookie_token: # Cookie token needed to be replaced; # the cookie needs to be reset. 
request.csrf_cookie_needs_reset = True return csrf_token def _set_token(self, request, response): if settings.CSRF_USE_SESSIONS: if request.session.get(CSRF_SESSION_KEY) != request.META['CSRF_COOKIE']: request.session[CSRF_SESSION_KEY] = request.META['CSRF_COOKIE'] else: response.set_cookie( settings.CSRF_COOKIE_NAME, request.META['CSRF_COOKIE'], max_age=settings.CSRF_COOKIE_AGE, domain=settings.CSRF_COOKIE_DOMAIN, path=settings.CSRF_COOKIE_PATH, secure=settings.CSRF_COOKIE_SECURE, httponly=settings.CSRF_COOKIE_HTTPONLY, samesite=settings.CSRF_COOKIE_SAMESITE, ) # Set the Vary header since content varies with the CSRF cookie. patch_vary_headers(response, ('Cookie',)) def _origin_verified(self, request): request_origin = request.META['HTTP_ORIGIN'] good_origin = '%s://%s' % ( 'https' if request.is_secure() else 'http', request.get_host(), ) if request_origin == good_origin: return True if request_origin in self.allowed_origins_exact: return True try: parsed_origin = urlparse(request_origin) except ValueError: return False request_scheme = parsed_origin.scheme request_netloc = parsed_origin.netloc return any( is_same_domain(request_netloc, host) for host in self.allowed_origin_subdomains.get(request_scheme, ()) ) def process_request(self, request): csrf_token = self._get_token(request) if csrf_token is not None: # Use same token next time. request.META['CSRF_COOKIE'] = csrf_token def process_view(self, request, callback, callback_args, callback_kwargs): if getattr(request, 'csrf_processing_done', False): return None # Wait until request.META["CSRF_COOKIE"] has been manipulated before # bailing out, so that get_token still works if getattr(callback, 'csrf_exempt', False): return None # Assume that anything not defined as 'safe' by RFC7231 needs protection if request.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'): if getattr(request, '_dont_enforce_csrf_checks', False): # Mechanism to turn off CSRF checks for test suite. # It comes after the creation of CSRF cookies, so that # everything else continues to work exactly the same # (e.g. cookies are sent, etc.), but before any # branches that call reject(). return self._accept(request) # Reject the request if the Origin header doesn't match an allowed # value. if 'HTTP_ORIGIN' in request.META: if not self._origin_verified(request): return self._reject(request, REASON_BAD_ORIGIN % request.META['HTTP_ORIGIN']) elif request.is_secure(): # If the Origin header wasn't provided, reject HTTPS requests # if the Referer header doesn't match an allowed value. # # Suppose user visits http://example.com/ # An active network attacker (man-in-the-middle, MITM) sends a # POST form that targets https://example.com/detonate-bomb/ and # submits it via JavaScript. # # The attacker will need to provide a CSRF cookie and token, but # that's no problem for a MITM and the session-independent # secret we're using. So the MITM can circumvent the CSRF # protection. This is true for any HTTP connection, but anyone # using HTTPS expects better! For this reason, for # https://example.com/ we need additional protection that treats # http://example.com/ as completely untrusted. Under HTTPS, # Barth et al. found that the Referer header is missing for # same-domain requests in only about 0.2% of cases or less, so # we can use strict Referer checking. 
referer = request.META.get('HTTP_REFERER') if referer is None: return self._reject(request, REASON_NO_REFERER) try: referer = urlparse(referer) except ValueError: return self._reject(request, REASON_MALFORMED_REFERER) # Make sure we have a valid URL for Referer. if '' in (referer.scheme, referer.netloc): return self._reject(request, REASON_MALFORMED_REFERER) # Ensure that our Referer is also secure. if referer.scheme != 'https': return self._reject(request, REASON_INSECURE_REFERER) # If there isn't a CSRF_COOKIE_DOMAIN, require an exact match # match on host:port. If not, obey the cookie rules (or those # for the session cookie, if CSRF_USE_SESSIONS). good_referer = ( settings.SESSION_COOKIE_DOMAIN if settings.CSRF_USE_SESSIONS else settings.CSRF_COOKIE_DOMAIN ) if good_referer is not None: server_port = request.get_port() if server_port not in ('443', '80'): good_referer = '%s:%s' % (good_referer, server_port) else: try: # request.get_host() includes the port. good_referer = request.get_host() except DisallowedHost: pass # Create a list of all acceptable HTTP referers, including the # current host if it's permitted by ALLOWED_HOSTS. good_hosts = list(self.csrf_trusted_origins_hosts) if good_referer is not None: good_hosts.append(good_referer) if not any(is_same_domain(referer.netloc, host) for host in good_hosts): reason = REASON_BAD_REFERER % referer.geturl() return self._reject(request, reason) # Access csrf_token via self._get_token() as rotate_token() may # have been called by an authentication middleware during the # process_request() phase. csrf_token = self._get_token(request) if csrf_token is None: # No CSRF cookie. For POST requests, we insist on a CSRF cookie, # and in this way we can avoid all CSRF attacks, including login # CSRF. return self._reject(request, REASON_NO_CSRF_COOKIE) # Check non-cookie token for match. request_csrf_token = "" if request.method == "POST": try: request_csrf_token = request.POST.get('csrfmiddlewaretoken', '') except OSError: # Handle a broken connection before we've completed reading # the POST data. process_view shouldn't raise any # exceptions, so we'll ignore and serve the user a 403 # (assuming they're still listening, which they probably # aren't because of the error). pass if request_csrf_token == "": # Fall back to X-CSRFToken, to make things easier for AJAX, # and possible for PUT/DELETE. request_csrf_token = request.META.get(settings.CSRF_HEADER_NAME, '') request_csrf_token = _sanitize_token(request_csrf_token) if not _compare_masked_tokens(request_csrf_token, csrf_token): return self._reject(request, REASON_BAD_TOKEN) return self._accept(request) def process_response(self, request, response): if not getattr(request, 'csrf_cookie_needs_reset', False): if getattr(response, 'csrf_cookie_set', False): return response if not request.META.get("CSRF_COOKIE_USED", False): return response # Set the CSRF cookie even if it's already set, so we renew # the expiry timer. self._set_token(request, response) response.csrf_cookie_set = True return response
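
As a worked illustration of the masking scheme implemented by _mask_cipher_secret() and _unmask_cipher_token() above: the secret is combined with a fresh random mask by modular addition over CSRF_ALLOWED_CHARS, and unmasking reverses it with modular subtraction, so two tokens for the same secret differ but compare as equivalent. This is a sketch, assuming the module-private helpers remain importable from django.middleware.csrf (they do not require configured settings).

from django.middleware.csrf import (
    _compare_masked_tokens, _get_new_csrf_string, _mask_cipher_secret,
    _unmask_cipher_token,
)

secret = _get_new_csrf_string()        # CSRF_SECRET_LENGTH (32) alphanumeric chars
token_a = _mask_cipher_secret(secret)  # CSRF_TOKEN_LENGTH (64) chars: mask + masked secret
token_b = _mask_cipher_secret(secret)  # a different random mask on every call

assert token_a != token_b                        # tokens rotate per call ...
assert _unmask_cipher_token(token_a) == secret   # ... but both recover the same secret
assert _unmask_cipher_token(token_b) == secret
assert _compare_masked_tokens(token_a, token_b)  # and compare as equivalent (constant time)
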
import re from django.conf import settings from django.contrib.sessions.backends.cache import SessionStore from django.core.exceptions import ImproperlyConfigured from django.http import HttpRequest, HttpResponse from django.middleware.csrf import ( CSRF_SESSION_KEY, CSRF_TOKEN_LENGTH, REASON_BAD_ORIGIN, REASON_BAD_TOKEN, REASON_NO_CSRF_COOKIE, CsrfViewMiddleware, _compare_masked_tokens as equivalent_tokens, get_token, ) from django.test import SimpleTestCase, override_settings from django.views.decorators.csrf import csrf_exempt, requires_csrf_token from .views import ( ensure_csrf_cookie_view, non_token_view_using_request_processor, post_form_view, token_view, ) class TestingHttpRequest(HttpRequest): """ A version of HttpRequest that allows us to change some things more easily """ def __init__(self): super().__init__() self.session = SessionStore() def is_secure(self): return getattr(self, '_is_secure_override', False) class CsrfViewMiddlewareTestMixin: """ Shared methods and tests for session-based and cookie-based tokens. """ _csrf_id = _csrf_id_cookie = '1bcdefghij2bcdefghij3bcdefghij4bcdefghij5bcdefghij6bcdefghijABCD' def _get_GET_no_csrf_cookie_request(self): req = TestingHttpRequest() req.method = 'GET' return req def _get_GET_csrf_cookie_request(self): raise NotImplementedError('This method must be implemented by a subclass.') def _get_POST_csrf_cookie_request(self): req = self._get_GET_csrf_cookie_request() req.method = "POST" return req def _get_POST_no_csrf_cookie_request(self): req = self._get_GET_no_csrf_cookie_request() req.method = "POST" return req def _get_POST_request_with_token(self): req = self._get_POST_csrf_cookie_request() req.POST['csrfmiddlewaretoken'] = self._csrf_id return req def _check_token_present(self, response, csrf_id=None): text = str(response.content, response.charset) match = re.search('name="csrfmiddlewaretoken" value="(.*?)"', text) csrf_token = csrf_id or self._csrf_id self.assertTrue( match and equivalent_tokens(csrf_token, match[1]), "Could not find csrfmiddlewaretoken to match %s" % csrf_token ) def test_process_response_get_token_not_used(self): """ If get_token() is not called, the view middleware does not add a cookie. """ # This is important to make pages cacheable. Pages which do call # get_token(), assuming they use the token, are not cacheable because # the token is specific to the user req = self._get_GET_no_csrf_cookie_request() # non_token_view_using_request_processor does not call get_token(), but # does use the csrf request processor. By using this, we are testing # that the view processor is properly lazy and doesn't call get_token() # until needed. mw = CsrfViewMiddleware(non_token_view_using_request_processor) mw.process_request(req) mw.process_view(req, non_token_view_using_request_processor, (), {}) resp = mw(req) csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, False) self.assertIs(csrf_cookie, False) # Check the request processing def test_process_request_no_csrf_cookie(self): """ If no CSRF cookies is present, the middleware rejects the incoming request. This will stop login CSRF. 
""" req = self._get_POST_no_csrf_cookie_request() mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) with self.assertLogs('django.security.csrf', 'WARNING') as cm: resp = mw.process_view(req, post_form_view, (), {}) self.assertEqual(403, resp.status_code) self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_NO_CSRF_COOKIE) def test_process_request_csrf_cookie_no_token(self): """ If a CSRF cookie is present but no token, the middleware rejects the incoming request. """ req = self._get_POST_csrf_cookie_request() mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) with self.assertLogs('django.security.csrf', 'WARNING') as cm: resp = mw.process_view(req, post_form_view, (), {}) self.assertEqual(403, resp.status_code) self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_BAD_TOKEN) def test_process_request_csrf_cookie_and_token(self): """ If both a cookie and a token is present, the middleware lets it through. """ req = self._get_POST_request_with_token() mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) def test_process_request_csrf_cookie_no_token_exempt_view(self): """ If a CSRF cookie is present and no token, but the csrf_exempt decorator has been applied to the view, the middleware lets it through """ req = self._get_POST_csrf_cookie_request() mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, csrf_exempt(post_form_view), (), {}) self.assertIsNone(resp) def test_csrf_token_in_header(self): """ The token may be passed in a header instead of in the form. """ req = self._get_POST_csrf_cookie_request() req.META['HTTP_X_CSRFTOKEN'] = self._csrf_id mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(CSRF_HEADER_NAME='HTTP_X_CSRFTOKEN_CUSTOMIZED') def test_csrf_token_in_header_with_customized_name(self): """ settings.CSRF_HEADER_NAME can be used to customize the CSRF header name """ req = self._get_POST_csrf_cookie_request() req.META['HTTP_X_CSRFTOKEN_CUSTOMIZED'] = self._csrf_id mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) def test_put_and_delete_rejected(self): """ HTTP PUT and DELETE methods have protection """ req = TestingHttpRequest() req.method = 'PUT' mw = CsrfViewMiddleware(post_form_view) with self.assertLogs('django.security.csrf', 'WARNING') as cm: resp = mw.process_view(req, post_form_view, (), {}) self.assertEqual(403, resp.status_code) self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_NO_CSRF_COOKIE) req = TestingHttpRequest() req.method = 'DELETE' with self.assertLogs('django.security.csrf', 'WARNING') as cm: resp = mw.process_view(req, post_form_view, (), {}) self.assertEqual(403, resp.status_code) self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_NO_CSRF_COOKIE) def test_put_and_delete_allowed(self): """ HTTP PUT and DELETE can get through with X-CSRFToken and a cookie. 
""" req = self._get_GET_csrf_cookie_request() req.method = 'PUT' req.META['HTTP_X_CSRFTOKEN'] = self._csrf_id mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) req = self._get_GET_csrf_cookie_request() req.method = 'DELETE' req.META['HTTP_X_CSRFTOKEN'] = self._csrf_id mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) # Tests for the template tag method def test_token_node_no_csrf_cookie(self): """ CsrfTokenNode works when no CSRF cookie is set. """ req = self._get_GET_no_csrf_cookie_request() resp = token_view(req) token = get_token(req) self.assertIsNotNone(token) self._check_token_present(resp, token) def test_token_node_empty_csrf_cookie(self): """ A new token is sent if the csrf_cookie is the empty string. """ req = self._get_GET_no_csrf_cookie_request() req.COOKIES[settings.CSRF_COOKIE_NAME] = "" mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = token_view(req) token = get_token(req) self.assertIsNotNone(token) self._check_token_present(resp, token) def test_token_node_with_csrf_cookie(self): """ CsrfTokenNode works when a CSRF cookie is set. """ req = self._get_GET_csrf_cookie_request() mw = CsrfViewMiddleware(token_view) mw.process_request(req) mw.process_view(req, token_view, (), {}) resp = token_view(req) self._check_token_present(resp) def test_get_token_for_exempt_view(self): """ get_token still works for a view decorated with 'csrf_exempt'. """ req = self._get_GET_csrf_cookie_request() mw = CsrfViewMiddleware(token_view) mw.process_request(req) mw.process_view(req, csrf_exempt(token_view), (), {}) resp = token_view(req) self._check_token_present(resp) def test_get_token_for_requires_csrf_token_view(self): """ get_token() works for a view decorated solely with requires_csrf_token. """ req = self._get_GET_csrf_cookie_request() resp = requires_csrf_token(token_view)(req) self._check_token_present(resp) def test_token_node_with_new_csrf_cookie(self): """ CsrfTokenNode works when a CSRF cookie is created by the middleware (when one was not already present) """ req = self._get_GET_no_csrf_cookie_request() mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) csrf_cookie = resp.cookies[settings.CSRF_COOKIE_NAME] self._check_token_present(resp, csrf_id=csrf_cookie.value) def test_cookie_not_reset_on_accepted_request(self): """ The csrf token used in posts is changed on every request (although stays equivalent). The csrf cookie should not change on accepted requests. If it appears in the response, it should keep its value. 
""" req = self._get_POST_request_with_token() mw = CsrfViewMiddleware(token_view) mw.process_request(req) mw.process_view(req, token_view, (), {}) resp = mw(req) csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, None) if csrf_cookie: self.assertEqual( csrf_cookie.value, self._csrf_id_cookie, "CSRF cookie was changed on an accepted request" ) @override_settings(DEBUG=True, ALLOWED_HOSTS=['www.example.com']) def test_https_bad_referer(self): """ A POST HTTPS request with a bad referer is rejected """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://www.evil.org/somepage' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(post_form_view) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - https://www.evil.org/somepage does not ' 'match any trusted origins.', status_code=403, ) def test_https_malformed_host(self): """ CsrfViewMiddleware generates a 403 response if it receives an HTTPS request with a bad host. """ req = self._get_POST_no_csrf_cookie_request() req._is_secure_override = True req.META['HTTP_HOST'] = '@malformed' req.META['HTTP_REFERER'] = 'https://www.evil.org/somepage' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(token_view) response = mw.process_view(req, token_view, (), {}) self.assertEqual(response.status_code, 403) @override_settings(DEBUG=True) def test_https_malformed_referer(self): """ A POST HTTPS request with a bad referer is rejected. """ malformed_referer_msg = 'Referer checking failed - Referer is malformed.' req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_REFERER'] = 'http://http://www.example.com/' mw = CsrfViewMiddleware(post_form_view) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - Referer is insecure while host is secure.', status_code=403, ) # Empty req.META['HTTP_REFERER'] = '' response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) # Non-ASCII req.META['HTTP_REFERER'] = 'ØBöIß' response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) # missing scheme # >>> urlparse('//example.com/') # ParseResult(scheme='', netloc='example.com', path='/', params='', query='', fragment='') req.META['HTTP_REFERER'] = '//example.com/' response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) # missing netloc # >>> urlparse('https://') # ParseResult(scheme='https', netloc='', path='', params='', query='', fragment='') req.META['HTTP_REFERER'] = 'https://' response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) # Invalid URL # >>> urlparse('https://[') # ValueError: Invalid IPv6 URL req.META['HTTP_REFERER'] = 'https://[' response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_https_good_referer(self): """ A POST HTTPS request with a good referer is accepted. 
""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://www.example.com/somepage' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_https_good_referer_2(self): """ A POST HTTPS request with a good referer is accepted where the referer contains no trailing slash. """ # See ticket #15617 req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://www.example.com' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) def _test_https_good_referer_behind_proxy(self): req = self._get_POST_request_with_token() req._is_secure_override = True req.META.update({ 'HTTP_HOST': '10.0.0.2', 'HTTP_REFERER': 'https://www.example.com/somepage', 'SERVER_PORT': '8080', 'HTTP_X_FORWARDED_HOST': 'www.example.com', 'HTTP_X_FORWARDED_PORT': '443', }) mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['https://dashboard.example.com']) def test_https_csrf_trusted_origin_allowed(self): """ A POST HTTPS request with a referer added to the CSRF_TRUSTED_ORIGINS setting is accepted. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://dashboard.example.com' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['https://*.example.com']) def test_https_csrf_wildcard_trusted_origin_allowed(self): """ A POST HTTPS request with a referer that matches a CSRF_TRUSTED_ORIGINS wildcard is accepted. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://dashboard.example.com' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) def _test_https_good_referer_matches_cookie_domain(self): req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_REFERER'] = 'https://foo.example.com/' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) def _test_https_good_referer_matches_cookie_domain_with_different_port(self): req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://foo.example.com:4443/' req.META['SERVER_PORT'] = '4443' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) def test_ensures_csrf_cookie_no_logging(self): """ ensure_csrf_cookie() doesn't log warnings (#19436). 
""" with self.assertNoLogs('django.request', 'WARNING'): req = self._get_GET_no_csrf_cookie_request() ensure_csrf_cookie_view(req) def test_post_data_read_failure(self): """ OSErrors during POST data reading are caught and treated as if the POST data wasn't there (#20128). """ class CsrfPostRequest(HttpRequest): """ HttpRequest that can raise an OSError when accessing POST data """ def __init__(self, token, raise_error): super().__init__() self.method = 'POST' self.raise_error = False self.COOKIES[settings.CSRF_COOKIE_NAME] = token # Handle both cases here to prevent duplicate code in the # session tests. self.session = {} self.session[CSRF_SESSION_KEY] = token self.POST['csrfmiddlewaretoken'] = token self.raise_error = raise_error def _load_post_and_files(self): raise OSError('error reading input data') def _get_post(self): if self.raise_error: self._load_post_and_files() return self._post def _set_post(self, post): self._post = post POST = property(_get_post, _set_post) token = ('ABC' + self._csrf_id)[:CSRF_TOKEN_LENGTH] req = CsrfPostRequest(token, raise_error=False) mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) req = CsrfPostRequest(token, raise_error=True) mw.process_request(req) with self.assertLogs('django.security.csrf', 'WARNING') as cm: resp = mw.process_view(req, post_form_view, (), {}) self.assertEqual(resp.status_code, 403) self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_BAD_TOKEN) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_bad_origin_bad_domain(self): """A request with a bad origin is rejected.""" req = self._get_POST_request_with_token() req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://www.evil.org' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_bad_origin_null_origin(self): """A request with a null origin is rejected.""" req = self._get_POST_request_with_token() req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'null' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_bad_origin_bad_protocol(self): """A request with an origin with wrong protocol is rejected.""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'http://example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) @override_settings( ALLOWED_HOSTS=['www.example.com'], 
CSRF_TRUSTED_ORIGINS=[ 'http://no-match.com', 'https://*.example.com', 'http://*.no-match.com', 'http://*.no-match-2.com', ], ) def test_bad_origin_csrf_trusted_origin_bad_protocol(self): """ A request with an origin with the wrong protocol compared to CSRF_TRUSTED_ORIGINS is rejected. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'http://foo.example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) self.assertEqual(mw.allowed_origins_exact, {'http://no-match.com'}) self.assertEqual(mw.allowed_origin_subdomains, { 'https': ['.example.com'], 'http': ['.no-match.com', '.no-match-2.com'], }) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_bad_origin_cannot_be_parsed(self): """ A POST request with an origin that can't be parsed by urlparse() is rejected. """ req = self._get_POST_request_with_token() req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://[' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_good_origin_insecure(self): """A POST HTTP request with a good origin is accepted.""" req = self._get_POST_request_with_token() req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'http://www.example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), True) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_good_origin_secure(self): """A POST HTTPS request with a good origin is accepted.""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://www.example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), True) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['https://dashboard.example.com']) def test_good_origin_csrf_trusted_origin_allowed(self): """ A POST request with an origin added to the CSRF_TRUSTED_ORIGINS setting is accepted. 
""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://dashboard.example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), True) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) self.assertEqual(mw.allowed_origins_exact, {'https://dashboard.example.com'}) self.assertEqual(mw.allowed_origin_subdomains, {}) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['https://*.example.com']) def test_good_origin_wildcard_csrf_trusted_origin_allowed(self): """ A POST request with an origin that matches a CSRF_TRUSTED_ORIGINS wildcard is accepted. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://foo.example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), True) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) self.assertEqual(mw.allowed_origins_exact, set()) self.assertEqual(mw.allowed_origin_subdomains, {'https': ['.example.com']}) class CsrfViewMiddlewareTests(CsrfViewMiddlewareTestMixin, SimpleTestCase): def _get_GET_csrf_cookie_request(self): req = TestingHttpRequest() req.COOKIES[settings.CSRF_COOKIE_NAME] = self._csrf_id_cookie return req def _get_POST_bare_secret_csrf_cookie_request(self): req = self._get_POST_no_csrf_cookie_request() req.COOKIES[settings.CSRF_COOKIE_NAME] = self._csrf_id_cookie[:32] return req def _get_POST_bare_secret_csrf_cookie_request_with_token(self): req = self._get_POST_bare_secret_csrf_cookie_request() req.POST['csrfmiddlewaretoken'] = self._csrf_id_cookie[:32] return req def test_ensures_csrf_cookie_no_middleware(self): """ The ensure_csrf_cookie() decorator works without middleware. """ req = self._get_GET_no_csrf_cookie_request() resp = ensure_csrf_cookie_view(req) self.assertTrue(resp.cookies.get(settings.CSRF_COOKIE_NAME, False)) self.assertIn('Cookie', resp.get('Vary', '')) def test_ensures_csrf_cookie_with_middleware(self): """ The ensure_csrf_cookie() decorator works with the CsrfViewMiddleware enabled. """ req = self._get_GET_no_csrf_cookie_request() mw = CsrfViewMiddleware(ensure_csrf_cookie_view) mw.process_view(req, ensure_csrf_cookie_view, (), {}) resp = mw(req) self.assertTrue(resp.cookies.get(settings.CSRF_COOKIE_NAME, False)) self.assertIn('Cookie', resp.get('Vary', '')) def test_csrf_cookie_age(self): """ CSRF cookie age can be set using settings.CSRF_COOKIE_AGE. """ req = self._get_GET_no_csrf_cookie_request() MAX_AGE = 123 with self.settings(CSRF_COOKIE_NAME='csrfcookie', CSRF_COOKIE_DOMAIN='.example.com', CSRF_COOKIE_AGE=MAX_AGE, CSRF_COOKIE_PATH='/test/', CSRF_COOKIE_SECURE=True, CSRF_COOKIE_HTTPONLY=True): # token_view calls get_token() indirectly mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) max_age = resp.cookies.get('csrfcookie').get('max-age') self.assertEqual(max_age, MAX_AGE) def test_csrf_cookie_age_none(self): """ CSRF cookie age does not have max age set and therefore uses session-based cookies. 
""" req = self._get_GET_no_csrf_cookie_request() MAX_AGE = None with self.settings(CSRF_COOKIE_NAME='csrfcookie', CSRF_COOKIE_DOMAIN='.example.com', CSRF_COOKIE_AGE=MAX_AGE, CSRF_COOKIE_PATH='/test/', CSRF_COOKIE_SECURE=True, CSRF_COOKIE_HTTPONLY=True): # token_view calls get_token() indirectly mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) max_age = resp.cookies.get('csrfcookie').get('max-age') self.assertEqual(max_age, '') def test_csrf_cookie_samesite(self): req = self._get_GET_no_csrf_cookie_request() with self.settings(CSRF_COOKIE_NAME='csrfcookie', CSRF_COOKIE_SAMESITE='Strict'): mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) self.assertEqual(resp.cookies['csrfcookie']['samesite'], 'Strict') def test_process_view_token_too_long(self): """ If the token is longer than expected, it is ignored and a new token is created. """ req = self._get_GET_no_csrf_cookie_request() req.COOKIES[settings.CSRF_COOKIE_NAME] = 'x' * 100000 mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, False) self.assertEqual(len(csrf_cookie.value), CSRF_TOKEN_LENGTH) def test_process_view_token_invalid_chars(self): """ If the token contains non-alphanumeric characters, it is ignored and a new token is created. """ token = ('!@#' + self._csrf_id)[:CSRF_TOKEN_LENGTH] req = self._get_GET_no_csrf_cookie_request() req.COOKIES[settings.CSRF_COOKIE_NAME] = token mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, False) self.assertEqual(len(csrf_cookie.value), CSRF_TOKEN_LENGTH) self.assertNotEqual(csrf_cookie.value, token) def test_bare_secret_accepted_and_replaced(self): """ The csrf token is reset from a bare secret. """ req = self._get_POST_bare_secret_csrf_cookie_request_with_token() mw = CsrfViewMiddleware(token_view) mw.process_request(req) resp = mw.process_view(req, token_view, (), {}) self.assertIsNone(resp) resp = mw(req) self.assertIn(settings.CSRF_COOKIE_NAME, resp.cookies, "Cookie was not reset from bare secret") csrf_cookie = resp.cookies[settings.CSRF_COOKIE_NAME] self.assertEqual(len(csrf_cookie.value), CSRF_TOKEN_LENGTH) self._check_token_present(resp, csrf_id=csrf_cookie.value) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_COOKIE_DOMAIN='.example.com', USE_X_FORWARDED_PORT=True) def test_https_good_referer_behind_proxy(self): """ A POST HTTPS request is accepted when USE_X_FORWARDED_PORT=True. """ self._test_https_good_referer_behind_proxy() @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_COOKIE_DOMAIN='.example.com') def test_https_good_referer_matches_cookie_domain(self): """ A POST HTTPS request with a good referer should be accepted from a subdomain that's allowed by CSRF_COOKIE_DOMAIN. """ self._test_https_good_referer_matches_cookie_domain() @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_COOKIE_DOMAIN='.example.com') def test_https_good_referer_matches_cookie_domain_with_different_port(self): """ A POST HTTPS request with a good referer should be accepted from a subdomain that's allowed by CSRF_COOKIE_DOMAIN and a non-443 port. """ self._test_https_good_referer_matches_cookie_domain_with_different_port() @override_settings(CSRF_COOKIE_DOMAIN='.example.com', DEBUG=True) def test_https_reject_insecure_referer(self): """ A POST HTTPS request from an insecure referer should be rejected. 
""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_REFERER'] = 'http://example.com/' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(post_form_view) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - Referer is insecure while host is secure.', status_code=403, ) @override_settings(CSRF_USE_SESSIONS=True, CSRF_COOKIE_DOMAIN=None) class CsrfViewMiddlewareUseSessionsTests(CsrfViewMiddlewareTestMixin, SimpleTestCase): """ CSRF tests with CSRF_USE_SESSIONS=True. """ def _get_POST_bare_secret_csrf_cookie_request(self): req = self._get_POST_no_csrf_cookie_request() req.session[CSRF_SESSION_KEY] = self._csrf_id_cookie[:32] return req def _get_GET_csrf_cookie_request(self): req = TestingHttpRequest() req.session[CSRF_SESSION_KEY] = self._csrf_id_cookie return req def test_no_session_on_request(self): msg = ( 'CSRF_USE_SESSIONS is enabled, but request.session is not set. ' 'SessionMiddleware must appear before CsrfViewMiddleware in MIDDLEWARE.' ) with self.assertRaisesMessage(ImproperlyConfigured, msg): mw = CsrfViewMiddleware(lambda req: HttpResponse()) mw.process_request(HttpRequest()) def test_process_response_get_token_used(self): """The ensure_csrf_cookie() decorator works without middleware.""" req = self._get_GET_no_csrf_cookie_request() ensure_csrf_cookie_view(req) self.assertTrue(req.session.get(CSRF_SESSION_KEY, False)) def test_session_modify(self): """The session isn't saved if the CSRF cookie is unchanged.""" req = self._get_GET_no_csrf_cookie_request() mw = CsrfViewMiddleware(ensure_csrf_cookie_view) mw.process_view(req, ensure_csrf_cookie_view, (), {}) mw(req) self.assertIsNotNone(req.session.get(CSRF_SESSION_KEY)) req.session.modified = False mw.process_view(req, ensure_csrf_cookie_view, (), {}) mw(req) self.assertFalse(req.session.modified) def test_ensures_csrf_cookie_with_middleware(self): """ The ensure_csrf_cookie() decorator works with the CsrfViewMiddleware enabled. """ req = self._get_GET_no_csrf_cookie_request() mw = CsrfViewMiddleware(ensure_csrf_cookie_view) mw.process_view(req, ensure_csrf_cookie_view, (), {}) mw(req) self.assertTrue(req.session.get(CSRF_SESSION_KEY, False)) def test_token_node_with_new_csrf_cookie(self): """ CsrfTokenNode works when a CSRF cookie is created by the middleware (when one was not already present). """ req = self._get_GET_no_csrf_cookie_request() mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) csrf_cookie = req.session[CSRF_SESSION_KEY] self._check_token_present(resp, csrf_id=csrf_cookie) @override_settings( ALLOWED_HOSTS=['www.example.com'], SESSION_COOKIE_DOMAIN='.example.com', USE_X_FORWARDED_PORT=True, DEBUG=True, ) def test_https_good_referer_behind_proxy(self): """ A POST HTTPS request is accepted when USE_X_FORWARDED_PORT=True. """ self._test_https_good_referer_behind_proxy() @override_settings(ALLOWED_HOSTS=['www.example.com'], SESSION_COOKIE_DOMAIN='.example.com') def test_https_good_referer_matches_cookie_domain(self): """ A POST HTTPS request with a good referer should be accepted from a subdomain that's allowed by SESSION_COOKIE_DOMAIN. 
""" self._test_https_good_referer_matches_cookie_domain() @override_settings(ALLOWED_HOSTS=['www.example.com'], SESSION_COOKIE_DOMAIN='.example.com') def test_https_good_referer_matches_cookie_domain_with_different_port(self): """ A POST HTTPS request with a good referer should be accepted from a subdomain that's allowed by SESSION_COOKIE_DOMAIN and a non-443 port. """ self._test_https_good_referer_matches_cookie_domain_with_different_port() @override_settings(SESSION_COOKIE_DOMAIN='.example.com', DEBUG=True) def test_https_reject_insecure_referer(self): """ A POST HTTPS request from an insecure referer should be rejected. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_REFERER'] = 'http://example.com/' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(post_form_view) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - Referer is insecure while host is secure.', status_code=403, ) @override_settings(ROOT_URLCONF='csrf_tests.csrf_token_error_handler_urls', DEBUG=False) class CsrfInErrorHandlingViewsTests(SimpleTestCase): def test_csrf_token_on_404_stays_constant(self): response = self.client.get('/does not exist/') # The error handler returns status code 599. self.assertEqual(response.status_code, 599) token1 = response.content response = self.client.get('/does not exist/') self.assertEqual(response.status_code, 599) token2 = response.content self.assertTrue(equivalent_tokens(token1.decode('ascii'), token2.decode('ascii')))
98d66e96c15882c0cd86c58eebc93ffa25c0c3fe94ca355ecf835402f1b13bec
import os from argparse import ArgumentParser from contextlib import contextmanager from unittest import TestSuite, TextTestRunner, defaultTestLoader, mock from django.db import connections from django.test import SimpleTestCase from django.test.runner import DiscoverRunner from django.test.utils import ( NullTimeKeeper, TimeKeeper, captured_stderr, captured_stdout, ) @contextmanager def change_cwd(directory): current_dir = os.path.abspath(os.path.dirname(__file__)) new_dir = os.path.join(current_dir, directory) old_cwd = os.getcwd() os.chdir(new_dir) try: yield finally: os.chdir(old_cwd) class DiscoverRunnerTests(SimpleTestCase): @staticmethod def get_test_methods_names(suite): return [ t.__class__.__name__ + '.' + t._testMethodName for t in suite._tests ] def test_init_debug_mode(self): runner = DiscoverRunner() self.assertFalse(runner.debug_mode) def test_add_arguments_debug_mode(self): parser = ArgumentParser() DiscoverRunner.add_arguments(parser) ns = parser.parse_args([]) self.assertFalse(ns.debug_mode) ns = parser.parse_args(["--debug-mode"]) self.assertTrue(ns.debug_mode) def test_dotted_test_module(self): count = DiscoverRunner().build_suite( ['test_runner_apps.sample.tests_sample'], ).countTestCases() self.assertEqual(count, 4) def test_dotted_test_class_vanilla_unittest(self): count = DiscoverRunner().build_suite( ['test_runner_apps.sample.tests_sample.TestVanillaUnittest'], ).countTestCases() self.assertEqual(count, 1) def test_dotted_test_class_django_testcase(self): count = DiscoverRunner().build_suite( ['test_runner_apps.sample.tests_sample.TestDjangoTestCase'], ).countTestCases() self.assertEqual(count, 1) def test_dotted_test_method_django_testcase(self): count = DiscoverRunner().build_suite( ['test_runner_apps.sample.tests_sample.TestDjangoTestCase.test_sample'], ).countTestCases() self.assertEqual(count, 1) def test_pattern(self): count = DiscoverRunner( pattern="*_tests.py", ).build_suite(['test_runner_apps.sample']).countTestCases() self.assertEqual(count, 1) def test_name_patterns(self): all_test_1 = [ 'DjangoCase1.test_1', 'DjangoCase2.test_1', 'SimpleCase1.test_1', 'SimpleCase2.test_1', 'UnittestCase1.test_1', 'UnittestCase2.test_1', ] all_test_2 = [ 'DjangoCase1.test_2', 'DjangoCase2.test_2', 'SimpleCase1.test_2', 'SimpleCase2.test_2', 'UnittestCase1.test_2', 'UnittestCase2.test_2', ] all_tests = sorted([*all_test_1, *all_test_2, 'UnittestCase2.test_3_test']) for pattern, expected in [ [['test_1'], all_test_1], [['UnittestCase1'], ['UnittestCase1.test_1', 'UnittestCase1.test_2']], [['*test'], ['UnittestCase2.test_3_test']], [['test*'], all_tests], [['test'], all_tests], [['test_1', 'test_2'], sorted([*all_test_1, *all_test_2])], [['test*1'], all_test_1], ]: with self.subTest(pattern): suite = DiscoverRunner( test_name_patterns=pattern ).build_suite(['test_runner_apps.simple']) self.assertEqual(expected, self.get_test_methods_names(suite)) def test_file_path(self): with change_cwd(".."): count = DiscoverRunner().build_suite( ['test_runner_apps/sample/'], ).countTestCases() self.assertEqual(count, 5) def test_empty_label(self): """ If the test label is empty, discovery should happen on the current working directory. 
""" with change_cwd("."): suite = DiscoverRunner().build_suite([]) self.assertEqual( suite._tests[0].id().split(".")[0], os.path.basename(os.getcwd()), ) def test_empty_test_case(self): count = DiscoverRunner().build_suite( ['test_runner_apps.sample.tests_sample.EmptyTestCase'], ).countTestCases() self.assertEqual(count, 0) def test_discovery_on_package(self): count = DiscoverRunner().build_suite( ['test_runner_apps.sample.tests'], ).countTestCases() self.assertEqual(count, 1) def test_ignore_adjacent(self): """ When given a dotted path to a module, unittest discovery searches not just the module, but also the directory containing the module. This results in tests from adjacent modules being run when they should not. The discover runner avoids this behavior. """ count = DiscoverRunner().build_suite( ['test_runner_apps.sample.empty'], ).countTestCases() self.assertEqual(count, 0) def test_testcase_ordering(self): with change_cwd(".."): suite = DiscoverRunner().build_suite(['test_runner_apps/sample/']) self.assertEqual( suite._tests[0].__class__.__name__, 'TestDjangoTestCase', msg="TestDjangoTestCase should be the first test case") self.assertEqual( suite._tests[1].__class__.__name__, 'TestZimpleTestCase', msg="TestZimpleTestCase should be the second test case") # All others can follow in unspecified order, including doctests self.assertIn('DocTestCase', [t.__class__.__name__ for t in suite._tests[2:]]) def test_duplicates_ignored(self): """ Tests shouldn't be discovered twice when discovering on overlapping paths. """ base_app = 'forms_tests' sub_app = 'forms_tests.field_tests' with self.modify_settings(INSTALLED_APPS={'append': sub_app}): single = DiscoverRunner().build_suite([base_app]).countTestCases() dups = DiscoverRunner().build_suite([base_app, sub_app]).countTestCases() self.assertEqual(single, dups) def test_reverse(self): """ Reverse should reorder tests while maintaining the grouping specified by ``DiscoverRunner.reorder_by``. 
""" runner = DiscoverRunner(reverse=True) suite = runner.build_suite( test_labels=('test_runner_apps.sample', 'test_runner_apps.simple')) self.assertIn('test_runner_apps.simple', next(iter(suite)).id(), msg="Test labels should be reversed.") suite = runner.build_suite(test_labels=('test_runner_apps.simple',)) suite = tuple(suite) self.assertIn('DjangoCase', suite[0].id(), msg="Test groups should not be reversed.") self.assertIn('SimpleCase', suite[4].id(), msg="Test groups order should be preserved.") self.assertIn('DjangoCase2', suite[0].id(), msg="Django test cases should be reversed.") self.assertIn('SimpleCase2', suite[4].id(), msg="Simple test cases should be reversed.") self.assertIn('UnittestCase2', suite[8].id(), msg="Unittest test cases should be reversed.") self.assertIn('test_2', suite[0].id(), msg="Methods of Django cases should be reversed.") self.assertIn('test_2', suite[4].id(), msg="Methods of simple cases should be reversed.") self.assertIn('test_2', suite[9].id(), msg="Methods of unittest cases should be reversed.") def test_overridable_get_test_runner_kwargs(self): self.assertIsInstance(DiscoverRunner().get_test_runner_kwargs(), dict) def test_overridable_test_suite(self): self.assertEqual(DiscoverRunner().test_suite, TestSuite) def test_overridable_test_runner(self): self.assertEqual(DiscoverRunner().test_runner, TextTestRunner) def test_overridable_test_loader(self): self.assertEqual(DiscoverRunner().test_loader, defaultTestLoader) def test_tags(self): runner = DiscoverRunner(tags=['core']) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 1) runner = DiscoverRunner(tags=['fast']) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 2) runner = DiscoverRunner(tags=['slow']) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 2) def test_exclude_tags(self): runner = DiscoverRunner(tags=['fast'], exclude_tags=['core']) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 1) runner = DiscoverRunner(tags=['fast'], exclude_tags=['slow']) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 0) runner = DiscoverRunner(exclude_tags=['slow']) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 0) def test_tag_inheritance(self): def count_tests(**kwargs): suite = DiscoverRunner(**kwargs).build_suite(['test_runner_apps.tagged.tests_inheritance']) return suite.countTestCases() self.assertEqual(count_tests(tags=['foo']), 4) self.assertEqual(count_tests(tags=['bar']), 2) self.assertEqual(count_tests(tags=['baz']), 2) self.assertEqual(count_tests(tags=['foo'], exclude_tags=['bar']), 2) self.assertEqual(count_tests(tags=['foo'], exclude_tags=['bar', 'baz']), 1) self.assertEqual(count_tests(exclude_tags=['foo']), 0) def test_included_tags_displayed(self): runner = DiscoverRunner(tags=['foo', 'bar'], verbosity=2) with captured_stdout() as stdout: runner.build_suite(['test_runner_apps.tagged.tests']) self.assertIn('Including test tag(s): bar, foo.\n', stdout.getvalue()) def test_excluded_tags_displayed(self): runner = DiscoverRunner(exclude_tags=['foo', 'bar'], verbosity=3) with captured_stdout() as stdout: runner.build_suite(['test_runner_apps.tagged.tests']) self.assertIn('Excluding test tag(s): bar, foo.\n', stdout.getvalue()) def test_pdb_with_parallel(self): msg = ( 'You cannot use --pdb with parallel tests; pass --parallel=1 to ' 'use it.' 
) with self.assertRaisesMessage(ValueError, msg): DiscoverRunner(pdb=True, parallel=2) def test_buffer_mode_test_pass(self): runner = DiscoverRunner(buffer=True, verbose=0) with captured_stdout() as stdout, captured_stderr() as stderr: suite = runner.build_suite([ 'test_runner_apps.buffer.tests_buffer.WriteToStdoutStderrTestCase.test_pass', ]) runner.run_suite(suite) self.assertNotIn('Write to stderr.', stderr.getvalue()) self.assertNotIn('Write to stdout.', stdout.getvalue()) def test_buffer_mode_test_fail(self): runner = DiscoverRunner(buffer=True, verbose=0) with captured_stdout() as stdout, captured_stderr() as stderr: suite = runner.build_suite([ 'test_runner_apps.buffer.tests_buffer.WriteToStdoutStderrTestCase.test_fail', ]) runner.run_suite(suite) self.assertIn('Write to stderr.', stderr.getvalue()) self.assertIn('Write to stdout.', stdout.getvalue()) @mock.patch('faulthandler.enable') def test_faulthandler_enabled(self, mocked_enable): with mock.patch('faulthandler.is_enabled', return_value=False): DiscoverRunner(enable_faulthandler=True) mocked_enable.assert_called() @mock.patch('faulthandler.enable') def test_faulthandler_already_enabled(self, mocked_enable): with mock.patch('faulthandler.is_enabled', return_value=True): DiscoverRunner(enable_faulthandler=True) mocked_enable.assert_not_called() @mock.patch('faulthandler.enable') def test_faulthandler_enabled_fileno(self, mocked_enable): # sys.stderr that is not an actual file. with mock.patch('faulthandler.is_enabled', return_value=False), captured_stderr(): DiscoverRunner(enable_faulthandler=True) mocked_enable.assert_called() @mock.patch('faulthandler.enable') def test_faulthandler_disabled(self, mocked_enable): with mock.patch('faulthandler.is_enabled', return_value=False): DiscoverRunner(enable_faulthandler=False) mocked_enable.assert_not_called() def test_timings_not_captured(self): runner = DiscoverRunner(timing=False) with captured_stderr() as stderr: with runner.time_keeper.timed('test'): pass runner.time_keeper.print_results() self.assertTrue(isinstance(runner.time_keeper, NullTimeKeeper)) self.assertNotIn('test', stderr.getvalue()) def test_timings_captured(self): runner = DiscoverRunner(timing=True) with captured_stderr() as stderr: with runner.time_keeper.timed('test'): pass runner.time_keeper.print_results() self.assertTrue(isinstance(runner.time_keeper, TimeKeeper)) self.assertIn('test', stderr.getvalue()) class DiscoverRunnerGetDatabasesTests(SimpleTestCase): runner = DiscoverRunner(verbosity=2) skip_msg = 'Skipping setup of unused database(s): ' def get_databases(self, test_labels): suite = self.runner.build_suite(test_labels) with captured_stdout() as stdout: databases = self.runner.get_databases(suite) return databases, stdout.getvalue() def assertSkippedDatabases(self, test_labels, expected_databases): databases, output = self.get_databases(test_labels) self.assertEqual(databases, expected_databases) skipped_databases = set(connections) - set(expected_databases) if skipped_databases: self.assertIn(self.skip_msg + ', '.join(sorted(skipped_databases)), output) else: self.assertNotIn(self.skip_msg, output) def test_mixed(self): databases, output = self.get_databases(['test_runner_apps.databases.tests']) self.assertEqual(databases, {'default': True, 'other': False}) self.assertNotIn(self.skip_msg, output) def test_all(self): databases, output = self.get_databases(['test_runner_apps.databases.tests.AllDatabasesTests']) self.assertEqual(databases, {alias: False for alias in connections}) 
self.assertNotIn(self.skip_msg, output) def test_default_and_other(self): self.assertSkippedDatabases([ 'test_runner_apps.databases.tests.DefaultDatabaseTests', 'test_runner_apps.databases.tests.OtherDatabaseTests', ], {'default': False, 'other': False}) def test_default_only(self): self.assertSkippedDatabases([ 'test_runner_apps.databases.tests.DefaultDatabaseTests', ], {'default': False}) def test_other_only(self): self.assertSkippedDatabases([ 'test_runner_apps.databases.tests.OtherDatabaseTests' ], {'other': False}) def test_no_databases_required(self): self.assertSkippedDatabases([ 'test_runner_apps.databases.tests.NoDatabaseTests' ], {}) def test_serialize(self): databases, _ = self.get_databases([ 'test_runner_apps.databases.tests.DefaultDatabaseSerializedTests' ]) self.assertEqual(databases, {'default': True})
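# The DiscoverRunnerGetDatabasesTests above assert on a mapping of
# {alias: needs_serialization} built from each test case's ``databases``
# attribute. A minimal sketch of that idea, under the assumptions that
# '__all__' expands to every configured alias and that a test requesting
# serialized rollback flips the value to True; collect_databases is a
# hypothetical helper, not the runner's actual implementation.
def collect_databases(test_cases, all_aliases=('default', 'other')):
    databases = {}
    for test in test_cases:
        requested = getattr(test, 'databases', set())
        aliases = all_aliases if requested == '__all__' else requested
        serialized = getattr(test, 'serialized_rollback', False)
        for alias in aliases:
            databases[alias] = databases.get(alias, False) or serialized
    return databases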
5689fc7435fe3c833f048e496ea26e9db8024e67f7c293540b3b48d9e0776fc0
import pickle import unittest from django.test import SimpleTestCase from django.test.runner import RemoteTestResult try: import tblib except ImportError: tblib = None class ExceptionThatFailsUnpickling(Exception): """ After pickling, this class fails unpickling with an error about incorrect arguments passed to __init__(). """ def __init__(self, arg): super().__init__() class ParallelTestRunnerTest(SimpleTestCase): """ End-to-end tests of the parallel test runner. These tests are only meaningful when running tests in parallel using the --parallel option, though it doesn't hurt to run them not in parallel. """ def test_subtest(self): """ Passing subtests work. """ for i in range(2): with self.subTest(index=i): self.assertEqual(i, i) class SampleFailingSubtest(SimpleTestCase): # This method name doesn't begin with "test" to prevent test discovery # from seeing it. def dummy_test(self): """ A dummy test for testing subTest failures. """ for i in range(3): with self.subTest(index=i): self.assertEqual(i, 1) class RemoteTestResultTest(SimpleTestCase): def test_was_successful_no_events(self): result = RemoteTestResult() self.assertIs(result.wasSuccessful(), True) def test_was_successful_one_success(self): result = RemoteTestResult() result.addSuccess(None) self.assertIs(result.wasSuccessful(), True) def test_was_successful_one_expected_failure(self): result = RemoteTestResult() result.addExpectedFailure(None, ValueError('woops')) self.assertIs(result.wasSuccessful(), True) def test_was_successful_one_skip(self): result = RemoteTestResult() result.addSkip(None, 'Skipped') self.assertIs(result.wasSuccessful(), True) def test_was_successful_one_error(self): result = RemoteTestResult() result.addError(None, ValueError('woops')) self.assertIs(result.wasSuccessful(), False) def test_was_successful_one_failure(self): result = RemoteTestResult() result.addFailure(None, ValueError('woops')) self.assertIs(result.wasSuccessful(), False) def test_picklable(self): result = RemoteTestResult() loaded_result = pickle.loads(pickle.dumps(result)) self.assertEqual(result.events, loaded_result.events) def test_pickle_errors_detection(self): picklable_error = RuntimeError('This is fine') not_unpicklable_error = ExceptionThatFailsUnpickling('arg') result = RemoteTestResult() result._confirm_picklable(picklable_error) msg = '__init__() missing 1 required positional argument' with self.assertRaisesMessage(TypeError, msg): result._confirm_picklable(not_unpicklable_error) @unittest.skipUnless(tblib is not None, 'requires tblib to be installed') def test_add_failing_subtests(self): """ Failing subtests are added correctly using addSubTest(). """ # Manually run a test with failing subtests to prevent the failures # from affecting the actual test run. result = RemoteTestResult() subtest_test = SampleFailingSubtest(methodName='dummy_test') subtest_test.run(result=result) events = result.events self.assertEqual(len(events), 4) self.assertIs(result.wasSuccessful(), False) event = events[1] self.assertEqual(event[0], 'addSubTest') self.assertEqual(str(event[2]), 'dummy_test (test_runner.test_parallel.SampleFailingSubtest) (index=0)') self.assertEqual(repr(event[3][1]), "AssertionError('0 != 1')") event = events[2] self.assertEqual(repr(event[3][1]), "AssertionError('2 != 1')")
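# A short standalone illustration (an assumption-level demo, separate from the
# tests above) of why ExceptionThatFailsUnpickling cannot survive a pickle
# round trip: exceptions are repickled by re-calling __init__(*self.args), and
# because the class never stores its argument, args is empty and unpickling
# raises the TypeError that test_pickle_errors_detection checks for.
import pickle


class DemoBadException(Exception):
    def __init__(self, arg):
        super().__init__()  # arg is intentionally not passed along to args


try:
    pickle.loads(pickle.dumps(DemoBadException('boom')))
except TypeError as exc:
    # e.g. "__init__() missing 1 required positional argument: 'arg'"
    print(exc)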
3ed7add28a9057f004b7a0034549d9525c4ece9d2d84fdaf62466bb320b1ea24
import datetime import pickle import unittest import uuid from collections import namedtuple from copy import deepcopy from decimal import Decimal from unittest import mock from django.core.exceptions import FieldError from django.db import DatabaseError, NotSupportedError, connection from django.db.models import ( AutoField, Avg, BinaryField, BooleanField, Case, CharField, Count, DateField, DateTimeField, DecimalField, DurationField, Exists, Expression, ExpressionList, ExpressionWrapper, F, FloatField, Func, IntegerField, Max, Min, Model, OrderBy, OuterRef, Q, StdDev, Subquery, Sum, TimeField, UUIDField, Value, Variance, When, ) from django.db.models.expressions import ( Col, Combinable, CombinedExpression, RawSQL, Ref, ) from django.db.models.functions import ( Coalesce, Concat, Left, Length, Lower, Substr, Upper, ) from django.db.models.sql import constants from django.db.models.sql.datastructures import Join from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from django.test.utils import ( Approximate, CaptureQueriesContext, isolate_apps, register_lookup, ) from django.utils.functional import SimpleLazyObject from .models import ( UUID, UUIDPK, Company, Employee, Experiment, Manager, Number, RemoteEmployee, Result, SimulationRun, Time, ) class BasicExpressionsTests(TestCase): @classmethod def setUpTestData(cls): cls.example_inc = Company.objects.create( name="Example Inc.", num_employees=2300, num_chairs=5, ceo=Employee.objects.create(firstname="Joe", lastname="Smith", salary=10) ) cls.foobar_ltd = Company.objects.create( name="Foobar Ltd.", num_employees=3, num_chairs=4, based_in_eu=True, ceo=Employee.objects.create(firstname="Frank", lastname="Meyer", salary=20) ) cls.max = Employee.objects.create(firstname='Max', lastname='Mustermann', salary=30) cls.gmbh = Company.objects.create(name='Test GmbH', num_employees=32, num_chairs=1, ceo=cls.max) def setUp(self): self.company_query = Company.objects.values( "name", "num_employees", "num_chairs" ).order_by( "name", "num_employees", "num_chairs" ) def test_annotate_values_aggregate(self): companies = Company.objects.annotate( salaries=F('ceo__salary'), ).values('num_employees', 'salaries').aggregate( result=Sum( F('salaries') + F('num_employees'), output_field=IntegerField() ), ) self.assertEqual(companies['result'], 2395) def test_annotate_values_filter(self): companies = Company.objects.annotate( foo=RawSQL('%s', ['value']), ).filter(foo='value').order_by('name') self.assertSequenceEqual( companies, [self.example_inc, self.foobar_ltd, self.gmbh], ) def test_annotate_values_count(self): companies = Company.objects.annotate(foo=RawSQL('%s', ['value'])) self.assertEqual(companies.count(), 3) @skipUnlessDBFeature('supports_boolean_expr_in_select_clause') def test_filtering_on_annotate_that_uses_q(self): self.assertEqual( Company.objects.annotate( num_employees_check=ExpressionWrapper(Q(num_employees__gt=3), output_field=BooleanField()) ).filter(num_employees_check=True).count(), 2, ) def test_filtering_on_q_that_is_boolean(self): self.assertEqual( Company.objects.filter( ExpressionWrapper(Q(num_employees__gt=3), output_field=BooleanField()) ).count(), 2, ) def test_filtering_on_rawsql_that_is_boolean(self): self.assertEqual( Company.objects.filter( RawSQL('num_employees > %s', (3,), output_field=BooleanField()), ).count(), 2, ) def test_filter_inter_attribute(self): # We can filter on attribute relationships on same model obj, e.g. # find companies where the number of employees is greater # than the number of chairs. 
self.assertSequenceEqual( self.company_query.filter(num_employees__gt=F("num_chairs")), [ { "num_chairs": 5, "name": "Example Inc.", "num_employees": 2300, }, { "num_chairs": 1, "name": "Test GmbH", "num_employees": 32 }, ], ) def test_update(self): # We can set one field to have the value of another field # Make sure we have enough chairs self.company_query.update(num_chairs=F("num_employees")) self.assertSequenceEqual( self.company_query, [ { "num_chairs": 2300, "name": "Example Inc.", "num_employees": 2300 }, { "num_chairs": 3, "name": "Foobar Ltd.", "num_employees": 3 }, { "num_chairs": 32, "name": "Test GmbH", "num_employees": 32 } ], ) def test_arithmetic(self): # We can perform arithmetic operations in expressions # Make sure we have 2 spare chairs self.company_query.update(num_chairs=F("num_employees") + 2) self.assertSequenceEqual( self.company_query, [ { 'num_chairs': 2302, 'name': 'Example Inc.', 'num_employees': 2300 }, { 'num_chairs': 5, 'name': 'Foobar Ltd.', 'num_employees': 3 }, { 'num_chairs': 34, 'name': 'Test GmbH', 'num_employees': 32 } ], ) def test_order_of_operations(self): # Law of order of operations is followed self.company_query.update(num_chairs=F('num_employees') + 2 * F('num_employees')) self.assertSequenceEqual( self.company_query, [ { 'num_chairs': 6900, 'name': 'Example Inc.', 'num_employees': 2300 }, { 'num_chairs': 9, 'name': 'Foobar Ltd.', 'num_employees': 3 }, { 'num_chairs': 96, 'name': 'Test GmbH', 'num_employees': 32 } ], ) def test_parenthesis_priority(self): # Law of order of operations can be overridden by parentheses self.company_query.update(num_chairs=(F('num_employees') + 2) * F('num_employees')) self.assertSequenceEqual( self.company_query, [ { 'num_chairs': 5294600, 'name': 'Example Inc.', 'num_employees': 2300 }, { 'num_chairs': 15, 'name': 'Foobar Ltd.', 'num_employees': 3 }, { 'num_chairs': 1088, 'name': 'Test GmbH', 'num_employees': 32 } ], ) def test_update_with_fk(self): # ForeignKey can become updated with the value of another ForeignKey. 
self.assertEqual(Company.objects.update(point_of_contact=F('ceo')), 3) self.assertQuerysetEqual( Company.objects.all(), ['Joe Smith', 'Frank Meyer', 'Max Mustermann'], lambda c: str(c.point_of_contact), ordered=False ) def test_update_with_none(self): Number.objects.create(integer=1, float=1.0) Number.objects.create(integer=2) Number.objects.filter(float__isnull=False).update(float=Value(None)) self.assertQuerysetEqual( Number.objects.all(), [None, None], lambda n: n.float, ordered=False ) def test_filter_with_join(self): # F Expressions can also span joins Company.objects.update(point_of_contact=F('ceo')) c = Company.objects.first() c.point_of_contact = Employee.objects.create(firstname="Guido", lastname="van Rossum") c.save() self.assertQuerysetEqual( Company.objects.filter(ceo__firstname=F('point_of_contact__firstname')), ['Foobar Ltd.', 'Test GmbH'], lambda c: c.name, ordered=False ) Company.objects.exclude( ceo__firstname=F("point_of_contact__firstname") ).update(name="foo") self.assertEqual( Company.objects.exclude( ceo__firstname=F('point_of_contact__firstname') ).get().name, "foo", ) msg = "Joined field references are not permitted in this query" with self.assertRaisesMessage(FieldError, msg): Company.objects.exclude( ceo__firstname=F('point_of_contact__firstname') ).update(name=F('point_of_contact__lastname')) def test_object_update(self): # F expressions can be used to update attributes on single objects self.gmbh.num_employees = F('num_employees') + 4 self.gmbh.save() self.gmbh.refresh_from_db() self.assertEqual(self.gmbh.num_employees, 36) def test_new_object_save(self): # We should be able to use Funcs when inserting new data test_co = Company(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max) test_co.save() test_co.refresh_from_db() self.assertEqual(test_co.name, "upper") def test_new_object_create(self): test_co = Company.objects.create(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max) test_co.refresh_from_db() self.assertEqual(test_co.name, "upper") def test_object_create_with_aggregate(self): # Aggregates are not allowed when inserting new data msg = 'Aggregate functions are not allowed in this query (num_employees=Max(Value(1))).' with self.assertRaisesMessage(FieldError, msg): Company.objects.create( name='Company', num_employees=Max(Value(1)), num_chairs=1, ceo=Employee.objects.create(firstname="Just", lastname="Doit", salary=30), ) def test_object_update_fk(self): # F expressions cannot be used to update attributes which are foreign # keys, or attributes which involve joins. test_gmbh = Company.objects.get(pk=self.gmbh.pk) msg = 'F(ceo)": "Company.point_of_contact" must be a "Employee" instance.' 
with self.assertRaisesMessage(ValueError, msg): test_gmbh.point_of_contact = F('ceo') test_gmbh.point_of_contact = self.gmbh.ceo test_gmbh.save() test_gmbh.name = F('ceo__lastname') msg = 'Joined field references are not permitted in this query' with self.assertRaisesMessage(FieldError, msg): test_gmbh.save() def test_update_inherited_field_value(self): msg = 'Joined field references are not permitted in this query' with self.assertRaisesMessage(FieldError, msg): RemoteEmployee.objects.update(adjusted_salary=F('salary') * 5) def test_object_update_unsaved_objects(self): # F expressions cannot be used to update attributes on objects which do # not yet exist in the database acme = Company(name='The Acme Widget Co.', num_employees=12, num_chairs=5, ceo=self.max) acme.num_employees = F("num_employees") + 16 msg = ( 'Failed to insert expression "Col(expressions_company, ' 'expressions.Company.num_employees) + Value(16)" on ' 'expressions.Company.num_employees. F() expressions can only be ' 'used to update, not to insert.' ) with self.assertRaisesMessage(ValueError, msg): acme.save() acme.num_employees = 12 acme.name = Lower(F('name')) msg = ( 'Failed to insert expression "Lower(Col(expressions_company, ' 'expressions.Company.name))" on expressions.Company.name. F() ' 'expressions can only be used to update, not to insert.' ) with self.assertRaisesMessage(ValueError, msg): acme.save() def test_ticket_11722_iexact_lookup(self): Employee.objects.create(firstname="John", lastname="Doe") test = Employee.objects.create(firstname="Test", lastname="test") queryset = Employee.objects.filter(firstname__iexact=F('lastname')) self.assertSequenceEqual(queryset, [test]) def test_ticket_16731_startswith_lookup(self): Employee.objects.create(firstname="John", lastname="Doe") e2 = Employee.objects.create(firstname="Jack", lastname="Jackson") e3 = Employee.objects.create(firstname="Jack", lastname="jackson") self.assertSequenceEqual( Employee.objects.filter(lastname__startswith=F('firstname')), [e2, e3] if connection.features.has_case_insensitive_like else [e2] ) qs = Employee.objects.filter(lastname__istartswith=F('firstname')).order_by('pk') self.assertSequenceEqual(qs, [e2, e3]) def test_ticket_18375_join_reuse(self): # Reverse multijoin F() references and the lookup target the same join. # Pre #18375 the F() join was generated first and the lookup couldn't # reuse that join. qs = Employee.objects.filter(company_ceo_set__num_chairs=F('company_ceo_set__num_employees')) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_kwarg_ordering(self): # The next query was dict-randomization dependent - if the "gte=1" # was seen first, then the F() will reuse the join generated by the # gte lookup, if F() was seen first, then it generated a join the # other lookups could not reuse. qs = Employee.objects.filter( company_ceo_set__num_chairs=F('company_ceo_set__num_employees'), company_ceo_set__num_chairs__gte=1, ) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_kwarg_ordering_2(self): # Another similar case for F() than above. Now we have the same join # in two filter kwargs, one in the lhs lookup, one in F. Here pre # #18375 the amount of joins generated was random if dict # randomization was enabled, that is the generated query dependent # on which clause was seen first. 
qs = Employee.objects.filter( company_ceo_set__num_employees=F('pk'), pk=F('company_ceo_set__num_employees') ) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_chained_filters(self): # F() expressions do not reuse joins from previous filter. qs = Employee.objects.filter( company_ceo_set__num_employees=F('pk') ).filter( company_ceo_set__num_employees=F('company_ceo_set__num_employees') ) self.assertEqual(str(qs.query).count('JOIN'), 2) def test_order_by_exists(self): mary = Employee.objects.create(firstname='Mary', lastname='Mustermann', salary=20) mustermanns_by_seniority = Employee.objects.filter(lastname='Mustermann').order_by( # Order by whether the employee is the CEO of a company Exists(Company.objects.filter(ceo=OuterRef('pk'))).desc() ) self.assertSequenceEqual(mustermanns_by_seniority, [self.max, mary]) def test_order_by_multiline_sql(self): raw_order_by = ( RawSQL(''' CASE WHEN num_employees > 1000 THEN num_chairs ELSE 0 END ''', []).desc(), RawSQL(''' CASE WHEN num_chairs > 1 THEN 1 ELSE 0 END ''', []).asc() ) for qs in ( Company.objects.all(), Company.objects.distinct(), ): with self.subTest(qs=qs): self.assertSequenceEqual( qs.order_by(*raw_order_by), [self.example_inc, self.gmbh, self.foobar_ltd], ) def test_outerref(self): inner = Company.objects.filter(point_of_contact=OuterRef('pk')) msg = ( 'This queryset contains a reference to an outer query and may only ' 'be used in a subquery.' ) with self.assertRaisesMessage(ValueError, msg): inner.exists() outer = Employee.objects.annotate(is_point_of_contact=Exists(inner)) self.assertIs(outer.exists(), True) def test_exist_single_field_output_field(self): queryset = Company.objects.values('pk') self.assertIsInstance(Exists(queryset).output_field, BooleanField) def test_subquery(self): Company.objects.filter(name='Example Inc.').update( point_of_contact=Employee.objects.get(firstname='Joe', lastname='Smith'), ceo=self.max, ) Employee.objects.create(firstname='Bob', lastname='Brown', salary=40) qs = Employee.objects.annotate( is_point_of_contact=Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))), is_not_point_of_contact=~Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))), is_ceo_of_small_company=Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))), is_ceo_small_2=~~Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))), largest_company=Subquery(Company.objects.order_by('-num_employees').filter( Q(ceo=OuterRef('pk')) | Q(point_of_contact=OuterRef('pk')) ).values('name')[:1], output_field=CharField()) ).values( 'firstname', 'is_point_of_contact', 'is_not_point_of_contact', 'is_ceo_of_small_company', 'is_ceo_small_2', 'largest_company', ).order_by('firstname') results = list(qs) # Could use Coalesce(subq, Value('')) instead except for the bug in # cx_Oracle mentioned in #23843. 
bob = results[0] if bob['largest_company'] == '' and connection.features.interprets_empty_strings_as_nulls: bob['largest_company'] = None self.assertEqual(results, [ { 'firstname': 'Bob', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': False, 'is_ceo_small_2': False, 'largest_company': None, }, { 'firstname': 'Frank', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': True, 'is_ceo_small_2': True, 'largest_company': 'Foobar Ltd.', }, { 'firstname': 'Joe', 'is_point_of_contact': True, 'is_not_point_of_contact': False, 'is_ceo_of_small_company': False, 'is_ceo_small_2': False, 'largest_company': 'Example Inc.', }, { 'firstname': 'Max', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': True, 'is_ceo_small_2': True, 'largest_company': 'Example Inc.' } ]) # A less elegant way to write the same query: this uses a LEFT OUTER # JOIN and an IS NULL, inside a WHERE NOT IN which is probably less # efficient than EXISTS. self.assertCountEqual( qs.filter(is_point_of_contact=True).values('pk'), Employee.objects.exclude(company_point_of_contact_set=None).values('pk') ) def test_subquery_eq(self): qs = Employee.objects.annotate( is_ceo=Exists(Company.objects.filter(ceo=OuterRef('pk'))), is_point_of_contact=Exists( Company.objects.filter(point_of_contact=OuterRef('pk')), ), small_company=Exists( queryset=Company.objects.filter(num_employees__lt=200), ), ).filter(is_ceo=True, is_point_of_contact=False, small_company=True) self.assertNotEqual( qs.query.annotations['is_ceo'], qs.query.annotations['is_point_of_contact'], ) self.assertNotEqual( qs.query.annotations['is_ceo'], qs.query.annotations['small_company'], ) def test_in_subquery(self): # This is a contrived test (and you really wouldn't write this query), # but it is a succinct way to test the __in=Subquery() construct. 
small_companies = Company.objects.filter(num_employees__lt=200).values('pk') subquery_test = Company.objects.filter(pk__in=Subquery(small_companies)) self.assertCountEqual(subquery_test, [self.foobar_ltd, self.gmbh]) subquery_test2 = Company.objects.filter(pk=Subquery(small_companies.filter(num_employees=3))) self.assertCountEqual(subquery_test2, [self.foobar_ltd]) def test_uuid_pk_subquery(self): u = UUIDPK.objects.create() UUID.objects.create(uuid_fk=u) qs = UUIDPK.objects.filter(id__in=Subquery(UUID.objects.values('uuid_fk__id'))) self.assertCountEqual(qs, [u]) def test_nested_subquery(self): inner = Company.objects.filter(point_of_contact=OuterRef('pk')) outer = Employee.objects.annotate(is_point_of_contact=Exists(inner)) contrived = Employee.objects.annotate( is_point_of_contact=Subquery( outer.filter(pk=OuterRef('pk')).values('is_point_of_contact'), output_field=BooleanField(), ), ) self.assertCountEqual(contrived.values_list(), outer.values_list()) def test_nested_subquery_join_outer_ref(self): inner = Employee.objects.filter(pk=OuterRef('ceo__pk')).values('pk') qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( ceo__in=inner, ceo__pk=OuterRef('pk'), ).values('pk'), ), ) self.assertSequenceEqual( qs.values_list('ceo_company', flat=True), [self.example_inc.pk, self.foobar_ltd.pk, self.gmbh.pk], ) def test_nested_subquery_outer_ref_2(self): first = Time.objects.create(time='09:00') second = Time.objects.create(time='17:00') third = Time.objects.create(time='21:00') SimulationRun.objects.bulk_create([ SimulationRun(start=first, end=second, midpoint='12:00'), SimulationRun(start=first, end=third, midpoint='15:00'), SimulationRun(start=second, end=first, midpoint='00:00'), ]) inner = Time.objects.filter(time=OuterRef(OuterRef('time')), pk=OuterRef('start')).values('time') middle = SimulationRun.objects.annotate(other=Subquery(inner)).values('other')[:1] outer = Time.objects.annotate(other=Subquery(middle, output_field=TimeField())) # This is a contrived example. It exercises the double OuterRef form. self.assertCountEqual(outer, [first, second, third]) def test_nested_subquery_outer_ref_with_autofield(self): first = Time.objects.create(time='09:00') second = Time.objects.create(time='17:00') SimulationRun.objects.create(start=first, end=second, midpoint='12:00') inner = SimulationRun.objects.filter(start=OuterRef(OuterRef('pk'))).values('start') middle = Time.objects.annotate(other=Subquery(inner)).values('other')[:1] outer = Time.objects.annotate(other=Subquery(middle, output_field=IntegerField())) # This exercises the double OuterRef form with AutoField as pk. 
self.assertCountEqual(outer, [first, second]) def test_annotations_within_subquery(self): Company.objects.filter(num_employees__lt=50).update(ceo=Employee.objects.get(firstname='Frank')) inner = Company.objects.filter( ceo=OuterRef('pk') ).values('ceo').annotate(total_employees=Sum('num_employees')).values('total_employees') outer = Employee.objects.annotate(total_employees=Subquery(inner)).filter(salary__lte=Subquery(inner)) self.assertSequenceEqual( outer.order_by('-total_employees').values('salary', 'total_employees'), [{'salary': 10, 'total_employees': 2300}, {'salary': 20, 'total_employees': 35}], ) def test_subquery_references_joined_table_twice(self): inner = Company.objects.filter( num_chairs__gte=OuterRef('ceo__salary'), num_employees__gte=OuterRef('point_of_contact__salary'), ) # Another contrived example (there is no need to have a subquery here) outer = Company.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertFalse(outer.exists()) def test_subquery_filter_by_aggregate(self): Number.objects.create(integer=1000, float=1.2) Employee.objects.create(salary=1000) qs = Number.objects.annotate( min_valuable_count=Subquery( Employee.objects.filter( salary=OuterRef('integer'), ).annotate(cnt=Count('salary')).filter(cnt__gt=0).values('cnt')[:1] ), ) self.assertEqual(qs.get().float, 1.2) def test_subquery_filter_by_lazy(self): self.max.manager = Manager.objects.create(name='Manager') self.max.save() max_manager = SimpleLazyObject( lambda: Manager.objects.get(pk=self.max.manager.pk) ) qs = Company.objects.annotate( ceo_manager=Subquery( Employee.objects.filter( lastname=OuterRef('ceo__lastname'), ).values('manager'), ), ).filter(ceo_manager=max_manager) self.assertEqual(qs.get(), self.gmbh) def test_aggregate_subquery_annotation(self): with self.assertNumQueries(1) as ctx: aggregate = Company.objects.annotate( ceo_salary=Subquery( Employee.objects.filter( id=OuterRef('ceo_id'), ).values('salary') ), ).aggregate( ceo_salary_gt_20=Count('pk', filter=Q(ceo_salary__gt=20)), ) self.assertEqual(aggregate, {'ceo_salary_gt_20': 1}) # Aggregation over a subquery annotation doesn't annotate the subquery # twice in the inner query. sql = ctx.captured_queries[0]['sql'] self.assertLessEqual(sql.count('SELECT'), 3) # GROUP BY isn't required to aggregate over a query that doesn't # contain nested aggregates. 
self.assertNotIn('GROUP BY', sql) @skipUnlessDBFeature('supports_over_clause') def test_aggregate_rawsql_annotation(self): with self.assertNumQueries(1) as ctx: aggregate = Company.objects.annotate( salary=RawSQL('SUM(num_chairs) OVER (ORDER BY num_employees)', []), ).aggregate( count=Count('pk'), ) self.assertEqual(aggregate, {'count': 3}) sql = ctx.captured_queries[0]['sql'] self.assertNotIn('GROUP BY', sql) def test_explicit_output_field(self): class FuncA(Func): output_field = CharField() class FuncB(Func): pass expr = FuncB(FuncA()) self.assertEqual(expr.output_field, FuncA.output_field) def test_outerref_mixed_case_table_name(self): inner = Result.objects.filter(result_time__gte=OuterRef('experiment__assigned')) outer = Result.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertFalse(outer.exists()) def test_outerref_with_operator(self): inner = Company.objects.filter(num_employees=OuterRef('ceo__salary') + 2) outer = Company.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertEqual(outer.get().name, 'Test GmbH') def test_nested_outerref_with_function(self): self.gmbh.point_of_contact = Employee.objects.get(lastname='Meyer') self.gmbh.save() inner = Employee.objects.filter( lastname__startswith=Left(OuterRef(OuterRef('lastname')), 1), ) qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( point_of_contact__in=inner, ceo__pk=OuterRef('pk'), ).values('name'), ), ).filter(ceo_company__isnull=False) self.assertEqual(qs.get().ceo_company, 'Test GmbH') def test_annotation_with_outerref(self): gmbh_salary = Company.objects.annotate( max_ceo_salary_raise=Subquery( Company.objects.annotate( salary_raise=OuterRef('num_employees') + F('num_employees'), ).order_by('-salary_raise').values('salary_raise')[:1], output_field=IntegerField(), ), ).get(pk=self.gmbh.pk) self.assertEqual(gmbh_salary.max_ceo_salary_raise, 2332) def test_annotation_with_nested_outerref(self): self.gmbh.point_of_contact = Employee.objects.get(lastname='Meyer') self.gmbh.save() inner = Employee.objects.annotate( outer_lastname=OuterRef(OuterRef('lastname')), ).filter(lastname__startswith=Left('outer_lastname', 1)) qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( point_of_contact__in=inner, ceo__pk=OuterRef('pk'), ).values('name'), ), ).filter(ceo_company__isnull=False) self.assertEqual(qs.get().ceo_company, 'Test GmbH') def test_pickle_expression(self): expr = Value(1) expr.convert_value # populate cached property self.assertEqual(pickle.loads(pickle.dumps(expr)), expr) def test_incorrect_field_in_F_expression(self): with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."): list(Employee.objects.filter(firstname=F('nope'))) def test_incorrect_joined_field_in_F_expression(self): with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."): list(Company.objects.filter(ceo__pk=F('point_of_contact__nope'))) def test_exists_in_filter(self): inner = Company.objects.filter(ceo=OuterRef('pk')).values('pk') qs1 = Employee.objects.filter(Exists(inner)) qs2 = Employee.objects.annotate(found=Exists(inner)).filter(found=True) self.assertCountEqual(qs1, qs2) self.assertFalse(Employee.objects.exclude(Exists(inner)).exists()) self.assertCountEqual(qs2, Employee.objects.exclude(~Exists(inner))) def test_subquery_in_filter(self): inner = Company.objects.filter(ceo=OuterRef('pk')).values('based_in_eu') self.assertSequenceEqual( Employee.objects.filter(Subquery(inner)), [self.foobar_ltd.ceo], ) def 
test_subquery_group_by_outerref_in_filter(self): inner = Company.objects.annotate( employee=OuterRef('pk'), ).values('employee').annotate( min_num_chairs=Min('num_chairs'), ).values('ceo') self.assertIs(Employee.objects.filter(pk__in=Subquery(inner)).exists(), True) def test_case_in_filter_if_boolean_output_field(self): is_ceo = Company.objects.filter(ceo=OuterRef('pk')) is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) qs = Employee.objects.filter( Case( When(Exists(is_ceo), then=True), When(Exists(is_poc), then=True), default=False, output_field=BooleanField(), ), ) self.assertCountEqual(qs, [self.example_inc.ceo, self.foobar_ltd.ceo, self.max]) def test_boolean_expression_combined(self): is_ceo = Company.objects.filter(ceo=OuterRef('pk')) is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) self.gmbh.point_of_contact = self.max self.gmbh.save() self.assertCountEqual( Employee.objects.filter(Exists(is_ceo) | Exists(is_poc)), [self.example_inc.ceo, self.foobar_ltd.ceo, self.max], ) self.assertCountEqual( Employee.objects.filter(Exists(is_ceo) & Exists(is_poc)), [self.max], ) self.assertCountEqual( Employee.objects.filter(Exists(is_ceo) & Q(salary__gte=30)), [self.max], ) self.assertCountEqual( Employee.objects.filter(Exists(is_poc) | Q(salary__lt=15)), [self.example_inc.ceo, self.max], ) self.assertCountEqual( Employee.objects.filter(Q(salary__gte=30) & Exists(is_ceo)), [self.max], ) self.assertCountEqual( Employee.objects.filter(Q(salary__lt=15) | Exists(is_poc)), [self.example_inc.ceo, self.max], ) def test_boolean_expression_combined_with_empty_Q(self): is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) self.gmbh.point_of_contact = self.max self.gmbh.save() tests = [ Exists(is_poc) & Q(), Q() & Exists(is_poc), Exists(is_poc) | Q(), Q() | Exists(is_poc), Q(Exists(is_poc)) & Q(), Q() & Q(Exists(is_poc)), Q(Exists(is_poc)) | Q(), Q() | Q(Exists(is_poc)), ] for conditions in tests: with self.subTest(conditions): self.assertCountEqual(Employee.objects.filter(conditions), [self.max]) def test_boolean_expression_in_Q(self): is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) self.gmbh.point_of_contact = self.max self.gmbh.save() self.assertCountEqual(Employee.objects.filter(Q(Exists(is_poc))), [self.max]) class IterableLookupInnerExpressionsTests(TestCase): @classmethod def setUpTestData(cls): ceo = Employee.objects.create(firstname='Just', lastname='Doit', salary=30) # MySQL requires that the values calculated for expressions don't pass # outside of the field's range, so it's inconvenient to use the values # in the more general tests. cls.c5020 = Company.objects.create(name='5020 Ltd', num_employees=50, num_chairs=20, ceo=ceo) cls.c5040 = Company.objects.create(name='5040 Ltd', num_employees=50, num_chairs=40, ceo=ceo) cls.c5050 = Company.objects.create(name='5050 Ltd', num_employees=50, num_chairs=50, ceo=ceo) cls.c5060 = Company.objects.create(name='5060 Ltd', num_employees=50, num_chairs=60, ceo=ceo) cls.c99300 = Company.objects.create(name='99300 Ltd', num_employees=99, num_chairs=300, ceo=ceo) def test_in_lookup_allows_F_expressions_and_expressions_for_integers(self): # __in lookups can use F() expressions for integers. 
queryset = Company.objects.filter(num_employees__in=([F('num_chairs') - 10])) self.assertSequenceEqual(queryset, [self.c5060]) self.assertCountEqual( Company.objects.filter(num_employees__in=([F('num_chairs') - 10, F('num_chairs') + 10])), [self.c5040, self.c5060], ) self.assertCountEqual( Company.objects.filter( num_employees__in=([F('num_chairs') - 10, F('num_chairs'), F('num_chairs') + 10]) ), [self.c5040, self.c5050, self.c5060], ) def test_expressions_in_lookups_join_choice(self): midpoint = datetime.time(13, 0) t1 = Time.objects.create(time=datetime.time(12, 0)) t2 = Time.objects.create(time=datetime.time(14, 0)) s1 = SimulationRun.objects.create(start=t1, end=t2, midpoint=midpoint) SimulationRun.objects.create(start=t1, end=None, midpoint=midpoint) SimulationRun.objects.create(start=None, end=t2, midpoint=midpoint) SimulationRun.objects.create(start=None, end=None, midpoint=midpoint) queryset = SimulationRun.objects.filter(midpoint__range=[F('start__time'), F('end__time')]) self.assertSequenceEqual(queryset, [s1]) for alias in queryset.query.alias_map.values(): if isinstance(alias, Join): self.assertEqual(alias.join_type, constants.INNER) queryset = SimulationRun.objects.exclude(midpoint__range=[F('start__time'), F('end__time')]) self.assertQuerysetEqual(queryset, [], ordered=False) for alias in queryset.query.alias_map.values(): if isinstance(alias, Join): self.assertEqual(alias.join_type, constants.LOUTER) def test_range_lookup_allows_F_expressions_and_expressions_for_integers(self): # Range lookups can use F() expressions for integers. Company.objects.filter(num_employees__exact=F("num_chairs")) self.assertCountEqual( Company.objects.filter(num_employees__range=(F('num_chairs'), 100)), [self.c5020, self.c5040, self.c5050], ) self.assertCountEqual( Company.objects.filter(num_employees__range=(F('num_chairs') - 10, F('num_chairs') + 10)), [self.c5040, self.c5050, self.c5060], ) self.assertCountEqual( Company.objects.filter(num_employees__range=(F('num_chairs') - 10, 100)), [self.c5020, self.c5040, self.c5050, self.c5060], ) self.assertCountEqual( Company.objects.filter(num_employees__range=(1, 100)), [self.c5020, self.c5040, self.c5050, self.c5060, self.c99300], ) def test_range_lookup_namedtuple(self): EmployeeRange = namedtuple('EmployeeRange', ['minimum', 'maximum']) qs = Company.objects.filter( num_employees__range=EmployeeRange(minimum=51, maximum=100), ) self.assertSequenceEqual(qs, [self.c99300]) @unittest.skipUnless(connection.vendor == 'sqlite', "This defensive test only works on databases that don't validate parameter types") def test_complex_expressions_do_not_introduce_sql_injection_via_untrusted_string_inclusion(self): """ This tests that SQL injection isn't possible using compilation of expressions in iterable filters, as their compilation happens before the main query compilation. It's limited to SQLite, as PostgreSQL, Oracle and other vendors have defense in depth against this by type checking. Testing against SQLite (the most permissive of the built-in databases) demonstrates that the problem doesn't exist while keeping the test simple. 
""" queryset = Company.objects.filter(name__in=[F('num_chairs') + '1)) OR ((1==1']) self.assertQuerysetEqual(queryset, [], ordered=False) def test_in_lookup_allows_F_expressions_and_expressions_for_datetimes(self): start = datetime.datetime(2016, 2, 3, 15, 0, 0) end = datetime.datetime(2016, 2, 5, 15, 0, 0) experiment_1 = Experiment.objects.create( name='Integrity testing', assigned=start.date(), start=start, end=end, completed=end.date(), estimated_time=end - start, ) experiment_2 = Experiment.objects.create( name='Taste testing', assigned=start.date(), start=start, end=end, completed=end.date(), estimated_time=end - start, ) r1 = Result.objects.create( experiment=experiment_1, result_time=datetime.datetime(2016, 2, 4, 15, 0, 0), ) Result.objects.create( experiment=experiment_1, result_time=datetime.datetime(2016, 3, 10, 2, 0, 0), ) Result.objects.create( experiment=experiment_2, result_time=datetime.datetime(2016, 1, 8, 5, 0, 0), ) within_experiment_time = [F('experiment__start'), F('experiment__end')] queryset = Result.objects.filter(result_time__range=within_experiment_time) self.assertSequenceEqual(queryset, [r1]) within_experiment_time = [F('experiment__start'), F('experiment__end')] queryset = Result.objects.filter(result_time__range=within_experiment_time) self.assertSequenceEqual(queryset, [r1]) class FTests(SimpleTestCase): def test_deepcopy(self): f = F("foo") g = deepcopy(f) self.assertEqual(f.name, g.name) def test_deconstruct(self): f = F('name') path, args, kwargs = f.deconstruct() self.assertEqual(path, 'django.db.models.expressions.F') self.assertEqual(args, (f.name,)) self.assertEqual(kwargs, {}) def test_equal(self): f = F('name') same_f = F('name') other_f = F('username') self.assertEqual(f, same_f) self.assertNotEqual(f, other_f) def test_hash(self): d = {F('name'): 'Bob'} self.assertIn(F('name'), d) self.assertEqual(d[F('name')], 'Bob') def test_not_equal_Value(self): f = F('name') value = Value('name') self.assertNotEqual(f, value) self.assertNotEqual(value, f) class ExpressionsTests(TestCase): def test_F_reuse(self): f = F('id') n = Number.objects.create(integer=-1) c = Company.objects.create( name="Example Inc.", num_employees=2300, num_chairs=5, ceo=Employee.objects.create(firstname="Joe", lastname="Smith") ) c_qs = Company.objects.filter(id=f) self.assertEqual(c_qs.get(), c) # Reuse the same F-object for another queryset n_qs = Number.objects.filter(id=f) self.assertEqual(n_qs.get(), n) # The original query still works correctly self.assertEqual(c_qs.get(), c) def test_patterns_escape(self): r""" Special characters (e.g. 
%, _ and \) stored in database are properly escaped when using a pattern lookup with an expression refs #16731 """ Employee.objects.bulk_create([ Employee(firstname="Johnny", lastname="%John"), Employee(firstname="Jean-Claude", lastname="Claud_"), Employee(firstname="Jean-Claude", lastname="Claude%"), Employee(firstname="Johnny", lastname="Joh\\n"), Employee(firstname="Johnny", lastname="_ohn"), ]) claude = Employee.objects.create(firstname='Jean-Claude', lastname='Claude') john = Employee.objects.create(firstname='Johnny', lastname='John') john_sign = Employee.objects.create(firstname='%Joh\\nny', lastname='%Joh\\n') self.assertCountEqual( Employee.objects.filter(firstname__contains=F('lastname')), [john_sign, john, claude], ) self.assertCountEqual( Employee.objects.filter(firstname__startswith=F('lastname')), [john_sign, john], ) self.assertSequenceEqual( Employee.objects.filter(firstname__endswith=F('lastname')), [claude], ) def test_insensitive_patterns_escape(self): r""" Special characters (e.g. %, _ and \) stored in database are properly escaped when using a case insensitive pattern lookup with an expression -- refs #16731 """ Employee.objects.bulk_create([ Employee(firstname="Johnny", lastname="%john"), Employee(firstname="Jean-Claude", lastname="claud_"), Employee(firstname="Jean-Claude", lastname="claude%"), Employee(firstname="Johnny", lastname="joh\\n"), Employee(firstname="Johnny", lastname="_ohn"), ]) claude = Employee.objects.create(firstname='Jean-Claude', lastname='claude') john = Employee.objects.create(firstname='Johnny', lastname='john') john_sign = Employee.objects.create(firstname='%Joh\\nny', lastname='%joh\\n') self.assertCountEqual( Employee.objects.filter(firstname__icontains=F('lastname')), [john_sign, john, claude], ) self.assertCountEqual( Employee.objects.filter(firstname__istartswith=F('lastname')), [john_sign, john], ) self.assertSequenceEqual( Employee.objects.filter(firstname__iendswith=F('lastname')), [claude], ) @isolate_apps('expressions') class SimpleExpressionTests(SimpleTestCase): def test_equal(self): self.assertEqual(Expression(), Expression()) self.assertEqual( Expression(IntegerField()), Expression(output_field=IntegerField()) ) self.assertEqual(Expression(IntegerField()), mock.ANY) self.assertNotEqual( Expression(IntegerField()), Expression(CharField()) ) class TestModel(Model): field = IntegerField() other_field = IntegerField() self.assertNotEqual( Expression(TestModel._meta.get_field('field')), Expression(TestModel._meta.get_field('other_field')), ) def test_hash(self): self.assertEqual(hash(Expression()), hash(Expression())) self.assertEqual( hash(Expression(IntegerField())), hash(Expression(output_field=IntegerField())) ) self.assertNotEqual( hash(Expression(IntegerField())), hash(Expression(CharField())), ) class TestModel(Model): field = IntegerField() other_field = IntegerField() self.assertNotEqual( hash(Expression(TestModel._meta.get_field('field'))), hash(Expression(TestModel._meta.get_field('other_field'))), ) class ExpressionsNumericTests(TestCase): @classmethod def setUpTestData(cls): Number(integer=-1).save() Number(integer=42).save() Number(integer=1337).save() Number.objects.update(float=F('integer')) def test_fill_with_value_from_same_object(self): """ We can fill a value in all objects with an other value of the same object. 
""" self.assertQuerysetEqual( Number.objects.all(), [(-1, -1), (42, 42), (1337, 1337)], lambda n: (n.integer, round(n.float)), ordered=False ) def test_increment_value(self): """ We can increment a value of all objects in a query set. """ self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2) self.assertQuerysetEqual( Number.objects.all(), [(-1, -1), (43, 42), (1338, 1337)], lambda n: (n.integer, round(n.float)), ordered=False ) def test_filter_not_equals_other_field(self): """ We can filter for objects, where a value is not equals the value of an other field. """ self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2) self.assertQuerysetEqual( Number.objects.exclude(float=F('integer')), [(43, 42), (1338, 1337)], lambda n: (n.integer, round(n.float)), ordered=False ) def test_complex_expressions(self): """ Complex expressions of different connection types are possible. """ n = Number.objects.create(integer=10, float=123.45) self.assertEqual(Number.objects.filter(pk=n.pk).update( float=F('integer') + F('float') * 2), 1) self.assertEqual(Number.objects.get(pk=n.pk).integer, 10) self.assertEqual(Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3)) class ExpressionOperatorTests(TestCase): @classmethod def setUpTestData(cls): cls.n = Number.objects.create(integer=42, float=15.5) cls.n1 = Number.objects.create(integer=-42, float=-15.5) def test_lefthand_addition(self): # LH Addition of floats and integers Number.objects.filter(pk=self.n.pk).update( integer=F('integer') + 15, float=F('float') + 42.7 ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3)) def test_lefthand_subtraction(self): # LH Subtraction of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') - 15, float=F('float') - 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3)) def test_lefthand_multiplication(self): # Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') * 15, float=F('float') * 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3)) def test_lefthand_division(self): # LH Division of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') / 2, float=F('float') / 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3)) def test_lefthand_modulo(self): # LH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') % 20) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2) def test_lefthand_bitwise_and(self): # LH Bitwise ands on integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer').bitand(56)) Number.objects.filter(pk=self.n1.pk).update(integer=F('integer').bitand(-56)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -64) def test_lefthand_bitwise_left_shift_operator(self): Number.objects.update(integer=F('integer').bitleftshift(2)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 168) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -168) def 
test_lefthand_bitwise_right_shift_operator(self): Number.objects.update(integer=F('integer').bitrightshift(2)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 10) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -11) def test_lefthand_bitwise_or(self): # LH Bitwise or on integers Number.objects.update(integer=F('integer').bitor(48)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -10) def test_lefthand_transformed_field_bitwise_or(self): Employee.objects.create(firstname='Max', lastname='Mustermann') with register_lookup(CharField, Length): qs = Employee.objects.annotate(bitor=F('lastname__length').bitor(48)) self.assertEqual(qs.get().bitor, 58) def test_lefthand_power(self): # LH Power arithmetic operation on floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') ** 2, float=F('float') ** 1.5) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 1764) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(61.02, places=2)) def test_lefthand_bitwise_xor(self): Number.objects.update(integer=F('integer').bitxor(48)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 26) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -26) def test_lefthand_bitwise_xor_null(self): employee = Employee.objects.create(firstname='John', lastname='Doe') Employee.objects.update(salary=F('salary').bitxor(48)) employee.refresh_from_db() self.assertIsNone(employee.salary) @unittest.skipUnless(connection.vendor == 'oracle', "Oracle doesn't support bitwise XOR.") def test_lefthand_bitwise_xor_not_supported(self): msg = 'Bitwise XOR is not supported in Oracle.' with self.assertRaisesMessage(NotSupportedError, msg): Number.objects.update(integer=F('integer').bitxor(48)) def test_right_hand_addition(self): # Right hand operators Number.objects.filter(pk=self.n.pk).update(integer=15 + F('integer'), float=42.7 + F('float')) # RH Addition of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3)) def test_right_hand_subtraction(self): Number.objects.filter(pk=self.n.pk).update(integer=15 - F('integer'), float=42.7 - F('float')) # RH Subtraction of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3)) def test_right_hand_multiplication(self): # RH Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=15 * F('integer'), float=42.7 * F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3)) def test_right_hand_division(self): # RH Division of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=640 / F('integer'), float=42.7 / F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3)) def test_right_hand_modulo(self): # RH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=69 % F('integer')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) def test_righthand_power(self): # RH Power arithmetic operation on floats and integers Number.objects.filter(pk=self.n.pk).update(integer=2 ** F('integer'), float=1.5 ** F('float')) 
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 4398046511104) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(536.308, places=3)) class FTimeDeltaTests(TestCase): @classmethod def setUpTestData(cls): cls.sday = sday = datetime.date(2010, 6, 25) cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000) midnight = datetime.time(0) delta0 = datetime.timedelta(0) delta1 = datetime.timedelta(microseconds=253000) delta2 = datetime.timedelta(seconds=44) delta3 = datetime.timedelta(hours=21, minutes=8) delta4 = datetime.timedelta(days=10) delta5 = datetime.timedelta(days=90) # Test data is set so that deltas and delays will be # strictly increasing. cls.deltas = [] cls.delays = [] cls.days_long = [] # e0: started same day as assigned, zero duration end = stime + delta0 cls.e0 = Experiment.objects.create( name='e0', assigned=sday, start=stime, end=end, completed=end.date(), estimated_time=delta0, ) cls.deltas.append(delta0) cls.delays.append(cls.e0.start - datetime.datetime.combine(cls.e0.assigned, midnight)) cls.days_long.append(cls.e0.completed - cls.e0.assigned) # e1: started one day after assigned, tiny duration, data # set so that end time has no fractional seconds, which # tests an edge case on sqlite. delay = datetime.timedelta(1) end = stime + delay + delta1 e1 = Experiment.objects.create( name='e1', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta1, ) cls.deltas.append(delta1) cls.delays.append(e1.start - datetime.datetime.combine(e1.assigned, midnight)) cls.days_long.append(e1.completed - e1.assigned) # e2: started three days after assigned, small duration end = stime + delta2 e2 = Experiment.objects.create( name='e2', assigned=sday - datetime.timedelta(3), start=stime, end=end, completed=end.date(), estimated_time=datetime.timedelta(hours=1), ) cls.deltas.append(delta2) cls.delays.append(e2.start - datetime.datetime.combine(e2.assigned, midnight)) cls.days_long.append(e2.completed - e2.assigned) # e3: started four days after assigned, medium duration delay = datetime.timedelta(4) end = stime + delay + delta3 e3 = Experiment.objects.create( name='e3', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta3, ) cls.deltas.append(delta3) cls.delays.append(e3.start - datetime.datetime.combine(e3.assigned, midnight)) cls.days_long.append(e3.completed - e3.assigned) # e4: started 10 days after assignment, long duration end = stime + delta4 e4 = Experiment.objects.create( name='e4', assigned=sday - datetime.timedelta(10), start=stime, end=end, completed=end.date(), estimated_time=delta4 - datetime.timedelta(1), ) cls.deltas.append(delta4) cls.delays.append(e4.start - datetime.datetime.combine(e4.assigned, midnight)) cls.days_long.append(e4.completed - e4.assigned) # e5: started a month after assignment, very long duration delay = datetime.timedelta(30) end = stime + delay + delta5 e5 = Experiment.objects.create( name='e5', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta5, ) cls.deltas.append(delta5) cls.delays.append(e5.start - datetime.datetime.combine(e5.assigned, midnight)) cls.days_long.append(e5.completed - e5.assigned) cls.expnames = [e.name for e in Experiment.objects.all()] def test_multiple_query_compilation(self): # Ticket #21643 queryset = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1)) q1 = str(queryset.query) q2 = str(queryset.query) self.assertEqual(q1, q2) def test_query_clone(self): # Ticket 
#21643 - Crash when compiling query more than once qs = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1)) qs2 = qs.all() list(qs) list(qs2) # Intentionally no assert def test_delta_add(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.filter(end__lt=F('start') + delta)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(end__lt=delta + F('start'))] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(end__lte=F('start') + delta)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_delta_subtract(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.filter(start__gt=F('end') - delta)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(start__gte=F('end') - delta)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_exclude(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.exclude(end__lt=F('start') + delta)] self.assertEqual(test_set, self.expnames[i:]) test_set = [e.name for e in Experiment.objects.exclude(end__lte=F('start') + delta)] self.assertEqual(test_set, self.expnames[i + 1:]) def test_date_comparison(self): for i, days in enumerate(self.days_long): test_set = [e.name for e in Experiment.objects.filter(completed__lt=F('assigned') + days)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(completed__lte=F('assigned') + days)] self.assertEqual(test_set, self.expnames[:i + 1]) @skipUnlessDBFeature("supports_mixed_date_datetime_comparisons") def test_mixed_comparisons1(self): for i, delay in enumerate(self.delays): test_set = [e.name for e in Experiment.objects.filter(assigned__gt=F('start') - delay)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(assigned__gte=F('start') - delay)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_mixed_comparisons2(self): for i, delay in enumerate(self.delays): delay = datetime.timedelta(delay.days) test_set = [e.name for e in Experiment.objects.filter(start__lt=F('assigned') + delay)] self.assertEqual(test_set, self.expnames[:i]) test_set = [ e.name for e in Experiment.objects.filter(start__lte=F('assigned') + delay + datetime.timedelta(1)) ] self.assertEqual(test_set, self.expnames[:i + 1]) def test_delta_update(self): for delta in self.deltas: exps = Experiment.objects.all() expected_durations = [e.duration() for e in exps] expected_starts = [e.start + delta for e in exps] expected_ends = [e.end + delta for e in exps] Experiment.objects.update(start=F('start') + delta, end=F('end') + delta) exps = Experiment.objects.all() new_starts = [e.start for e in exps] new_ends = [e.end for e in exps] new_durations = [e.duration() for e in exps] self.assertEqual(expected_starts, new_starts) self.assertEqual(expected_ends, new_ends) self.assertEqual(expected_durations, new_durations) def test_invalid_operator(self): with self.assertRaises(DatabaseError): list(Experiment.objects.filter(start=F('start') * datetime.timedelta(0))) def test_durationfield_add(self): zeros = [e.name for e in Experiment.objects.filter(start=F('start') + F('estimated_time'))] self.assertEqual(zeros, ['e0']) end_less = [e.name for e in Experiment.objects.filter(end__lt=F('start') + F('estimated_time'))] self.assertEqual(end_less, ['e2']) delta_math = [ e.name for e in 
Experiment.objects.filter(end__gte=F('start') + F('estimated_time') + datetime.timedelta(hours=1)) ] self.assertEqual(delta_math, ['e4']) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('start') + Value(None, output_field=DurationField()), output_field=DateTimeField(), )) self.assertIsNone(queryset.first().shifted) def test_duration_expressions(self): for delta in self.deltas: qs = Experiment.objects.annotate(duration=F('estimated_time') + delta) for obj in qs: self.assertEqual(obj.duration, obj.estimated_time + delta) @skipUnlessDBFeature('supports_temporal_subtraction') def test_date_subtraction(self): queryset = Experiment.objects.annotate( completion_duration=F('completed') - F('assigned'), ) at_least_5_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=5))} self.assertEqual(at_least_5_days, {'e3', 'e4', 'e5'}) at_least_120_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=120))} self.assertEqual(at_least_120_days, {'e5'}) less_than_5_days = {e.name for e in queryset.filter(completion_duration__lt=datetime.timedelta(days=5))} self.assertEqual(less_than_5_days, {'e0', 'e1', 'e2'}) queryset = Experiment.objects.annotate( difference=F('completed') - Value(None, output_field=DateField()), ) self.assertIsNone(queryset.first().difference) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('completed') - Value(None, output_field=DurationField()), output_field=DateField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_date_subquery_subtraction(self): subquery = Experiment.objects.filter(pk=OuterRef('pk')).values('completed') queryset = Experiment.objects.annotate( difference=subquery - F('completed'), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature('supports_temporal_subtraction') def test_date_case_subtraction(self): queryset = Experiment.objects.annotate( date_case=Case( When(Q(name='e0'), then=F('completed')), output_field=DateField(), ), completed_value=Value( self.e0.completed, output_field=DateField(), ), difference=F('date_case') - F('completed_value'), ).filter(difference=datetime.timedelta()) self.assertEqual(queryset.get(), self.e0) @skipUnlessDBFeature('supports_temporal_subtraction') def test_time_subtraction(self): Time.objects.create(time=datetime.time(12, 30, 15, 2345)) queryset = Time.objects.annotate( difference=F('time') - Value(datetime.time(11, 15, 0)), ) self.assertEqual( queryset.get().difference, datetime.timedelta(hours=1, minutes=15, seconds=15, microseconds=2345) ) queryset = Time.objects.annotate( difference=F('time') - Value(None, output_field=TimeField()), ) self.assertIsNone(queryset.first().difference) queryset = Time.objects.annotate(shifted=ExpressionWrapper( F('time') - Value(None, output_field=DurationField()), output_field=TimeField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_time_subquery_subtraction(self): Time.objects.create(time=datetime.time(12, 30, 15, 2345)) subquery = Time.objects.filter(pk=OuterRef('pk')).values('time') queryset = Time.objects.annotate( difference=subquery - F('time'), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subtraction(self): under_estimate = [ e.name for e in Experiment.objects.filter(estimated_time__gt=F('end') - F('start')) ] 
self.assertEqual(under_estimate, ['e2']) over_estimate = [ e.name for e in Experiment.objects.filter(estimated_time__lt=F('end') - F('start')) ] self.assertEqual(over_estimate, ['e4']) queryset = Experiment.objects.annotate( difference=F('start') - Value(None, output_field=DateTimeField()), ) self.assertIsNone(queryset.first().difference) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('start') - Value(None, output_field=DurationField()), output_field=DateTimeField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subquery_subtraction(self): subquery = Experiment.objects.filter(pk=OuterRef('pk')).values('start') queryset = Experiment.objects.annotate( difference=subquery - F('start'), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subtraction_microseconds(self): delta = datetime.timedelta(microseconds=8999999999999999) Experiment.objects.update(end=F('start') + delta) qs = Experiment.objects.annotate(delta=F('end') - F('start')) for e in qs: self.assertEqual(e.delta, delta) def test_duration_with_datetime(self): # Exclude e1 which has very high precision so we can test this on all # backends regardless of whether or not it supports # microsecond_precision. over_estimate = Experiment.objects.exclude(name='e1').filter( completed__gt=self.stime + F('estimated_time'), ).order_by('name') self.assertQuerysetEqual(over_estimate, ['e3', 'e4', 'e5'], lambda e: e.name) def test_duration_with_datetime_microseconds(self): delta = datetime.timedelta(microseconds=8999999999999999) qs = Experiment.objects.annotate(dt=ExpressionWrapper( F('start') + delta, output_field=DateTimeField(), )) for e in qs: self.assertEqual(e.dt, e.start + delta) def test_date_minus_duration(self): more_than_4_days = Experiment.objects.filter( assigned__lt=F('completed') - Value(datetime.timedelta(days=4)) ) self.assertQuerysetEqual(more_than_4_days, ['e3', 'e4', 'e5'], lambda e: e.name) def test_negative_timedelta_update(self): # subtract 30 seconds, 30 minutes, 2 hours and 2 days experiments = Experiment.objects.filter(name='e0').annotate( start_sub_seconds=F('start') + datetime.timedelta(seconds=-30), ).annotate( start_sub_minutes=F('start_sub_seconds') + datetime.timedelta(minutes=-30), ).annotate( start_sub_hours=F('start_sub_minutes') + datetime.timedelta(hours=-2), ).annotate( new_start=F('start_sub_hours') + datetime.timedelta(days=-2), ) expected_start = datetime.datetime(2010, 6, 23, 9, 45, 0) # subtract 30 microseconds experiments = experiments.annotate(new_start=F('new_start') + datetime.timedelta(microseconds=-30)) expected_start += datetime.timedelta(microseconds=+746970) experiments.update(start=F('new_start')) e0 = Experiment.objects.get(name='e0') self.assertEqual(e0.start, expected_start) class ValueTests(TestCase): def test_update_TimeField_using_Value(self): Time.objects.create() Time.objects.update(time=Value(datetime.time(1), output_field=TimeField())) self.assertEqual(Time.objects.get().time, datetime.time(1)) def test_update_UUIDField_using_Value(self): UUID.objects.create() UUID.objects.update(uuid=Value(uuid.UUID('12345678901234567890123456789012'), output_field=UUIDField())) self.assertEqual(UUID.objects.get().uuid, uuid.UUID('12345678901234567890123456789012')) def test_deconstruct(self): value = Value('name') path, args, kwargs = value.deconstruct() self.assertEqual(path, 'django.db.models.expressions.Value') 
        self.assertEqual(args, (value.value,))
        self.assertEqual(kwargs, {})

    def test_deconstruct_output_field(self):
        value = Value('name', output_field=CharField())
        path, args, kwargs = value.deconstruct()
        self.assertEqual(path, 'django.db.models.expressions.Value')
        self.assertEqual(args, (value.value,))
        self.assertEqual(len(kwargs), 1)
        self.assertEqual(kwargs['output_field'].deconstruct(), CharField().deconstruct())

    def test_equal(self):
        value = Value('name')
        self.assertEqual(value, Value('name'))
        self.assertNotEqual(value, Value('username'))

    def test_hash(self):
        d = {Value('name'): 'Bob'}
        self.assertIn(Value('name'), d)
        self.assertEqual(d[Value('name')], 'Bob')

    def test_equal_output_field(self):
        value = Value('name', output_field=CharField())
        same_value = Value('name', output_field=CharField())
        other_value = Value('name', output_field=TimeField())
        no_output_field = Value('name')
        self.assertEqual(value, same_value)
        self.assertNotEqual(value, other_value)
        self.assertNotEqual(value, no_output_field)

    def test_raise_empty_expressionlist(self):
        msg = 'ExpressionList requires at least one expression'
        with self.assertRaisesMessage(ValueError, msg):
            ExpressionList()

    def test_compile_unresolved(self):
        # This test might need to be revisited later on if #25425 is enforced.
        compiler = Time.objects.all().query.get_compiler(connection=connection)
        value = Value('foo')
        self.assertEqual(value.as_sql(compiler, connection), ('%s', ['foo']))
        value = Value('foo', output_field=CharField())
        self.assertEqual(value.as_sql(compiler, connection), ('%s', ['foo']))

    def test_resolve_output_field(self):
        value_types = [
            ('str', CharField),
            (True, BooleanField),
            (42, IntegerField),
            (3.14, FloatField),
            (datetime.date(2019, 5, 15), DateField),
            (datetime.datetime(2019, 5, 15), DateTimeField),
            (datetime.time(3, 16), TimeField),
            (datetime.timedelta(1), DurationField),
            (Decimal('3.14'), DecimalField),
            (b'', BinaryField),
            (uuid.uuid4(), UUIDField),
        ]
        for value, output_field_type in value_types:
            with self.subTest(type=type(value)):
                expr = Value(value)
                self.assertIsInstance(expr.output_field, output_field_type)

    def test_resolve_output_field_failure(self):
        msg = 'Cannot resolve expression type, unknown output_field'
        with self.assertRaisesMessage(FieldError, msg):
            Value(object()).output_field


class ExistsTests(TestCase):
    def test_optimizations(self):
        with CaptureQueriesContext(connection) as context:
            list(Experiment.objects.values(exists=Exists(
                Experiment.objects.order_by('pk'),
            )).order_by())
        captured_queries = context.captured_queries
        self.assertEqual(len(captured_queries), 1)
        captured_sql = captured_queries[0]['sql']
        self.assertNotIn(
            connection.ops.quote_name(Experiment._meta.pk.column),
            captured_sql,
        )
        self.assertIn(
            connection.ops.limit_offset_sql(None, 1),
            captured_sql,
        )
        self.assertNotIn('ORDER BY', captured_sql)


class FieldTransformTests(TestCase):

    @classmethod
    def setUpTestData(cls):
        cls.sday = sday = datetime.date(2010, 6, 25)
        cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000)
        cls.ex1 = Experiment.objects.create(
            name='Experiment 1',
            assigned=sday,
            completed=sday + datetime.timedelta(2),
            estimated_time=datetime.timedelta(2),
            start=stime,
            end=stime + datetime.timedelta(2),
        )

    def test_month_aggregation(self):
        self.assertEqual(
            Experiment.objects.aggregate(month_count=Count('assigned__month')),
            {'month_count': 1}
        )

    def test_transform_in_values(self):
        self.assertSequenceEqual(
            Experiment.objects.values('assigned__month'),
            [{'assigned__month': 6}],
        )

    def test_multiple_transforms_in_values(self):
self.assertSequenceEqual( Experiment.objects.values('end__date__month'), [{'end__date__month': 6}], ) class ReprTests(SimpleTestCase): def test_expressions(self): self.assertEqual( repr(Case(When(a=1))), "<Case: CASE WHEN <Q: (AND: ('a', 1))> THEN Value(None), ELSE Value(None)>" ) self.assertEqual( repr(When(Q(age__gte=18), then=Value('legal'))), "<When: WHEN <Q: (AND: ('age__gte', 18))> THEN Value(legal)>" ) self.assertEqual(repr(Col('alias', 'field')), "Col(alias, field)") self.assertEqual(repr(F('published')), "F(published)") self.assertEqual(repr(F('cost') + F('tax')), "<CombinedExpression: F(cost) + F(tax)>") self.assertEqual( repr(ExpressionWrapper(F('cost') + F('tax'), IntegerField())), "ExpressionWrapper(F(cost) + F(tax))" ) self.assertEqual(repr(Func('published', function='TO_CHAR')), "Func(F(published), function=TO_CHAR)") self.assertEqual(repr(OrderBy(Value(1))), 'OrderBy(Value(1), descending=False)') self.assertEqual(repr(RawSQL('table.col', [])), "RawSQL(table.col, [])") self.assertEqual(repr(Ref('sum_cost', Sum('cost'))), "Ref(sum_cost, Sum(F(cost)))") self.assertEqual(repr(Value(1)), "Value(1)") self.assertEqual( repr(ExpressionList(F('col'), F('anothercol'))), 'ExpressionList(F(col), F(anothercol))' ) self.assertEqual( repr(ExpressionList(OrderBy(F('col'), descending=False))), 'ExpressionList(OrderBy(F(col), descending=False))' ) def test_functions(self): self.assertEqual(repr(Coalesce('a', 'b')), "Coalesce(F(a), F(b))") self.assertEqual(repr(Concat('a', 'b')), "Concat(ConcatPair(F(a), F(b)))") self.assertEqual(repr(Length('a')), "Length(F(a))") self.assertEqual(repr(Lower('a')), "Lower(F(a))") self.assertEqual(repr(Substr('a', 1, 3)), "Substr(F(a), Value(1), Value(3))") self.assertEqual(repr(Upper('a')), "Upper(F(a))") def test_aggregates(self): self.assertEqual(repr(Avg('a')), "Avg(F(a))") self.assertEqual(repr(Count('a')), "Count(F(a))") self.assertEqual(repr(Count('*')), "Count('*')") self.assertEqual(repr(Max('a')), "Max(F(a))") self.assertEqual(repr(Min('a')), "Min(F(a))") self.assertEqual(repr(StdDev('a')), "StdDev(F(a), sample=False)") self.assertEqual(repr(Sum('a')), "Sum(F(a))") self.assertEqual(repr(Variance('a', sample=True)), "Variance(F(a), sample=True)") def test_distinct_aggregates(self): self.assertEqual(repr(Count('a', distinct=True)), "Count(F(a), distinct=True)") self.assertEqual(repr(Count('*', distinct=True)), "Count('*', distinct=True)") def test_filtered_aggregates(self): filter = Q(a=1) self.assertEqual(repr(Avg('a', filter=filter)), "Avg(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Count('a', filter=filter)), "Count(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Max('a', filter=filter)), "Max(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Min('a', filter=filter)), "Min(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(StdDev('a', filter=filter)), "StdDev(F(a), filter=(AND: ('a', 1)), sample=False)") self.assertEqual(repr(Sum('a', filter=filter)), "Sum(F(a), filter=(AND: ('a', 1)))") self.assertEqual( repr(Variance('a', sample=True, filter=filter)), "Variance(F(a), filter=(AND: ('a', 1)), sample=True)" ) self.assertEqual( repr(Count('a', filter=filter, distinct=True)), "Count(F(a), distinct=True, filter=(AND: ('a', 1)))" ) class CombinableTests(SimpleTestCase): bitwise_msg = 'Use .bitand() and .bitor() for bitwise logical operations.' 
    def test_negation(self):
        c = Combinable()
        self.assertEqual(-c, c * -1)

    def test_and(self):
        with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg):
            Combinable() & Combinable()

    def test_or(self):
        with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg):
            Combinable() | Combinable()

    def test_reversed_and(self):
        with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg):
            object() & Combinable()

    def test_reversed_or(self):
        with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg):
            object() | Combinable()


class CombinedExpressionTests(SimpleTestCase):
    def test_resolve_output_field(self):
        tests = [
            (IntegerField, AutoField, IntegerField),
            (AutoField, IntegerField, IntegerField),
            (IntegerField, DecimalField, DecimalField),
            (DecimalField, IntegerField, DecimalField),
            (IntegerField, FloatField, FloatField),
            (FloatField, IntegerField, FloatField),
        ]
        connectors = [Combinable.ADD, Combinable.SUB, Combinable.MUL, Combinable.DIV]
        for lhs, rhs, combined in tests:
            for connector in connectors:
                with self.subTest(lhs=lhs, connector=connector, rhs=rhs, combined=combined):
                    expr = CombinedExpression(
                        Expression(lhs()),
                        connector,
                        Expression(rhs()),
                    )
                    self.assertIsInstance(expr.output_field, combined)


class ExpressionWrapperTests(SimpleTestCase):
    def test_empty_group_by(self):
        expr = ExpressionWrapper(Value(3), output_field=IntegerField())
        self.assertEqual(expr.get_group_by_cols(alias=None), [])

    def test_non_empty_group_by(self):
        value = Value('f')
        value.output_field = None
        expr = ExpressionWrapper(Lower(value), output_field=IntegerField())
        group_by_cols = expr.get_group_by_cols(alias=None)
        self.assertEqual(group_by_cols, [expr.expression])
        self.assertEqual(group_by_cols[0].output_field, expr.output_field)
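
# Editor's note, not part of the original test module: the classes above exercise
# F() arithmetic, correlated OuterRef()/Exists() subqueries, and Value()
# output_field resolution. A minimal usage sketch of those building blocks,
# assuming the Company/Employee models imported by this test app, follows. The
# function name and its `company` argument are illustrative assumptions; the
# test suite never calls it.
def _expressions_usage_sketch(company):
    from django.db.models import Exists, F, OuterRef

    # Atomic, database-side increment: the read and the write happen in a
    # single UPDATE, so there is no race between fetching and saving.
    Company.objects.filter(pk=company.pk).update(num_chairs=F('num_chairs') + 10)

    # Annotate each employee with whether they are a CEO via a correlated
    # EXISTS subquery, then keep only the CEOs.
    is_ceo = Company.objects.filter(ceo=OuterRef('pk'))
    return Employee.objects.annotate(is_ceo=Exists(is_ceo)).filter(is_ceo=True)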
22a2c25482c54a7244618d95067773507096248847b9657d94694dc2038a0c83
#!/usr/bin/env python import argparse import atexit import copy import gc import os import shutil import socket import subprocess import sys import tempfile import warnings try: import django except ImportError as e: raise RuntimeError( 'Django module not found, reference tests/README.rst for instructions.' ) from e else: from django.apps import apps from django.conf import settings from django.db import connection, connections from django.test import TestCase, TransactionTestCase from django.test.runner import default_test_processes from django.test.selenium import SeleniumTestCaseBase from django.test.utils import NullTimeKeeper, TimeKeeper, get_runner from django.utils.deprecation import ( RemovedInDjango41Warning, RemovedInDjango50Warning, ) from django.utils.log import DEFAULT_LOGGING try: import MySQLdb except ImportError: pass else: # Ignore informational warnings from QuerySet.explain(). warnings.filterwarnings('ignore', r'\(1003, *', category=MySQLdb.Warning) # Make deprecation warnings errors to ensure no usage of deprecated features. warnings.simplefilter('error', RemovedInDjango50Warning) warnings.simplefilter('error', RemovedInDjango41Warning) # Make resource and runtime warning errors to ensure no usage of error prone # patterns. warnings.simplefilter("error", ResourceWarning) warnings.simplefilter("error", RuntimeWarning) # Ignore known warnings in test dependencies. warnings.filterwarnings("ignore", "'U' mode is deprecated", DeprecationWarning, module='docutils.io') # RemovedInDjango41Warning: Ignore MemcachedCache deprecation warning. warnings.filterwarnings( 'ignore', 'MemcachedCache is deprecated', category=RemovedInDjango41Warning, ) # Reduce garbage collection frequency to improve performance. Since CPython # uses refcounting, garbage collection only collects objects with cyclic # references, which are a minority, so the garbage collection threshold can be # larger than the default threshold of 700 allocations + deallocations without # much increase in memory usage. gc.set_threshold(100_000) RUNTESTS_DIR = os.path.abspath(os.path.dirname(__file__)) TEMPLATE_DIR = os.path.join(RUNTESTS_DIR, 'templates') # Create a specific subdirectory for the duration of the test suite. TMPDIR = tempfile.mkdtemp(prefix='django_') # Set the TMPDIR environment variable in addition to tempfile.tempdir # so that children processes inherit it. tempfile.tempdir = os.environ['TMPDIR'] = TMPDIR # Removing the temporary TMPDIR. atexit.register(shutil.rmtree, TMPDIR) SUBDIRS_TO_SKIP = [ 'data', 'import_error_package', 'test_runner_apps', ] ALWAYS_INSTALLED_APPS = [ 'django.contrib.contenttypes', 'django.contrib.auth', 'django.contrib.sites', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.admin.apps.SimpleAdminConfig', 'django.contrib.staticfiles', ] ALWAYS_MIDDLEWARE = [ 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ] # Need to add the associated contrib app to INSTALLED_APPS in some cases to # avoid "RuntimeError: Model class X doesn't declare an explicit app_label # and isn't in an application in INSTALLED_APPS." 
CONTRIB_TESTS_TO_APPS = { 'deprecation': ['django.contrib.flatpages', 'django.contrib.redirects'], 'flatpages_tests': ['django.contrib.flatpages'], 'redirects_tests': ['django.contrib.redirects'], } def get_test_modules(): modules = [] discovery_paths = [(None, RUNTESTS_DIR)] if connection.features.gis_enabled: # GIS tests are in nested apps discovery_paths.append(('gis_tests', os.path.join(RUNTESTS_DIR, 'gis_tests'))) else: SUBDIRS_TO_SKIP.append('gis_tests') for modpath, dirpath in discovery_paths: for f in os.scandir(dirpath): if ('.' not in f.name and os.path.basename(f.name) not in SUBDIRS_TO_SKIP and not f.is_file() and os.path.exists(os.path.join(f.path, '__init__.py'))): modules.append((modpath, f.name)) return modules def get_installed(): return [app_config.name for app_config in apps.get_app_configs()] def setup(verbosity, test_labels, start_at, start_after): # Reduce each test label to just the top-level module part. test_labels_set = set() for label in test_labels: test_module = label.split('.')[0] test_labels_set.add(test_module) # Force declaring available_apps in TransactionTestCase for faster tests. def no_available_apps(self): raise Exception("Please define available_apps in TransactionTestCase " "and its subclasses.") TransactionTestCase.available_apps = property(no_available_apps) TestCase.available_apps = None state = { 'INSTALLED_APPS': settings.INSTALLED_APPS, 'ROOT_URLCONF': getattr(settings, "ROOT_URLCONF", ""), 'TEMPLATES': settings.TEMPLATES, 'LANGUAGE_CODE': settings.LANGUAGE_CODE, 'STATIC_URL': settings.STATIC_URL, 'STATIC_ROOT': settings.STATIC_ROOT, 'MIDDLEWARE': settings.MIDDLEWARE, } # Redirect some settings for the duration of these tests. settings.INSTALLED_APPS = ALWAYS_INSTALLED_APPS settings.ROOT_URLCONF = 'urls' settings.STATIC_URL = 'static/' settings.STATIC_ROOT = os.path.join(TMPDIR, 'static') settings.TEMPLATES = [{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [TEMPLATE_DIR], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }] settings.LANGUAGE_CODE = 'en' settings.SITE_ID = 1 settings.MIDDLEWARE = ALWAYS_MIDDLEWARE settings.MIGRATION_MODULES = { # This lets us skip creating migrations for the test models as many of # them depend on one of the following contrib applications. 'auth': None, 'contenttypes': None, 'sessions': None, } log_config = copy.deepcopy(DEFAULT_LOGGING) # Filter out non-error logging so we don't have to capture it in lots of # tests. log_config['loggers']['django']['level'] = 'ERROR' settings.LOGGING = log_config settings.SILENCED_SYSTEM_CHECKS = [ 'fields.W342', # ForeignKey(unique=True) -> OneToOneField ] # Load all the ALWAYS_INSTALLED_APPS. django.setup() # It would be nice to put this validation earlier but it must come after # django.setup() so that connection.features.gis_enabled can be accessed # without raising AppRegistryNotReady when running gis_tests in isolation # on some backends (e.g. PostGIS). if 'gis_tests' in test_labels_set and not connection.features.gis_enabled: print('Aborting: A GIS database backend is required to run gis_tests.') sys.exit(1) def _module_match_label(module_label, label): # Exact or ancestor match. return module_label == label or module_label.startswith(label + '.') # Load all the test model apps. 
test_modules = get_test_modules() found_start = not (start_at or start_after) installed_app_names = set(get_installed()) for modpath, module_name in test_modules: if modpath: module_label = modpath + '.' + module_name else: module_label = module_name if not found_start: if start_at and _module_match_label(module_label, start_at): found_start = True elif start_after and _module_match_label(module_label, start_after): found_start = True continue else: continue # if the module (or an ancestor) was named on the command line, or # no modules were named (i.e., run all), import # this module and add it to INSTALLED_APPS. module_found_in_labels = not test_labels or any( _module_match_label(module_label, label) for label in test_labels_set ) if module_name in CONTRIB_TESTS_TO_APPS and module_found_in_labels: for contrib_app in CONTRIB_TESTS_TO_APPS[module_name]: if contrib_app not in settings.INSTALLED_APPS: settings.INSTALLED_APPS.append(contrib_app) if module_found_in_labels and module_label not in installed_app_names: if verbosity >= 2: print("Importing application %s" % module_name) settings.INSTALLED_APPS.append(module_label) # Add contrib.gis to INSTALLED_APPS if needed (rather than requiring # @override_settings(INSTALLED_APPS=...) on all test cases. gis = 'django.contrib.gis' if connection.features.gis_enabled and gis not in settings.INSTALLED_APPS: if verbosity >= 2: print("Importing application %s" % gis) settings.INSTALLED_APPS.append(gis) apps.set_installed_apps(settings.INSTALLED_APPS) # Set an environment variable that other code may consult to see if # Django's own test suite is running. os.environ['RUNNING_DJANGOS_TEST_SUITE'] = 'true' return state def teardown(state): # Restore the old settings. for key, value in state.items(): setattr(settings, key, value) # Discard the multiprocessing.util finalizer that tries to remove a # temporary directory that's already removed by this script's # atexit.register(shutil.rmtree, TMPDIR) handler. Prevents # FileNotFoundError at the end of a test run (#27890). from multiprocessing.util import _finalizer_registry _finalizer_registry.pop((-100, 0), None) del os.environ['RUNNING_DJANGOS_TEST_SUITE'] def actual_test_processes(parallel): if parallel == 0: # This doesn't work before django.setup() on some databases. if all(conn.features.can_clone_databases for conn in connections.all()): return default_test_processes() else: return 1 else: return parallel class ActionSelenium(argparse.Action): """ Validate the comma-separated list of requested browsers. """ def __call__(self, parser, namespace, values, option_string=None): browsers = values.split(',') for browser in browsers: try: SeleniumTestCaseBase.import_webdriver(browser) except ImportError: raise argparse.ArgumentError(self, "Selenium browser specification '%s' is not valid." % browser) setattr(namespace, self.dest, browsers) def django_tests(verbosity, interactive, failfast, keepdb, reverse, test_labels, debug_sql, parallel, tags, exclude_tags, test_name_patterns, start_at, start_after, pdb, buffer, timing): if verbosity >= 1: msg = "Testing against Django installed in '%s'" % os.path.dirname(django.__file__) max_parallel = default_test_processes() if parallel == 0 else parallel if max_parallel > 1: msg += " with up to %d processes" % max_parallel print(msg) state = setup(verbosity, test_labels, start_at, start_after) # Run the test suite, including the extra validation tests. 
if not hasattr(settings, 'TEST_RUNNER'): settings.TEST_RUNNER = 'django.test.runner.DiscoverRunner' TestRunner = get_runner(settings) test_runner = TestRunner( verbosity=verbosity, interactive=interactive, failfast=failfast, keepdb=keepdb, reverse=reverse, debug_sql=debug_sql, parallel=actual_test_processes(parallel), tags=tags, exclude_tags=exclude_tags, test_name_patterns=test_name_patterns, pdb=pdb, buffer=buffer, timing=timing, ) failures = test_runner.run_tests(test_labels or get_installed()) teardown(state) return failures def get_app_test_labels(verbosity, start_at, start_after): test_labels = [] state = setup(verbosity, test_labels, start_at, start_after) test_labels = get_installed() teardown(state) return test_labels def get_subprocess_args(options): subprocess_args = [ sys.executable, __file__, '--settings=%s' % options.settings ] if options.failfast: subprocess_args.append('--failfast') if options.verbosity: subprocess_args.append('--verbosity=%s' % options.verbosity) if not options.interactive: subprocess_args.append('--noinput') if options.tags: subprocess_args.append('--tag=%s' % options.tags) if options.exclude_tags: subprocess_args.append('--exclude_tag=%s' % options.exclude_tags) return subprocess_args def bisect_tests(bisection_label, options, test_labels, start_at, start_after): if not test_labels: test_labels = get_app_test_labels(options.verbosity, start_at, start_after) print('***** Bisecting test suite: %s' % ' '.join(test_labels)) # Make sure the bisection point isn't in the test list # Also remove tests that need to be run in specific combinations for label in [bisection_label, 'model_inheritance_same_model_name']: try: test_labels.remove(label) except ValueError: pass subprocess_args = get_subprocess_args(options) iteration = 1 while len(test_labels) > 1: midpoint = len(test_labels) // 2 test_labels_a = test_labels[:midpoint] + [bisection_label] test_labels_b = test_labels[midpoint:] + [bisection_label] print('***** Pass %da: Running the first half of the test suite' % iteration) print('***** Test labels: %s' % ' '.join(test_labels_a)) failures_a = subprocess.run(subprocess_args + test_labels_a) print('***** Pass %db: Running the second half of the test suite' % iteration) print('***** Test labels: %s' % ' '.join(test_labels_b)) print('') failures_b = subprocess.run(subprocess_args + test_labels_b) if failures_a.returncode and not failures_b.returncode: print("***** Problem found in first half. Bisecting again...") iteration += 1 test_labels = test_labels_a[:-1] elif failures_b.returncode and not failures_a.returncode: print("***** Problem found in second half. Bisecting again...") iteration += 1 test_labels = test_labels_b[:-1] elif failures_a.returncode and failures_b.returncode: print("***** Multiple sources of failure found") break else: print("***** No source of failure found... 
try pair execution (--pair)") break if len(test_labels) == 1: print("***** Source of error: %s" % test_labels[0]) def paired_tests(paired_test, options, test_labels, start_at, start_after): if not test_labels: test_labels = get_app_test_labels(options.verbosity, start_at, start_after) print('***** Trying paired execution') # Make sure the constant member of the pair isn't in the test list # Also remove tests that need to be run in specific combinations for label in [paired_test, 'model_inheritance_same_model_name']: try: test_labels.remove(label) except ValueError: pass subprocess_args = get_subprocess_args(options) for i, label in enumerate(test_labels): print('***** %d of %d: Check test pairing with %s' % ( i + 1, len(test_labels), label)) failures = subprocess.call(subprocess_args + [label, paired_test]) if failures: print('***** Found problem pair with %s' % label) return print('***** No problem pair found') if __name__ == "__main__": parser = argparse.ArgumentParser(description="Run the Django test suite.") parser.add_argument( 'modules', nargs='*', metavar='module', help='Optional path(s) to test modules; e.g. "i18n" or ' '"i18n.tests.TranslationTests.test_lazy_objects".', ) parser.add_argument( '-v', '--verbosity', default=1, type=int, choices=[0, 1, 2, 3], help='Verbosity level; 0=minimal output, 1=normal output, 2=all output', ) parser.add_argument( '--noinput', action='store_false', dest='interactive', help='Tells Django to NOT prompt the user for input of any kind.', ) parser.add_argument( '--failfast', action='store_true', help='Tells Django to stop running the test suite after first failed test.', ) parser.add_argument( '--keepdb', action='store_true', help='Tells Django to preserve the test database between runs.', ) parser.add_argument( '--settings', help='Python path to settings module, e.g. "myproject.settings". If ' 'this isn\'t provided, either the DJANGO_SETTINGS_MODULE ' 'environment variable or "test_sqlite" will be used.', ) parser.add_argument( '--bisect', help='Bisect the test suite to discover a test that causes a test ' 'failure when combined with the named test.', ) parser.add_argument( '--pair', help='Run the test suite in pairs with the named test to find problem pairs.', ) parser.add_argument( '--reverse', action='store_true', help='Sort test suites and test cases in opposite order to debug ' 'test side effects not apparent with normal execution lineup.', ) parser.add_argument( '--selenium', action=ActionSelenium, metavar='BROWSERS', help='A comma-separated list of browsers to run the Selenium tests against.', ) parser.add_argument( '--headless', action='store_true', help='Run selenium tests in headless mode, if the browser supports the option.', ) parser.add_argument( '--selenium-hub', help='A URL for a selenium hub instance to use in combination with --selenium.', ) parser.add_argument( '--external-host', default=socket.gethostname(), help='The external host that can be reached by the selenium hub instance when running Selenium ' 'tests via Selenium Hub.', ) parser.add_argument( '--debug-sql', action='store_true', help='Turn on the SQL query logger within tests.', ) parser.add_argument( '--parallel', nargs='?', default=0, type=int, const=default_test_processes(), metavar='N', help='Run tests using up to N parallel processes.', ) parser.add_argument( '--tag', dest='tags', action='append', help='Run only tests with the specified tags. 
Can be used multiple times.', ) parser.add_argument( '--exclude-tag', dest='exclude_tags', action='append', help='Do not run tests with the specified tag. Can be used multiple times.', ) parser.add_argument( '--start-after', dest='start_after', help='Run tests starting after the specified top-level module.', ) parser.add_argument( '--start-at', dest='start_at', help='Run tests starting at the specified top-level module.', ) parser.add_argument( '--pdb', action='store_true', help='Runs the PDB debugger on error or failure.' ) parser.add_argument( '-b', '--buffer', action='store_true', help='Discard output of passing tests.', ) parser.add_argument( '--timing', action='store_true', help='Output timings, including database set up and total run time.', ) parser.add_argument( '-k', dest='test_name_patterns', action='append', help=( 'Only run test methods and classes matching test name pattern. ' 'Same as unittest -k option. Can be used multiple times.' ), ) options = parser.parse_args() using_selenium_hub = options.selenium and options.selenium_hub if options.selenium_hub and not options.selenium: parser.error('--selenium-hub and --external-host require --selenium to be used.') if using_selenium_hub and not options.external_host: parser.error('--selenium-hub and --external-host must be used together.') # Allow including a trailing slash on app_labels for tab completion convenience options.modules = [os.path.normpath(labels) for labels in options.modules] mutually_exclusive_options = [options.start_at, options.start_after, options.modules] enabled_module_options = [bool(option) for option in mutually_exclusive_options].count(True) if enabled_module_options > 1: print('Aborting: --start-at, --start-after, and test labels are mutually exclusive.') sys.exit(1) for opt_name in ['start_at', 'start_after']: opt_val = getattr(options, opt_name) if opt_val: if '.' in opt_val: print('Aborting: --%s must be a top-level module.' % opt_name.replace('_', '-')) sys.exit(1) setattr(options, opt_name, os.path.normpath(opt_val)) if options.settings: os.environ['DJANGO_SETTINGS_MODULE'] = options.settings else: os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_sqlite') options.settings = os.environ['DJANGO_SETTINGS_MODULE'] if options.selenium: if not options.tags: options.tags = ['selenium'] elif 'selenium' not in options.tags: options.tags.append('selenium') if options.selenium_hub: SeleniumTestCaseBase.selenium_hub = options.selenium_hub SeleniumTestCaseBase.external_host = options.external_host SeleniumTestCaseBase.headless = options.headless SeleniumTestCaseBase.browsers = options.selenium if options.bisect: bisect_tests( options.bisect, options, options.modules, options.start_at, options.start_after, ) elif options.pair: paired_tests( options.pair, options, options.modules, options.start_at, options.start_after, ) else: time_keeper = TimeKeeper() if options.timing else NullTimeKeeper() with time_keeper.timed('Total run'): failures = django_tests( options.verbosity, options.interactive, options.failfast, options.keepdb, options.reverse, options.modules, options.debug_sql, options.parallel, options.tags, options.exclude_tags, getattr(options, 'test_name_patterns', None), options.start_at, options.start_after, options.pdb, options.buffer, options.timing, ) time_keeper.print_results() if failures: sys.exit(1)
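
# Editor's note, not part of the original script: typical invocations, based on
# the options defined above (the app label, pattern, and process count are
# illustrative choices, not required values):
#
#   ./runtests.py expressions -v 2                   # run one test app verbosely
#   ./runtests.py --parallel 4 --settings=test_sqlite
#   ./runtests.py --bisect=expressions               # find a test that fails combined with 'expressions'
#   ./runtests.py -k test_lefthand_addition expressions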
dd0e2527f4d0a4ba45e8183a451130325cbdc9230a96444e5b2fce9758688222
# Django documentation build configuration file, created by # sphinx-quickstart on Thu Mar 27 09:06:53 2008. # # This file is execfile()d with the current directory set to its containing dir. # # The contents of this file are pickled, so don't put values in the namespace # that aren't picklable (module imports are okay, they're removed automatically). # # All configuration values have a default; values that are commented out # serve to show the default. import sys from os.path import abspath, dirname, join # Workaround for sphinx-build recursion limit overflow: # pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL) # RuntimeError: maximum recursion depth exceeded while pickling an object # # Python's default allowed recursion depth is 1000 but this isn't enough for # building docs/ref/settings.txt sometimes. # https://groups.google.com/g/sphinx-dev/c/MtRf64eGtv4/discussion sys.setrecursionlimit(2000) # Make sure we get the version of this copy of Django sys.path.insert(1, dirname(dirname(abspath(__file__)))) # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.append(abspath(join(dirname(__file__), "_ext"))) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. needs_sphinx = '1.6.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ "djangodocs", 'sphinx.ext.extlinks', "sphinx.ext.intersphinx", "sphinx.ext.viewcode", "sphinx.ext.autosectionlabel", ] # AutosectionLabel settings. # Uses a <page>:<label> schema which doesn't work for duplicate sub-section # labels, so set max depth. autosectionlabel_prefix_document = True autosectionlabel_maxdepth = 2 # Linkcheck settings. linkcheck_ignore = [ # Special-use addresses and domain names. (RFC 6761/6890) r'^https?://(?:127\.0\.0\.1|\[::1\])(?::\d+)?/', r'^https?://(?:[^/\.]+\.)*example\.(?:com|net|org)(?::\d+)?/', r'^https?://(?:[^/\.]+\.)*(?:example|invalid|localhost|test)(?::\d+)?/', # Pages that are inaccessible because they require authentication. r'^https://github\.com/[^/]+/[^/]+/fork', r'^https://code\.djangoproject\.com/github/login', r'^https://code\.djangoproject\.com/newticket', r'^https://(?:code|www)\.djangoproject\.com/admin/', r'^https://www\.djangoproject\.com/community/add/blogs/', r'^https://www\.google\.com/webmasters/tools/ping', r'^https://search\.google\.com/search-console/welcome', # Fragments used to dynamically switch content or populate fields. r'^https://github\.com/[^#]+#L\d+-L\d+$', r'^https://help\.apple\.com/itc/podcasts_connect/#/itc', # Anchors on certain pages with missing a[name] attributes. r'^https://tools\.ietf\.org/html/rfc1123\.html#section-', ] # Spelling check needs an additional module that is not installed by default. # Add it only if spelling check is requested so docs can be generated without it. if 'spelling' in sys.argv: extensions.append("sphinxcontrib.spelling") # Spelling language. spelling_lang = 'en_US' # Location of word list. spelling_word_list_filename = 'spelling_wordlist' spelling_warning = True # Add any paths that contain templates here, relative to this directory. # templates_path = [] # The suffix of source filenames. source_suffix = '.txt' # The encoding of source files. 
# source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'contents' # General substitutions. project = 'Django' copyright = 'Django Software Foundation and contributors' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '4.0' # The full version, including alpha/beta/rc tags. try: from django import VERSION, get_version except ImportError: release = version else: def django_release(): pep440ver = get_version() if VERSION[3:5] == ('alpha', 0) and 'dev' not in pep440ver: return pep440ver + '.dev' return pep440ver release = django_release() # The "development version" of Django django_next_version = '4.0' extlinks = { 'bpo': ('https://bugs.python.org/issue%s', 'bpo-'), 'commit': ('https://github.com/django/django/commit/%s', ''), 'cve': ('https://nvd.nist.gov/vuln/detail/CVE-%s', 'CVE-'), # A file or directory. GitHub redirects from blob to tree if needed. 'source': ('https://github.com/django/django/blob/main/%s', ''), 'ticket': ('https://code.djangoproject.com/ticket/%s', '#'), } # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # Location for .po/.mo translation files used when language is set locale_dirs = ['locale/'] # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build', '_theme', 'requirements.txt'] # The reST default role (used for this markup: `text`) to use for all documents. default_role = "default-role-error" # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = False # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'trac' # Links to Python's docs should reference the most recent version of the 3.x # branch, which is located at this URL. intersphinx_mapping = { 'python': ('https://docs.python.org/3/', None), 'sphinx': ('https://www.sphinx-doc.org/en/master/', None), 'psycopg2': ('https://www.psycopg.org/docs/', None), } # Python's docs don't change every week. intersphinx_cache_limit = 90 # days # The 'versionadded' and 'versionchanged' directives are overridden. suppress_warnings = ['app.add_directive'] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = "djangodocs" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = ["_theme"] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". # html_title = None # A shorter title for the navigation bar. 
Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ["_static"] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. html_last_updated_fmt = '%b %d, %Y' # Content template for the index page. # html_index = '' # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'Djangodoc' modindex_common_prefix = ["django."] # Appended to every page rst_epilog = """ .. |django-users| replace:: :ref:`django-users <django-users-mailing-list>` .. |django-core-mentorship| replace:: :ref:`django-core-mentorship <django-core-mentorship-mailing-list>` .. |django-developers| replace:: :ref:`django-developers <django-developers-mailing-list>` .. |django-announce| replace:: :ref:`django-announce <django-announce-mailing-list>` .. |django-updates| replace:: :ref:`django-updates <django-updates-mailing-list>` """ # -- Options for LaTeX output -------------------------------------------------- latex_elements = { 'preamble': ( '\\DeclareUnicodeCharacter{2264}{\\ensuremath{\\le}}' '\\DeclareUnicodeCharacter{2265}{\\ensuremath{\\ge}}' '\\DeclareUnicodeCharacter{2665}{[unicode-heart]}' '\\DeclareUnicodeCharacter{2713}{[unicode-checkmark]}' ), } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, document class [howto/manual]). # latex_documents = [] latex_documents = [ ('contents', 'django.tex', 'Django Documentation', 'Django Software Foundation', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. 
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
# latex_appendices = []

# If false, no module index is generated.
# latex_domain_indices = True


# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(
    'ref/django-admin',
    'django-admin',
    'Utility script for the Django Web framework',
    ['Django Software Foundation'],
    1
)]


# -- Options for Texinfo output ------------------------------------------------

# List of tuples (startdocname, targetname, title, author, dir_entry,
# description, category, toctree_only)
texinfo_documents = [(
    master_doc, "django", "", "", "Django", "Documentation of the Django framework",
    "Web development", False
)]


# -- Options for Epub output ---------------------------------------------------

# Bibliographic Dublin Core info.
epub_title = project
epub_author = 'Django Software Foundation'
epub_publisher = 'Django Software Foundation'
epub_copyright = copyright

# The basename for the epub file. It defaults to the project name.
# epub_basename = 'Django'

# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
epub_theme = 'djangodocs-epub'

# The language of the text. It defaults to the language option
# or en if the language is not set.
# epub_language = ''

# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''

# The unique identifier of the text. This can be an ISBN number
# or the project homepage.
# epub_identifier = ''

# A unique identification for the text.
# epub_uid = ''

# A tuple containing the cover image and cover page html template filenames.
epub_cover = ('', 'epub-cover.html')

# A sequence of (type, uri, title) tuples for the guide element of content.opf.
# epub_guide = ()

# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_pre_files = []

# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_post_files = []

# A list of files that should not be packed into the epub file.
# epub_exclude_files = []

# The depth of the table of contents in toc.ncx.
# epub_tocdepth = 3

# Allow duplicate toc entries.
# epub_tocdup = True

# Choose between 'default' and 'includehidden'.
# epub_tocscope = 'default'

# Fix unsupported image types using the PIL.
# epub_fix_images = False

# Scale large images.
# epub_max_image_width = 0

# How to display URL addresses: 'footnote', 'no', or 'inline'.
# epub_show_urls = 'inline'

# If false, no index is generated.
# epub_use_index = True
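# --- Hedged illustration (editorial sketch, not part of the Sphinx configuration) ---
# Each ``extlinks`` entry above maps a role name to an (url_template, caption_prefix)
# pair. Roughly speaking, the sphinx.ext.extlinks extension expands a role such as
# :ticket:`12345` by substituting the target into the template and prefixing the
# link text, along these lines:
_ticket_template, _ticket_prefix = extlinks['ticket']
_demo_target = '12345'                               # the role's argument
_demo_url = _ticket_template % _demo_target          # https://code.djangoproject.com/ticket/12345
_demo_text = _ticket_prefix + _demo_target           # rendered link text: "#12345"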
4761190a11c44c8828fabef749004ad8b853ca0dcbfae46d150740e655e38ae9
import ctypes import faulthandler import io import itertools import logging import multiprocessing import os import pickle import sys import textwrap import unittest from importlib import import_module from io import StringIO from django.core.management import call_command from django.db import connections from django.test import SimpleTestCase, TestCase from django.test.utils import ( NullTimeKeeper, TimeKeeper, iter_test_cases, setup_databases as _setup_databases, setup_test_environment, teardown_databases as _teardown_databases, teardown_test_environment, ) from django.utils.datastructures import OrderedSet try: import ipdb as pdb except ImportError: import pdb try: import tblib.pickling_support except ImportError: tblib = None class DebugSQLTextTestResult(unittest.TextTestResult): def __init__(self, stream, descriptions, verbosity): self.logger = logging.getLogger('django.db.backends') self.logger.setLevel(logging.DEBUG) self.debug_sql_stream = None super().__init__(stream, descriptions, verbosity) def startTest(self, test): self.debug_sql_stream = StringIO() self.handler = logging.StreamHandler(self.debug_sql_stream) self.logger.addHandler(self.handler) super().startTest(test) def stopTest(self, test): super().stopTest(test) self.logger.removeHandler(self.handler) if self.showAll: self.debug_sql_stream.seek(0) self.stream.write(self.debug_sql_stream.read()) self.stream.writeln(self.separator2) def addError(self, test, err): super().addError(test, err) if self.debug_sql_stream is None: # Error before tests e.g. in setUpTestData(). sql = '' else: self.debug_sql_stream.seek(0) sql = self.debug_sql_stream.read() self.errors[-1] = self.errors[-1] + (sql,) def addFailure(self, test, err): super().addFailure(test, err) self.debug_sql_stream.seek(0) self.failures[-1] = self.failures[-1] + (self.debug_sql_stream.read(),) def addSubTest(self, test, subtest, err): super().addSubTest(test, subtest, err) if err is not None: self.debug_sql_stream.seek(0) errors = self.failures if issubclass(err[0], test.failureException) else self.errors errors[-1] = errors[-1] + (self.debug_sql_stream.read(),) def printErrorList(self, flavour, errors): for test, err, sql_debug in errors: self.stream.writeln(self.separator1) self.stream.writeln("%s: %s" % (flavour, self.getDescription(test))) self.stream.writeln(self.separator2) self.stream.writeln(err) self.stream.writeln(self.separator2) self.stream.writeln(sql_debug) class PDBDebugResult(unittest.TextTestResult): """ Custom result class that triggers a PDB session when an error or failure occurs. """ def addError(self, test, err): super().addError(test, err) self.debug(err) def addFailure(self, test, err): super().addFailure(test, err) self.debug(err) def debug(self, error): self._restoreStdout() self.buffer = False exc_type, exc_value, traceback = error print("\nOpening PDB: %r" % exc_value) pdb.post_mortem(traceback) class DummyList: """ Dummy list class for faking storage of results in unittest.TestResult. """ __slots__ = () def append(self, item): pass class RemoteTestResult(unittest.TestResult): """ Extend unittest.TestResult to record events in the child processes so they can be replayed in the parent process. Events include things like which tests succeeded or failed. """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # Fake storage of results to reduce memory usage. These are used by the # unittest default methods, but here 'events' is used instead. 
dummy_list = DummyList() self.failures = dummy_list self.errors = dummy_list self.skipped = dummy_list self.expectedFailures = dummy_list self.unexpectedSuccesses = dummy_list if tblib is not None: tblib.pickling_support.install() self.events = [] def __getstate__(self): # Make this class picklable by removing the file-like buffer # attributes. This is possible since they aren't used after unpickling # after being sent to ParallelTestSuite. state = self.__dict__.copy() state.pop('_stdout_buffer', None) state.pop('_stderr_buffer', None) state.pop('_original_stdout', None) state.pop('_original_stderr', None) return state @property def test_index(self): return self.testsRun - 1 def _confirm_picklable(self, obj): """ Confirm that obj can be pickled and unpickled as multiprocessing will need to pickle the exception in the child process and unpickle it in the parent process. Let the exception rise, if not. """ pickle.loads(pickle.dumps(obj)) def _print_unpicklable_subtest(self, test, subtest, pickle_exc): print(""" Subtest failed: test: {} subtest: {} Unfortunately, the subtest that failed cannot be pickled, so the parallel test runner cannot handle it cleanly. Here is the pickling error: > {} You should re-run this test with --parallel=1 to reproduce the failure with a cleaner failure message. """.format(test, subtest, pickle_exc)) def check_picklable(self, test, err): # Ensure that sys.exc_info() tuples are picklable. This displays a # clear multiprocessing.pool.RemoteTraceback generated in the child # process instead of a multiprocessing.pool.MaybeEncodingError, making # the root cause easier to figure out for users who aren't familiar # with the multiprocessing module. Since we're in a forked process, # our best chance to communicate with them is to print to stdout. try: self._confirm_picklable(err) except Exception as exc: original_exc_txt = repr(err[1]) original_exc_txt = textwrap.fill(original_exc_txt, 75, initial_indent=' ', subsequent_indent=' ') pickle_exc_txt = repr(exc) pickle_exc_txt = textwrap.fill(pickle_exc_txt, 75, initial_indent=' ', subsequent_indent=' ') if tblib is None: print(""" {} failed: {} Unfortunately, tracebacks cannot be pickled, making it impossible for the parallel test runner to handle this exception cleanly. In order to see the traceback, you should install tblib: python -m pip install tblib """.format(test, original_exc_txt)) else: print(""" {} failed: {} Unfortunately, the exception it raised cannot be pickled, making it impossible for the parallel test runner to handle it cleanly. Here's the error encountered while trying to pickle the exception: {} You should re-run this test with the --parallel=1 option to reproduce the failure and get a correct traceback. 
""".format(test, original_exc_txt, pickle_exc_txt)) raise def check_subtest_picklable(self, test, subtest): try: self._confirm_picklable(subtest) except Exception as exc: self._print_unpicklable_subtest(test, subtest, exc) raise def startTestRun(self): super().startTestRun() self.events.append(('startTestRun',)) def stopTestRun(self): super().stopTestRun() self.events.append(('stopTestRun',)) def startTest(self, test): super().startTest(test) self.events.append(('startTest', self.test_index)) def stopTest(self, test): super().stopTest(test) self.events.append(('stopTest', self.test_index)) def addError(self, test, err): self.check_picklable(test, err) self.events.append(('addError', self.test_index, err)) super().addError(test, err) def addFailure(self, test, err): self.check_picklable(test, err) self.events.append(('addFailure', self.test_index, err)) super().addFailure(test, err) def addSubTest(self, test, subtest, err): # Follow Python's implementation of unittest.TestResult.addSubTest() by # not doing anything when a subtest is successful. if err is not None: # Call check_picklable() before check_subtest_picklable() since # check_picklable() performs the tblib check. self.check_picklable(test, err) self.check_subtest_picklable(test, subtest) self.events.append(('addSubTest', self.test_index, subtest, err)) super().addSubTest(test, subtest, err) def addSuccess(self, test): self.events.append(('addSuccess', self.test_index)) super().addSuccess(test) def addSkip(self, test, reason): self.events.append(('addSkip', self.test_index, reason)) super().addSkip(test, reason) def addExpectedFailure(self, test, err): # If tblib isn't installed, pickling the traceback will always fail. # However we don't want tblib to be required for running the tests # when they pass or fail as expected. Drop the traceback when an # expected failure occurs. if tblib is None: err = err[0], err[1], None self.check_picklable(test, err) self.events.append(('addExpectedFailure', self.test_index, err)) super().addExpectedFailure(test, err) def addUnexpectedSuccess(self, test): self.events.append(('addUnexpectedSuccess', self.test_index)) super().addUnexpectedSuccess(test) def wasSuccessful(self): """Tells whether or not this result was a success.""" failure_types = {'addError', 'addFailure', 'addSubTest', 'addUnexpectedSuccess'} return all(e[0] not in failure_types for e in self.events) def _exc_info_to_string(self, err, test): # Make this method no-op. It only powers the default unittest behavior # for recording errors, but this class pickles errors into 'events' # instead. return '' class RemoteTestRunner: """ Run tests and record everything but don't display anything. The implementation matches the unpythonic coding style of unittest2. """ resultclass = RemoteTestResult def __init__(self, failfast=False, resultclass=None, buffer=False): self.failfast = failfast self.buffer = buffer if resultclass is not None: self.resultclass = resultclass def run(self, test): result = self.resultclass() unittest.registerResult(result) result.failfast = self.failfast result.buffer = self.buffer test(result) return result def default_test_processes(): """Default number of test processes when using the --parallel option.""" # The current implementation of the parallel test runner requires # multiprocessing to start subprocesses with fork(). 
if multiprocessing.get_start_method() != 'fork': return 1 try: return int(os.environ['DJANGO_TEST_PROCESSES']) except KeyError: return multiprocessing.cpu_count() _worker_id = 0 def _init_worker(counter): """ Switch to databases dedicated to this worker. This helper lives at module-level because of the multiprocessing module's requirements. """ global _worker_id with counter.get_lock(): counter.value += 1 _worker_id = counter.value for alias in connections: connection = connections[alias] settings_dict = connection.creation.get_test_db_clone_settings(str(_worker_id)) # connection.settings_dict must be updated in place for changes to be # reflected in django.db.connections. If the following line assigned # connection.settings_dict = settings_dict, new threads would connect # to the default database instead of the appropriate clone. connection.settings_dict.update(settings_dict) connection.close() def _run_subsuite(args): """ Run a suite of tests with a RemoteTestRunner and return a RemoteTestResult. This helper lives at module-level and its arguments are wrapped in a tuple because of the multiprocessing module's requirements. """ runner_class, subsuite_index, subsuite, failfast, buffer = args runner = runner_class(failfast=failfast, buffer=buffer) result = runner.run(subsuite) return subsuite_index, result.events class ParallelTestSuite(unittest.TestSuite): """ Run a series of tests in parallel in several processes. While the unittest module's documentation implies that orchestrating the execution of tests is the responsibility of the test runner, in practice, it appears that TestRunner classes are more concerned with formatting and displaying test results. Since there are fewer use cases for customizing TestSuite than TestRunner, implementing parallelization at the level of the TestSuite improves interoperability with existing custom test runners. A single instance of a test runner can still collect results from all tests without being aware that they have been run in parallel. """ # In case someone wants to modify these in a subclass. init_worker = _init_worker run_subsuite = _run_subsuite runner_class = RemoteTestRunner def __init__(self, suite, processes, failfast=False, buffer=False): self.subsuites = partition_suite_by_case(suite) self.processes = processes self.failfast = failfast self.buffer = buffer super().__init__() def run(self, result): """ Distribute test cases across workers. Return an identifier of each test case with its result in order to use imap_unordered to show results as soon as they're available. To minimize pickling errors when getting results from workers: - pass back numeric indexes in self.subsuites instead of tests - make tracebacks picklable with tblib, if available Even with tblib, errors may still occur for dynamically created exception classes which cannot be unpickled. 
""" counter = multiprocessing.Value(ctypes.c_int, 0) pool = multiprocessing.Pool( processes=self.processes, initializer=self.init_worker.__func__, initargs=[counter], ) args = [ (self.runner_class, index, subsuite, self.failfast, self.buffer) for index, subsuite in enumerate(self.subsuites) ] test_results = pool.imap_unordered(self.run_subsuite.__func__, args) while True: if result.shouldStop: pool.terminate() break try: subsuite_index, events = test_results.next(timeout=0.1) except multiprocessing.TimeoutError: continue except StopIteration: pool.close() break tests = list(self.subsuites[subsuite_index]) for event in events: event_name = event[0] handler = getattr(result, event_name, None) if handler is None: continue test = tests[event[1]] args = event[2:] handler(test, *args) pool.join() return result def __iter__(self): return iter(self.subsuites) class DiscoverRunner: """A Django test runner that uses unittest2 test discovery.""" test_suite = unittest.TestSuite parallel_test_suite = ParallelTestSuite test_runner = unittest.TextTestRunner test_loader = unittest.defaultTestLoader reorder_by = (TestCase, SimpleTestCase) def __init__(self, pattern=None, top_level=None, verbosity=1, interactive=True, failfast=False, keepdb=False, reverse=False, debug_mode=False, debug_sql=False, parallel=0, tags=None, exclude_tags=None, test_name_patterns=None, pdb=False, buffer=False, enable_faulthandler=True, timing=False, **kwargs): self.pattern = pattern self.top_level = top_level self.verbosity = verbosity self.interactive = interactive self.failfast = failfast self.keepdb = keepdb self.reverse = reverse self.debug_mode = debug_mode self.debug_sql = debug_sql self.parallel = parallel self.tags = set(tags or []) self.exclude_tags = set(exclude_tags or []) if not faulthandler.is_enabled() and enable_faulthandler: try: faulthandler.enable(file=sys.stderr.fileno()) except (AttributeError, io.UnsupportedOperation): faulthandler.enable(file=sys.__stderr__.fileno()) self.pdb = pdb if self.pdb and self.parallel > 1: raise ValueError('You cannot use --pdb with parallel tests; pass --parallel=1 to use it.') self.buffer = buffer self.test_name_patterns = None self.time_keeper = TimeKeeper() if timing else NullTimeKeeper() if test_name_patterns: # unittest does not export the _convert_select_pattern function # that converts command-line arguments to patterns. self.test_name_patterns = { pattern if '*' in pattern else '*%s*' % pattern for pattern in test_name_patterns } @classmethod def add_arguments(cls, parser): parser.add_argument( '-t', '--top-level-directory', dest='top_level', help='Top level of project for unittest discovery.', ) parser.add_argument( '-p', '--pattern', default="test*.py", help='The test matching pattern. Defaults to test*.py.', ) parser.add_argument( '--keepdb', action='store_true', help='Preserves the test DB between runs.' ) parser.add_argument( '-r', '--reverse', action='store_true', help='Reverses test case order.', ) parser.add_argument( '--debug-mode', action='store_true', help='Sets settings.DEBUG to True.', ) parser.add_argument( '-d', '--debug-sql', action='store_true', help='Prints logged SQL queries on failure.', ) parser.add_argument( '--parallel', nargs='?', default=1, type=int, const=default_test_processes(), metavar='N', help='Run tests using up to N parallel processes.', ) parser.add_argument( '--tag', action='append', dest='tags', help='Run only tests with the specified tag. 
Can be used multiple times.', ) parser.add_argument( '--exclude-tag', action='append', dest='exclude_tags', help='Do not run tests with the specified tag. Can be used multiple times.', ) parser.add_argument( '--pdb', action='store_true', help='Runs a debugger (pdb, or ipdb if installed) on error or failure.' ) parser.add_argument( '-b', '--buffer', action='store_true', help='Discard output from passing tests.', ) parser.add_argument( '--no-faulthandler', action='store_false', dest='enable_faulthandler', help='Disables the Python faulthandler module during tests.', ) parser.add_argument( '--timing', action='store_true', help=( 'Output timings, including database set up and total run time.' ), ) parser.add_argument( '-k', action='append', dest='test_name_patterns', help=( 'Only run test methods and classes that match the pattern ' 'or substring. Can be used multiple times. Same as ' 'unittest -k option.' ), ) def setup_test_environment(self, **kwargs): setup_test_environment(debug=self.debug_mode) unittest.installHandler() def load_tests_for_label(self, label, discover_kwargs): label_as_path = os.path.abspath(label) tests = None # If a module, or "module.ClassName[.method_name]", just run those. if not os.path.exists(label_as_path): tests = self.test_loader.loadTestsFromName(label) if tests.countTestCases(): return tests # Try discovery if "label" is a package or directory. is_importable, is_package = try_importing(label) if is_importable: if not is_package: return tests elif not os.path.isdir(label_as_path): if os.path.exists(label_as_path): assert tests is None raise RuntimeError( f'One of the test labels is a path to a file: {label!r}, ' f'which is not supported. Use a dotted module name ' f'instead.' ) return tests kwargs = discover_kwargs.copy() if os.path.isdir(label_as_path) and not self.top_level: kwargs['top_level_dir'] = find_top_level(label_as_path) tests = self.test_loader.discover(start_dir=label, **kwargs) # Make unittest forget the top-level dir it calculated from this run, # to support running tests from two different top-levels. self.test_loader._top_level_dir = None return tests def build_suite(self, test_labels=None, extra_tests=None, **kwargs): test_labels = test_labels or ['.'] extra_tests = extra_tests or [] self.test_loader.testNamePatterns = self.test_name_patterns discover_kwargs = {} if self.pattern is not None: discover_kwargs['pattern'] = self.pattern if self.top_level is not None: discover_kwargs['top_level_dir'] = self.top_level all_tests = [] for label in test_labels: tests = self.load_tests_for_label(label, discover_kwargs) all_tests.extend(iter_test_cases(tests)) all_tests.extend(iter_test_cases(extra_tests)) if self.tags or self.exclude_tags: if self.verbosity >= 2: if self.tags: print('Including test tag(s): %s.' % ', '.join(sorted(self.tags))) if self.exclude_tags: print('Excluding test tag(s): %s.' % ', '.join(sorted(self.exclude_tags))) all_tests = filter_tests_by_tags(all_tests, self.tags, self.exclude_tags) # Put the failures detected at load time first for quicker feedback. # _FailedTest objects include things like test modules that couldn't be # found or that couldn't be loaded due to syntax errors. test_types = (unittest.loader._FailedTest, *self.reorder_by) all_tests = list(reorder_tests(all_tests, test_types, self.reverse)) if self.verbosity >= 1: print('Found %d tests.' 
% len(all_tests)) suite = self.test_suite(all_tests) if self.parallel > 1: parallel_suite = self.parallel_test_suite( suite, self.parallel, self.failfast, self.buffer, ) # Since tests are distributed across processes on a per-TestCase # basis, there's no need for more processes than TestCases. parallel_units = len(parallel_suite.subsuites) self.parallel = min(self.parallel, parallel_units) # If there's only one TestCase, parallelization isn't needed. if self.parallel > 1: suite = parallel_suite return suite def setup_databases(self, **kwargs): return _setup_databases( self.verbosity, self.interactive, time_keeper=self.time_keeper, keepdb=self.keepdb, debug_sql=self.debug_sql, parallel=self.parallel, **kwargs ) def get_resultclass(self): if self.debug_sql: return DebugSQLTextTestResult elif self.pdb: return PDBDebugResult def get_test_runner_kwargs(self): return { 'failfast': self.failfast, 'resultclass': self.get_resultclass(), 'verbosity': self.verbosity, 'buffer': self.buffer, } def run_checks(self, databases): # Checks are run after database creation since some checks require # database access. call_command('check', verbosity=self.verbosity, databases=databases) def run_suite(self, suite, **kwargs): kwargs = self.get_test_runner_kwargs() runner = self.test_runner(**kwargs) return runner.run(suite) def teardown_databases(self, old_config, **kwargs): """Destroy all the non-mirror databases.""" _teardown_databases( old_config, verbosity=self.verbosity, parallel=self.parallel, keepdb=self.keepdb, ) def teardown_test_environment(self, **kwargs): unittest.removeHandler() teardown_test_environment() def suite_result(self, suite, result, **kwargs): return len(result.failures) + len(result.errors) def _get_databases(self, suite): databases = {} for test in iter_test_cases(suite): test_databases = getattr(test, 'databases', None) if test_databases == '__all__': test_databases = connections if test_databases: serialized_rollback = getattr(test, 'serialized_rollback', False) databases.update( (alias, serialized_rollback or databases.get(alias, False)) for alias in test_databases ) return databases def get_databases(self, suite): databases = self._get_databases(suite) if self.verbosity >= 2: unused_databases = [alias for alias in connections if alias not in databases] if unused_databases: print('Skipping setup of unused database(s): %s.' % ', '.join(sorted(unused_databases))) return databases def run_tests(self, test_labels, extra_tests=None, **kwargs): """ Run the unit tests for all the test labels in the provided list. Test labels should be dotted Python paths to test modules, test classes, or test methods. A list of 'extra' tests may also be provided; these tests will be added to the test suite. Return the number of tests that failed. """ self.setup_test_environment() suite = self.build_suite(test_labels, extra_tests) databases = self.get_databases(suite) serialized_aliases = set( alias for alias, serialize in databases.items() if serialize ) with self.time_keeper.timed('Total database setup'): old_config = self.setup_databases( aliases=databases, serialized_aliases=serialized_aliases, ) run_failed = False try: self.run_checks(databases) result = self.run_suite(suite) except Exception: run_failed = True raise finally: try: with self.time_keeper.timed('Total database teardown'): self.teardown_databases(old_config) self.teardown_test_environment() except Exception: # Silence teardown exceptions if an exception was raised during # runs to avoid shadowing it. 
if not run_failed: raise self.time_keeper.print_results() return self.suite_result(suite, result) def try_importing(label): """ Try importing a test label, and return (is_importable, is_package). Relative labels like "." and ".." are seen as directories. """ try: mod = import_module(label) except (ImportError, TypeError): return (False, False) return (True, hasattr(mod, '__path__')) def find_top_level(top_level): # Try to be a bit smarter than unittest about finding the default top-level # for a given directory path, to avoid breaking relative imports. # (Unittest's default is to set top-level equal to the path, which means # relative imports will result in "Attempted relative import in # non-package."). # We'd be happy to skip this and require dotted module paths (which don't # cause this problem) instead of file paths (which do), but in the case of # a directory in the cwd, which would be equally valid if considered as a # top-level module or as a directory path, unittest unfortunately prefers # the latter. while True: init_py = os.path.join(top_level, '__init__.py') if not os.path.exists(init_py): break try_next = os.path.dirname(top_level) if try_next == top_level: # __init__.py all the way down? give up. break top_level = try_next return top_level def reorder_tests(tests, classes, reverse=False): """ Reorder an iterable of tests by test type, removing any duplicates. `classes` is a sequence of types. The result is returned as an iterator. All tests of type classes[0] are placed first, then tests of type classes[1], etc. Tests with no match in classes are placed last. If `reverse` is True, sort tests within classes in opposite order but don't reverse test classes. """ bins = [OrderedSet() for i in range(len(classes) + 1)] *class_bins, last_bin = bins for test in tests: for test_bin, test_class in zip(class_bins, classes): if isinstance(test, test_class): break else: test_bin = last_bin test_bin.add(test) if reverse: bins = (reversed(tests) for tests in bins) return itertools.chain(*bins) def partition_suite_by_case(suite): """Partition a test suite by test case, preserving the order of tests.""" suite_class = type(suite) all_tests = iter_test_cases(suite) return [ suite_class(tests) for _, tests in itertools.groupby(all_tests, type) ] def test_match_tags(test, tags, exclude_tags): if isinstance(test, unittest.loader._FailedTest): # Tests that couldn't load always match to prevent tests from falsely # passing due e.g. to syntax errors. return True test_tags = set(getattr(test, 'tags', [])) test_fn_name = getattr(test, '_testMethodName', str(test)) if hasattr(test, test_fn_name): test_fn = getattr(test, test_fn_name) test_fn_tags = list(getattr(test_fn, 'tags', [])) test_tags = test_tags.union(test_fn_tags) if tags and test_tags.isdisjoint(tags): return False return test_tags.isdisjoint(exclude_tags) def filter_tests_by_tags(tests, tags, exclude_tags): """Return the matching tests as an iterator.""" return (test for test in tests if test_match_tags(test, tags, exclude_tags))
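# --- Hedged usage sketch (editorial addition, not part of this module) ---
# DiscoverRunner is normally driven through "manage.py test", but it can also be
# instantiated directly once settings are configured. The settings module, test
# label, and tag below are hypothetical placeholders for illustration only.
if __name__ == '__main__':
    import django

    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myproject.settings')  # hypothetical project
    django.setup()
    runner = DiscoverRunner(verbosity=2, parallel=1, tags={'slow'})  # 'slow' is an assumed tag
    # run_tests() returns the number of failed tests; turn it into an exit status.
    failures = runner.run_tests(['myapp.tests'])  # hypothetical test label
    sys.exit(bool(failures))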
bba49ba034567c0012523ba5a8050af310644382f240d52b7dc64816b36067e7
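# --- Hedged illustration (editorial note, not part of this module) ---
# The assertion helpers defined below (assertRedirects, assertContains,
# assertTemplateUsed, and the query-counting context used by assertNumQueries)
# are meant to be called from tests that subclass django.test.SimpleTestCase,
# TransactionTestCase, or TestCase. A typical, purely illustrative test:
#
#     from django.test import TestCase
#
#     class SmokeTests(TestCase):                             # hypothetical test case
#         def test_homepage(self):
#             with self.assertNumQueries(2):                  # assumed query count
#                 response = self.client.get('/')             # hypothetical URL
#             self.assertContains(response, 'Welcome')        # assumed page text
#             self.assertTemplateUsed(response, 'home.html')  # hypothetical template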
import asyncio import difflib import json import logging import posixpath import sys import threading import unittest import warnings from collections import Counter from contextlib import contextmanager from copy import copy, deepcopy from difflib import get_close_matches from functools import wraps from unittest.suite import _DebugResult from unittest.util import safe_repr from urllib.parse import ( parse_qsl, unquote, urlencode, urljoin, urlparse, urlsplit, urlunparse, ) from urllib.request import url2pathname from asgiref.sync import async_to_sync from django.apps import apps from django.conf import settings from django.core import mail from django.core.exceptions import ImproperlyConfigured, ValidationError from django.core.files import locks from django.core.handlers.wsgi import WSGIHandler, get_path_info from django.core.management import call_command from django.core.management.color import no_style from django.core.management.sql import emit_post_migrate_signal from django.core.servers.basehttp import ThreadedWSGIServer, WSGIRequestHandler from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction from django.forms.fields import CharField from django.http import QueryDict from django.http.request import split_domain_port, validate_host from django.test.client import AsyncClient, Client from django.test.html import HTMLParseError, parse_html from django.test.signals import setting_changed, template_rendered from django.test.utils import ( CaptureQueriesContext, ContextList, compare_xml, modify_settings, override_settings, ) from django.utils.deprecation import RemovedInDjango41Warning from django.utils.functional import classproperty from django.utils.version import PY310 from django.views.static import serve __all__ = ('TestCase', 'TransactionTestCase', 'SimpleTestCase', 'skipIfDBFeature', 'skipUnlessDBFeature') def to_list(value): """ Put value into a list if it's not already one. Return an empty list if value is None. """ if value is None: value = [] elif not isinstance(value, list): value = [value] return value def assert_and_parse_html(self, html, user_msg, msg): try: dom = parse_html(html) except HTMLParseError as e: standardMsg = '%s\n%s' % (msg, e) self.fail(self._formatMessage(user_msg, standardMsg)) return dom class _AssertNumQueriesContext(CaptureQueriesContext): def __init__(self, test_case, num, connection): self.test_case = test_case self.num = num super().__init__(connection) def __exit__(self, exc_type, exc_value, traceback): super().__exit__(exc_type, exc_value, traceback) if exc_type is not None: return executed = len(self) self.test_case.assertEqual( executed, self.num, "%d queries executed, %d expected\nCaptured queries were:\n%s" % ( executed, self.num, '\n'.join( '%d. %s' % (i, query['sql']) for i, query in enumerate(self.captured_queries, start=1) ) ) ) class _AssertTemplateUsedContext: def __init__(self, test_case, template_name): self.test_case = test_case self.template_name = template_name self.rendered_templates = [] self.rendered_template_names = [] self.context = ContextList() def on_template_render(self, sender, signal, template, context, **kwargs): self.rendered_templates.append(template) self.rendered_template_names.append(template.name) self.context.append(copy(context)) def test(self): return self.template_name in self.rendered_template_names def message(self): return '%s was not rendered.' 
% self.template_name def __enter__(self): template_rendered.connect(self.on_template_render) return self def __exit__(self, exc_type, exc_value, traceback): template_rendered.disconnect(self.on_template_render) if exc_type is not None: return if not self.test(): message = self.message() if self.rendered_templates: message += ' Following templates were rendered: %s' % ( ', '.join(self.rendered_template_names) ) else: message += ' No template was rendered.' self.test_case.fail(message) class _AssertTemplateNotUsedContext(_AssertTemplateUsedContext): def test(self): return self.template_name not in self.rendered_template_names def message(self): return '%s was rendered.' % self.template_name class _DatabaseFailure: def __init__(self, wrapped, message): self.wrapped = wrapped self.message = message def __call__(self): raise AssertionError(self.message) class SimpleTestCase(unittest.TestCase): # The class we'll use for the test client self.client. # Can be overridden in derived classes. client_class = Client async_client_class = AsyncClient _overridden_settings = None _modified_settings = None databases = set() _disallowed_database_msg = ( 'Database %(operation)s to %(alias)r are not allowed in SimpleTestCase ' 'subclasses. Either subclass TestCase or TransactionTestCase to ensure ' 'proper test isolation or add %(alias)r to %(test)s.databases to silence ' 'this failure.' ) _disallowed_connection_methods = [ ('connect', 'connections'), ('temporary_connection', 'connections'), ('cursor', 'queries'), ('chunked_cursor', 'queries'), ] @classmethod def setUpClass(cls): super().setUpClass() if cls._overridden_settings: cls._cls_overridden_context = override_settings(**cls._overridden_settings) cls._cls_overridden_context.enable() if cls._modified_settings: cls._cls_modified_context = modify_settings(cls._modified_settings) cls._cls_modified_context.enable() cls._add_databases_failures() @classmethod def _validate_databases(cls): if cls.databases == '__all__': return frozenset(connections) for alias in cls.databases: if alias not in connections: message = '%s.%s.databases refers to %r which is not defined in settings.DATABASES.' % ( cls.__module__, cls.__qualname__, alias, ) close_matches = get_close_matches(alias, list(connections)) if close_matches: message += ' Did you mean %r?' 
% close_matches[0] raise ImproperlyConfigured(message) return frozenset(cls.databases) @classmethod def _add_databases_failures(cls): cls.databases = cls._validate_databases() for alias in connections: if alias in cls.databases: continue connection = connections[alias] for name, operation in cls._disallowed_connection_methods: message = cls._disallowed_database_msg % { 'test': '%s.%s' % (cls.__module__, cls.__qualname__), 'alias': alias, 'operation': operation, } method = getattr(connection, name) setattr(connection, name, _DatabaseFailure(method, message)) @classmethod def _remove_databases_failures(cls): for alias in connections: if alias in cls.databases: continue connection = connections[alias] for name, _ in cls._disallowed_connection_methods: method = getattr(connection, name) setattr(connection, name, method.wrapped) @classmethod def tearDownClass(cls): cls._remove_databases_failures() if hasattr(cls, '_cls_modified_context'): cls._cls_modified_context.disable() delattr(cls, '_cls_modified_context') if hasattr(cls, '_cls_overridden_context'): cls._cls_overridden_context.disable() delattr(cls, '_cls_overridden_context') super().tearDownClass() def __call__(self, result=None): """ Wrapper around default __call__ method to perform common Django test set up. This means that user-defined Test Cases aren't required to include a call to super().setUp(). """ self._setup_and_call(result) def debug(self): """Perform the same as __call__(), without catching the exception.""" debug_result = _DebugResult() self._setup_and_call(debug_result, debug=True) def _setup_and_call(self, result, debug=False): """ Perform the following in order: pre-setup, run test, post-teardown, skipping pre/post hooks if test is set to be skipped. If debug=True, reraise any errors in setup and use super().debug() instead of __call__() to run the test. """ testMethod = getattr(self, self._testMethodName) skipped = ( getattr(self.__class__, "__unittest_skip__", False) or getattr(testMethod, "__unittest_skip__", False) ) # Convert async test methods. if asyncio.iscoroutinefunction(testMethod): setattr(self, self._testMethodName, async_to_sync(testMethod)) if not skipped: try: self._pre_setup() except Exception: if debug: raise result.addError(self, sys.exc_info()) return if debug: super().debug() else: super().__call__(result) if not skipped: try: self._post_teardown() except Exception: if debug: raise result.addError(self, sys.exc_info()) return def _pre_setup(self): """ Perform pre-test setup: * Create a test client. * Clear the mail test outbox. """ self.client = self.client_class() self.async_client = self.async_client_class() mail.outbox = [] def _post_teardown(self): """Perform post-test things.""" pass def settings(self, **kwargs): """ A context manager that temporarily sets a setting and reverts to the original value when exiting the context. """ return override_settings(**kwargs) def modify_settings(self, **kwargs): """ A context manager that temporarily applies changes a list setting and reverts back to the original value when exiting the context. """ return modify_settings(**kwargs) def assertRedirects(self, response, expected_url, status_code=302, target_status_code=200, msg_prefix='', fetch_redirect_response=True): """ Assert that a response redirected to a specific URL and that the redirect URL can be loaded. Won't work for external links since it uses the test client to do a request (use fetch_redirect_response=False to check such links without fetching them). 
""" if msg_prefix: msg_prefix += ": " if hasattr(response, 'redirect_chain'): # The request was a followed redirect self.assertTrue( response.redirect_chain, msg_prefix + "Response didn't redirect as expected: Response code was %d (expected %d)" % (response.status_code, status_code) ) self.assertEqual( response.redirect_chain[0][1], status_code, msg_prefix + "Initial response didn't redirect as expected: Response code was %d (expected %d)" % (response.redirect_chain[0][1], status_code) ) url, status_code = response.redirect_chain[-1] self.assertEqual( response.status_code, target_status_code, msg_prefix + "Response didn't redirect as expected: Final Response code was %d (expected %d)" % (response.status_code, target_status_code) ) else: # Not a followed redirect self.assertEqual( response.status_code, status_code, msg_prefix + "Response didn't redirect as expected: Response code was %d (expected %d)" % (response.status_code, status_code) ) url = response.url scheme, netloc, path, query, fragment = urlsplit(url) # Prepend the request path to handle relative path redirects. if not path.startswith('/'): url = urljoin(response.request['PATH_INFO'], url) path = urljoin(response.request['PATH_INFO'], path) if fetch_redirect_response: # netloc might be empty, or in cases where Django tests the # HTTP scheme, the convention is for netloc to be 'testserver'. # Trust both as "internal" URLs here. domain, port = split_domain_port(netloc) if domain and not validate_host(domain, settings.ALLOWED_HOSTS): raise ValueError( "The test client is unable to fetch remote URLs (got %s). " "If the host is served by Django, add '%s' to ALLOWED_HOSTS. " "Otherwise, use assertRedirects(..., fetch_redirect_response=False)." % (url, domain) ) # Get the redirection page, using the same client that was used # to obtain the original response. extra = response.client.extra or {} redirect_response = response.client.get( path, QueryDict(query), secure=(scheme == 'https'), **extra, ) self.assertEqual( redirect_response.status_code, target_status_code, msg_prefix + "Couldn't retrieve redirection page '%s': response code was %d (expected %d)" % (path, redirect_response.status_code, target_status_code) ) self.assertURLEqual( url, expected_url, msg_prefix + "Response redirected to '%s', expected '%s'" % (url, expected_url) ) def assertURLEqual(self, url1, url2, msg_prefix=''): """ Assert that two URLs are the same, ignoring the order of query string parameters except for parameters with the same name. For example, /path/?x=1&y=2 is equal to /path/?y=2&x=1, but /path/?a=1&a=2 isn't equal to /path/?a=2&a=1. """ def normalize(url): """Sort the URL's query string parameters.""" url = str(url) # Coerce reverse_lazy() URLs. scheme, netloc, path, params, query, fragment = urlparse(url) query_parts = sorted(parse_qsl(query)) return urlunparse((scheme, netloc, path, params, urlencode(query_parts), fragment)) self.assertEqual( normalize(url1), normalize(url2), msg_prefix + "Expected '%s' to equal '%s'." % (url1, url2) ) def _assert_contains(self, response, text, status_code, msg_prefix, html): # If the response supports deferred rendering and hasn't been rendered # yet, then ensure that it does get rendered before proceeding further. 
if hasattr(response, 'render') and callable(response.render) and not response.is_rendered: response.render() if msg_prefix: msg_prefix += ": " self.assertEqual( response.status_code, status_code, msg_prefix + "Couldn't retrieve content: Response code was %d" " (expected %d)" % (response.status_code, status_code) ) if response.streaming: content = b''.join(response.streaming_content) else: content = response.content if not isinstance(text, bytes) or html: text = str(text) content = content.decode(response.charset) text_repr = "'%s'" % text else: text_repr = repr(text) if html: content = assert_and_parse_html(self, content, None, "Response's content is not valid HTML:") text = assert_and_parse_html(self, text, None, "Second argument is not valid HTML:") real_count = content.count(text) return (text_repr, real_count, msg_prefix) def assertContains(self, response, text, count=None, status_code=200, msg_prefix='', html=False): """ Assert that a response indicates that some content was retrieved successfully, (i.e., the HTTP status code was as expected) and that ``text`` occurs ``count`` times in the content of the response. If ``count`` is None, the count doesn't matter - the assertion is true if the text occurs at least once in the response. """ text_repr, real_count, msg_prefix = self._assert_contains( response, text, status_code, msg_prefix, html) if count is not None: self.assertEqual( real_count, count, msg_prefix + "Found %d instances of %s in response (expected %d)" % (real_count, text_repr, count) ) else: self.assertTrue(real_count != 0, msg_prefix + "Couldn't find %s in response" % text_repr) def assertNotContains(self, response, text, status_code=200, msg_prefix='', html=False): """ Assert that a response indicates that some content was retrieved successfully, (i.e., the HTTP status code was as expected) and that ``text`` doesn't occur in the content of the response. """ text_repr, real_count, msg_prefix = self._assert_contains( response, text, status_code, msg_prefix, html) self.assertEqual(real_count, 0, msg_prefix + "Response should not contain %s" % text_repr) def assertFormError(self, response, form, field, errors, msg_prefix=''): """ Assert that a form used to render the response has a specific field error. """ if msg_prefix: msg_prefix += ": " # Put context(s) into a list to simplify processing. contexts = to_list(response.context) if not contexts: self.fail(msg_prefix + "Response did not use any contexts to render the response") # Put error(s) into a list to simplify processing. errors = to_list(errors) # Search all contexts for the error. 
found_form = False for i, context in enumerate(contexts): if form not in context: continue found_form = True for err in errors: if field: if field in context[form].errors: field_errors = context[form].errors[field] self.assertTrue( err in field_errors, msg_prefix + "The field '%s' on form '%s' in" " context %d does not contain the error '%s'" " (actual errors: %s)" % (field, form, i, err, repr(field_errors)) ) elif field in context[form].fields: self.fail( msg_prefix + "The field '%s' on form '%s' in context %d contains no errors" % (field, form, i) ) else: self.fail( msg_prefix + "The form '%s' in context %d does not contain the field '%s'" % (form, i, field) ) else: non_field_errors = context[form].non_field_errors() self.assertTrue( err in non_field_errors, msg_prefix + "The form '%s' in context %d does not" " contain the non-field error '%s'" " (actual errors: %s)" % (form, i, err, non_field_errors or 'none') ) if not found_form: self.fail(msg_prefix + "The form '%s' was not used to render the response" % form) def assertFormsetError(self, response, formset, form_index, field, errors, msg_prefix=''): """ Assert that a formset used to render the response has a specific error. For field errors, specify the ``form_index`` and the ``field``. For non-field errors, specify the ``form_index`` and the ``field`` as None. For non-form errors, specify ``form_index`` as None and the ``field`` as None. """ # Add punctuation to msg_prefix if msg_prefix: msg_prefix += ": " # Put context(s) into a list to simplify processing. contexts = to_list(response.context) if not contexts: self.fail(msg_prefix + 'Response did not use any contexts to ' 'render the response') # Put error(s) into a list to simplify processing. errors = to_list(errors) # Search all contexts for the error. found_formset = False for i, context in enumerate(contexts): if formset not in context: continue found_formset = True for err in errors: if field is not None: if field in context[formset].forms[form_index].errors: field_errors = context[formset].forms[form_index].errors[field] self.assertTrue( err in field_errors, msg_prefix + "The field '%s' on formset '%s', " "form %d in context %d does not contain the " "error '%s' (actual errors: %s)" % (field, formset, form_index, i, err, repr(field_errors)) ) elif field in context[formset].forms[form_index].fields: self.fail( msg_prefix + "The field '%s' on formset '%s', form %d in context %d contains no errors" % (field, formset, form_index, i) ) else: self.fail( msg_prefix + "The formset '%s', form %d in context %d does not contain the field '%s'" % (formset, form_index, i, field) ) elif form_index is not None: non_field_errors = context[formset].forms[form_index].non_field_errors() self.assertFalse( not non_field_errors, msg_prefix + "The formset '%s', form %d in context %d " "does not contain any non-field errors." % (formset, form_index, i) ) self.assertTrue( err in non_field_errors, msg_prefix + "The formset '%s', form %d in context %d " "does not contain the non-field error '%s' (actual errors: %s)" % (formset, form_index, i, err, repr(non_field_errors)) ) else: non_form_errors = context[formset].non_form_errors() self.assertFalse( not non_form_errors, msg_prefix + "The formset '%s' in context %d does not " "contain any non-form errors." 
% (formset, i) ) self.assertTrue( err in non_form_errors, msg_prefix + "The formset '%s' in context %d does not " "contain the non-form error '%s' (actual errors: %s)" % (formset, i, err, repr(non_form_errors)) ) if not found_formset: self.fail(msg_prefix + "The formset '%s' was not used to render the response" % formset) def _assert_template_used(self, response, template_name, msg_prefix): if response is None and template_name is None: raise TypeError('response and/or template_name argument must be provided') if msg_prefix: msg_prefix += ": " if template_name is not None and response is not None and not hasattr(response, 'templates'): raise ValueError( "assertTemplateUsed() and assertTemplateNotUsed() are only " "usable on responses fetched using the Django test Client." ) if not hasattr(response, 'templates') or (response is None and template_name): if response: template_name = response response = None # use this template with context manager return template_name, None, msg_prefix template_names = [t.name for t in response.templates if t.name is not None] return None, template_names, msg_prefix def assertTemplateUsed(self, response=None, template_name=None, msg_prefix='', count=None): """ Assert that the template with the provided name was used in rendering the response. Also usable as context manager. """ context_mgr_template, template_names, msg_prefix = self._assert_template_used( response, template_name, msg_prefix) if context_mgr_template: # Use assertTemplateUsed as context manager. return _AssertTemplateUsedContext(self, context_mgr_template) if not template_names: self.fail(msg_prefix + "No templates used to render the response") self.assertTrue( template_name in template_names, msg_prefix + "Template '%s' was not a template used to render" " the response. Actual template(s) used: %s" % (template_name, ', '.join(template_names)) ) if count is not None: self.assertEqual( template_names.count(template_name), count, msg_prefix + "Template '%s' was expected to be rendered %d " "time(s) but was actually rendered %d time(s)." % (template_name, count, template_names.count(template_name)) ) def assertTemplateNotUsed(self, response=None, template_name=None, msg_prefix=''): """ Assert that the template with the provided name was NOT used in rendering the response. Also usable as context manager. """ context_mgr_template, template_names, msg_prefix = self._assert_template_used( response, template_name, msg_prefix ) if context_mgr_template: # Use assertTemplateNotUsed as context manager. return _AssertTemplateNotUsedContext(self, context_mgr_template) self.assertFalse( template_name in template_names, msg_prefix + "Template '%s' was used unexpectedly in rendering the response" % template_name ) @contextmanager def _assert_raises_or_warns_cm(self, func, cm_attr, expected_exception, expected_message): with func(expected_exception) as cm: yield cm self.assertIn(expected_message, str(getattr(cm, cm_attr))) def _assertFooMessage(self, func, cm_attr, expected_exception, expected_message, *args, **kwargs): callable_obj = None if args: callable_obj, *args = args cm = self._assert_raises_or_warns_cm(func, cm_attr, expected_exception, expected_message) # Assertion used in context manager fashion. if callable_obj is None: return cm # Assertion was passed a callable. with cm: callable_obj(*args, **kwargs) def assertRaisesMessage(self, expected_exception, expected_message, *args, **kwargs): """ Assert that expected_message is found in the message of a raised exception. 
Args: expected_exception: Exception class expected to be raised. expected_message: expected error message string value. args: Function to be called and extra positional args. kwargs: Extra kwargs. """ return self._assertFooMessage( self.assertRaises, 'exception', expected_exception, expected_message, *args, **kwargs ) def assertWarnsMessage(self, expected_warning, expected_message, *args, **kwargs): """ Same as assertRaisesMessage but for assertWarns() instead of assertRaises(). """ return self._assertFooMessage( self.assertWarns, 'warning', expected_warning, expected_message, *args, **kwargs ) # A similar method is available in Python 3.10+. if not PY310: @contextmanager def assertNoLogs(self, logger, level=None): """ Assert no messages are logged on the logger, with at least the given level. """ if isinstance(level, int): level = logging.getLevelName(level) elif level is None: level = 'INFO' try: with self.assertLogs(logger, level) as cm: yield except AssertionError as e: msg = e.args[0] expected_msg = f'no logs of level {level} or higher triggered on {logger}' if msg != expected_msg: raise e else: self.fail(f'Unexpected logs found: {cm.output!r}') def assertFieldOutput(self, fieldclass, valid, invalid, field_args=None, field_kwargs=None, empty_value=''): """ Assert that a form field behaves correctly with various inputs. Args: fieldclass: the class of the field to be tested. valid: a dictionary mapping valid inputs to their expected cleaned values. invalid: a dictionary mapping invalid inputs to one or more raised error messages. field_args: the args passed to instantiate the field field_kwargs: the kwargs passed to instantiate the field empty_value: the expected clean output for inputs in empty_values """ if field_args is None: field_args = [] if field_kwargs is None: field_kwargs = {} required = fieldclass(*field_args, **field_kwargs) optional = fieldclass(*field_args, **{**field_kwargs, 'required': False}) # test valid inputs for input, output in valid.items(): self.assertEqual(required.clean(input), output) self.assertEqual(optional.clean(input), output) # test invalid inputs for input, errors in invalid.items(): with self.assertRaises(ValidationError) as context_manager: required.clean(input) self.assertEqual(context_manager.exception.messages, errors) with self.assertRaises(ValidationError) as context_manager: optional.clean(input) self.assertEqual(context_manager.exception.messages, errors) # test required inputs error_required = [required.error_messages['required']] for e in required.empty_values: with self.assertRaises(ValidationError) as context_manager: required.clean(e) self.assertEqual(context_manager.exception.messages, error_required) self.assertEqual(optional.clean(e), empty_value) # test that max_length and min_length are always accepted if issubclass(fieldclass, CharField): field_kwargs.update({'min_length': 2, 'max_length': 20}) self.assertIsInstance(fieldclass(*field_args, **field_kwargs), fieldclass) def assertHTMLEqual(self, html1, html2, msg=None): """ Assert that two HTML snippets are semantically the same. Whitespace in most cases is ignored, and attribute ordering is not significant. The arguments must be valid HTML. 
""" dom1 = assert_and_parse_html(self, html1, msg, 'First argument is not valid HTML:') dom2 = assert_and_parse_html(self, html2, msg, 'Second argument is not valid HTML:') if dom1 != dom2: standardMsg = '%s != %s' % ( safe_repr(dom1, True), safe_repr(dom2, True)) diff = ('\n' + '\n'.join(difflib.ndiff( str(dom1).splitlines(), str(dom2).splitlines(), ))) standardMsg = self._truncateMessage(standardMsg, diff) self.fail(self._formatMessage(msg, standardMsg)) def assertHTMLNotEqual(self, html1, html2, msg=None): """Assert that two HTML snippets are not semantically equivalent.""" dom1 = assert_and_parse_html(self, html1, msg, 'First argument is not valid HTML:') dom2 = assert_and_parse_html(self, html2, msg, 'Second argument is not valid HTML:') if dom1 == dom2: standardMsg = '%s == %s' % ( safe_repr(dom1, True), safe_repr(dom2, True)) self.fail(self._formatMessage(msg, standardMsg)) def assertInHTML(self, needle, haystack, count=None, msg_prefix=''): needle = assert_and_parse_html(self, needle, None, 'First argument is not valid HTML:') haystack = assert_and_parse_html(self, haystack, None, 'Second argument is not valid HTML:') real_count = haystack.count(needle) if count is not None: self.assertEqual( real_count, count, msg_prefix + "Found %d instances of '%s' in response (expected %d)" % (real_count, needle, count) ) else: self.assertTrue(real_count != 0, msg_prefix + "Couldn't find '%s' in response" % needle) def assertJSONEqual(self, raw, expected_data, msg=None): """ Assert that the JSON fragments raw and expected_data are equal. Usual JSON non-significant whitespace rules apply as the heavyweight is delegated to the json library. """ try: data = json.loads(raw) except json.JSONDecodeError: self.fail("First argument is not valid JSON: %r" % raw) if isinstance(expected_data, str): try: expected_data = json.loads(expected_data) except ValueError: self.fail("Second argument is not valid JSON: %r" % expected_data) self.assertEqual(data, expected_data, msg=msg) def assertJSONNotEqual(self, raw, expected_data, msg=None): """ Assert that the JSON fragments raw and expected_data are not equal. Usual JSON non-significant whitespace rules apply as the heavyweight is delegated to the json library. """ try: data = json.loads(raw) except json.JSONDecodeError: self.fail("First argument is not valid JSON: %r" % raw) if isinstance(expected_data, str): try: expected_data = json.loads(expected_data) except json.JSONDecodeError: self.fail("Second argument is not valid JSON: %r" % expected_data) self.assertNotEqual(data, expected_data, msg=msg) def assertXMLEqual(self, xml1, xml2, msg=None): """ Assert that two XML snippets are semantically the same. Whitespace in most cases is ignored and attribute ordering is not significant. The arguments must be valid XML. """ try: result = compare_xml(xml1, xml2) except Exception as e: standardMsg = 'First or second argument is not valid XML\n%s' % e self.fail(self._formatMessage(msg, standardMsg)) else: if not result: standardMsg = '%s != %s' % (safe_repr(xml1, True), safe_repr(xml2, True)) diff = ('\n' + '\n'.join( difflib.ndiff(xml1.splitlines(), xml2.splitlines()) )) standardMsg = self._truncateMessage(standardMsg, diff) self.fail(self._formatMessage(msg, standardMsg)) def assertXMLNotEqual(self, xml1, xml2, msg=None): """ Assert that two XML snippets are not semantically equivalent. Whitespace in most cases is ignored and attribute ordering is not significant. The arguments must be valid XML. 
""" try: result = compare_xml(xml1, xml2) except Exception as e: standardMsg = 'First or second argument is not valid XML\n%s' % e self.fail(self._formatMessage(msg, standardMsg)) else: if result: standardMsg = '%s == %s' % (safe_repr(xml1, True), safe_repr(xml2, True)) self.fail(self._formatMessage(msg, standardMsg)) class TransactionTestCase(SimpleTestCase): # Subclasses can ask for resetting of auto increment sequence before each # test case reset_sequences = False # Subclasses can enable only a subset of apps for faster tests available_apps = None # Subclasses can define fixtures which will be automatically installed. fixtures = None databases = {DEFAULT_DB_ALIAS} _disallowed_database_msg = ( 'Database %(operation)s to %(alias)r are not allowed in this test. ' 'Add %(alias)r to %(test)s.databases to ensure proper test isolation ' 'and silence this failure.' ) # If transactions aren't available, Django will serialize the database # contents into a fixture during setup and flush and reload them # during teardown (as flush does not restore data from migrations). # This can be slow; this flag allows enabling on a per-case basis. serialized_rollback = False def _pre_setup(self): """ Perform pre-test setup: * If the class has an 'available_apps' attribute, restrict the app registry to these applications, then fire the post_migrate signal -- it must run with the correct set of applications for the test case. * If the class has a 'fixtures' attribute, install those fixtures. """ super()._pre_setup() if self.available_apps is not None: apps.set_available_apps(self.available_apps) setting_changed.send( sender=settings._wrapped.__class__, setting='INSTALLED_APPS', value=self.available_apps, enter=True, ) for db_name in self._databases_names(include_mirrors=False): emit_post_migrate_signal(verbosity=0, interactive=False, db=db_name) try: self._fixture_setup() except Exception: if self.available_apps is not None: apps.unset_available_apps() setting_changed.send( sender=settings._wrapped.__class__, setting='INSTALLED_APPS', value=settings.INSTALLED_APPS, enter=False, ) raise # Clear the queries_log so that it's less likely to overflow (a single # test probably won't execute 9K queries). If queries_log overflows, # then assertNumQueries() doesn't work. for db_name in self._databases_names(include_mirrors=False): connections[db_name].queries_log.clear() @classmethod def _databases_names(cls, include_mirrors=True): # Only consider allowed database aliases, including mirrors or not. return [ alias for alias in connections if alias in cls.databases and ( include_mirrors or not connections[alias].settings_dict['TEST']['MIRROR'] ) ] def _reset_sequences(self, db_name): conn = connections[db_name] if conn.features.supports_sequence_reset: sql_list = conn.ops.sequence_reset_by_name_sql( no_style(), conn.introspection.sequence_list()) if sql_list: with transaction.atomic(using=db_name): with conn.cursor() as cursor: for sql in sql_list: cursor.execute(sql) def _fixture_setup(self): for db_name in self._databases_names(include_mirrors=False): # Reset sequences if self.reset_sequences: self._reset_sequences(db_name) # Provide replica initial data from migrated apps, if needed. 
if self.serialized_rollback and hasattr(connections[db_name], "_test_serialized_contents"): if self.available_apps is not None: apps.unset_available_apps() connections[db_name].creation.deserialize_db_from_string( connections[db_name]._test_serialized_contents ) if self.available_apps is not None: apps.set_available_apps(self.available_apps) if self.fixtures: # We have to use this slightly awkward syntax due to the fact # that we're using *args and **kwargs together. call_command('loaddata', *self.fixtures, **{'verbosity': 0, 'database': db_name}) def _should_reload_connections(self): return True def _post_teardown(self): """ Perform post-test things: * Flush the contents of the database to leave a clean slate. If the class has an 'available_apps' attribute, don't fire post_migrate. * Force-close the connection so the next test gets a clean cursor. """ try: self._fixture_teardown() super()._post_teardown() if self._should_reload_connections(): # Some DB cursors include SQL statements as part of cursor # creation. If you have a test that does a rollback, the effect # of these statements is lost, which can affect the operation of # tests (e.g., losing a timezone setting causing objects to be # created with the wrong time). To make sure this doesn't # happen, get a clean connection at the start of every test. for conn in connections.all(): conn.close() finally: if self.available_apps is not None: apps.unset_available_apps() setting_changed.send(sender=settings._wrapped.__class__, setting='INSTALLED_APPS', value=settings.INSTALLED_APPS, enter=False) def _fixture_teardown(self): # Allow TRUNCATE ... CASCADE and don't emit the post_migrate signal # when flushing only a subset of the apps for db_name in self._databases_names(include_mirrors=False): # Flush the database inhibit_post_migrate = ( self.available_apps is not None or ( # Inhibit the post_migrate signal when using serialized # rollback to avoid trying to recreate the serialized data. self.serialized_rollback and hasattr(connections[db_name], '_test_serialized_contents') ) ) call_command('flush', verbosity=0, interactive=False, database=db_name, reset_sequences=False, allow_cascade=self.available_apps is not None, inhibit_post_migrate=inhibit_post_migrate) def assertQuerysetEqual(self, qs, values, transform=None, ordered=True, msg=None): values = list(values) # RemovedInDjango41Warning. if transform is None: if ( values and isinstance(values[0], str) and qs and not isinstance(qs[0], str) ): # Transform qs using repr() if the first element of values is a # string and the first element of qs is not (which would be the # case if qs is a flattened values_list). warnings.warn( "In Django 4.1, repr() will not be called automatically " "on a queryset when compared to string values. Set an " "explicit 'transform' to silence this warning.", category=RemovedInDjango41Warning, stacklevel=2, ) transform = repr items = qs if transform is not None: items = map(transform, items) if not ordered: return self.assertDictEqual(Counter(items), Counter(values), msg=msg) # For example qs.iterator() could be passed as qs, but it does not # have 'ordered' attribute. if len(values) > 1 and hasattr(qs, 'ordered') and not qs.ordered: raise ValueError( 'Trying to compare non-ordered queryset against more than one ' 'ordered value.' 
) return self.assertEqual(list(items), values, msg=msg) def assertNumQueries(self, num, func=None, *args, using=DEFAULT_DB_ALIAS, **kwargs): conn = connections[using] context = _AssertNumQueriesContext(self, num, conn) if func is None: return context with context: func(*args, **kwargs) def connections_support_transactions(aliases=None): """ Return whether or not all (or specified) connections support transactions. """ conns = connections.all() if aliases is None else (connections[alias] for alias in aliases) return all(conn.features.supports_transactions for conn in conns) class TestData: """ Descriptor to provide TestCase instance isolation for attributes assigned during the setUpTestData() phase. Allow safe alteration of objects assigned in setUpTestData() by test methods by exposing deep copies instead of the original objects. Objects are deep copied using a memo kept on the test case instance in order to maintain their original relationships. """ memo_attr = '_testdata_memo' def __init__(self, name, data): self.name = name self.data = data def get_memo(self, testcase): try: memo = getattr(testcase, self.memo_attr) except AttributeError: memo = {} setattr(testcase, self.memo_attr, memo) return memo def __get__(self, instance, owner): if instance is None: return self.data memo = self.get_memo(instance) try: data = deepcopy(self.data, memo) except TypeError: # RemovedInDjango41Warning. msg = ( "Assigning objects which don't support copy.deepcopy() during " "setUpTestData() is deprecated. Either assign the %s " "attribute during setUpClass() or setUp(), or add support for " "deepcopy() to %s.%s.%s." ) % ( self.name, owner.__module__, owner.__qualname__, self.name, ) warnings.warn(msg, category=RemovedInDjango41Warning, stacklevel=2) data = self.data setattr(instance, self.name, data) return data def __repr__(self): return '<TestData: name=%r, data=%r>' % (self.name, self.data) class TestCase(TransactionTestCase): """ Similar to TransactionTestCase, but use `transaction.atomic()` to achieve test isolation. In most situations, TestCase should be preferred to TransactionTestCase as it allows faster execution. However, there are some situations where using TransactionTestCase might be necessary (e.g. testing some transactional behavior). On database backends with no transaction support, TestCase behaves as TransactionTestCase. """ @classmethod def _enter_atomics(cls): """Open atomic blocks for multiple databases.""" atomics = {} for db_name in cls._databases_names(): atomics[db_name] = transaction.atomic(using=db_name) atomics[db_name].__enter__() return atomics @classmethod def _rollback_atomics(cls, atomics): """Rollback atomic blocks opened by the previous method.""" for db_name in reversed(cls._databases_names()): transaction.set_rollback(True, using=db_name) atomics[db_name].__exit__(None, None, None) @classmethod def _databases_support_transactions(cls): return connections_support_transactions(cls.databases) @classmethod def setUpClass(cls): super().setUpClass() if not cls._databases_support_transactions(): return # Disable the durability check to allow testing durable atomic blocks # in a transaction for performance reasons. 
transaction.Atomic._ensure_durability = False try: cls.cls_atomics = cls._enter_atomics() if cls.fixtures: for db_name in cls._databases_names(include_mirrors=False): try: call_command('loaddata', *cls.fixtures, **{'verbosity': 0, 'database': db_name}) except Exception: cls._rollback_atomics(cls.cls_atomics) cls._remove_databases_failures() raise pre_attrs = cls.__dict__.copy() try: cls.setUpTestData() except Exception: cls._rollback_atomics(cls.cls_atomics) cls._remove_databases_failures() raise for name, value in cls.__dict__.items(): if value is not pre_attrs.get(name): setattr(cls, name, TestData(name, value)) except Exception: transaction.Atomic._ensure_durability = True raise @classmethod def tearDownClass(cls): transaction.Atomic._ensure_durability = True if cls._databases_support_transactions(): cls._rollback_atomics(cls.cls_atomics) for conn in connections.all(): conn.close() super().tearDownClass() @classmethod def setUpTestData(cls): """Load initial data for the TestCase.""" pass def _should_reload_connections(self): if self._databases_support_transactions(): return False return super()._should_reload_connections() def _fixture_setup(self): if not self._databases_support_transactions(): # If the backend does not support transactions, we should reload # class data before each test self.setUpTestData() return super()._fixture_setup() assert not self.reset_sequences, 'reset_sequences cannot be used on TestCase instances' self.atomics = self._enter_atomics() def _fixture_teardown(self): if not self._databases_support_transactions(): return super()._fixture_teardown() try: for db_name in reversed(self._databases_names()): if self._should_check_constraints(connections[db_name]): connections[db_name].check_constraints() finally: self._rollback_atomics(self.atomics) def _should_check_constraints(self, connection): return ( connection.features.can_defer_constraint_checks and not connection.needs_rollback and connection.is_usable() ) @classmethod @contextmanager def captureOnCommitCallbacks(cls, *, using=DEFAULT_DB_ALIAS, execute=False): """Context manager to capture transaction.on_commit() callbacks.""" callbacks = [] start_count = len(connections[using].run_on_commit) try: yield callbacks finally: run_on_commit = connections[using].run_on_commit[start_count:] callbacks[:] = [func for sids, func in run_on_commit] if execute: for callback in callbacks: callback() class CheckCondition: """Descriptor class for deferred condition checking.""" def __init__(self, *conditions): self.conditions = conditions def add_condition(self, condition, reason): return self.__class__(*self.conditions, (condition, reason)) def __get__(self, instance, cls=None): # Trigger access for all bases. if any(getattr(base, '__unittest_skip__', False) for base in cls.__bases__): return True for condition, reason in self.conditions: if condition(): # Override this descriptor's value and set the skip reason. cls.__unittest_skip__ = True cls.__unittest_skip_why__ = reason return True return False def _deferredSkip(condition, reason, name): def decorator(test_func): nonlocal condition if not (isinstance(test_func, type) and issubclass(test_func, unittest.TestCase)): @wraps(test_func) def skip_wrapper(*args, **kwargs): if (args and isinstance(args[0], unittest.TestCase) and connection.alias not in getattr(args[0], 'databases', {})): raise ValueError( "%s cannot be used on %s as %s doesn't allow queries " "against the %r database." 
% ( name, args[0], args[0].__class__.__qualname__, connection.alias, ) ) if condition(): raise unittest.SkipTest(reason) return test_func(*args, **kwargs) test_item = skip_wrapper else: # Assume a class is decorated test_item = test_func databases = getattr(test_item, 'databases', None) if not databases or connection.alias not in databases: # Defer raising to allow importing test class's module. def condition(): raise ValueError( "%s cannot be used on %s as it doesn't allow queries " "against the '%s' database." % ( name, test_item, connection.alias, ) ) # Retrieve the possibly existing value from the class's dict to # avoid triggering the descriptor. skip = test_func.__dict__.get('__unittest_skip__') if isinstance(skip, CheckCondition): test_item.__unittest_skip__ = skip.add_condition(condition, reason) elif skip is not True: test_item.__unittest_skip__ = CheckCondition((condition, reason)) return test_item return decorator def skipIfDBFeature(*features): """Skip a test if a database has at least one of the named features.""" return _deferredSkip( lambda: any(getattr(connection.features, feature, False) for feature in features), "Database has feature(s) %s" % ", ".join(features), 'skipIfDBFeature', ) def skipUnlessDBFeature(*features): """Skip a test unless a database has all the named features.""" return _deferredSkip( lambda: not all(getattr(connection.features, feature, False) for feature in features), "Database doesn't support feature(s): %s" % ", ".join(features), 'skipUnlessDBFeature', ) def skipUnlessAnyDBFeature(*features): """Skip a test unless a database has any of the named features.""" return _deferredSkip( lambda: not any(getattr(connection.features, feature, False) for feature in features), "Database doesn't support any of the feature(s): %s" % ", ".join(features), 'skipUnlessAnyDBFeature', ) class QuietWSGIRequestHandler(WSGIRequestHandler): """ A WSGIRequestHandler that doesn't log to standard output any of the requests received, so as to not clutter the test result output. """ def log_message(*args): pass class FSFilesHandler(WSGIHandler): """ WSGI middleware that intercepts calls to a directory, as defined by one of the *_ROOT settings, and serves those files, publishing them under *_URL. """ def __init__(self, application): self.application = application self.base_url = urlparse(self.get_base_url()) super().__init__() def _should_handle(self, path): """ Check if the path should be handled. Ignore the path if: * the host is provided as part of the base_url * the request's path isn't under the media path (or equal) """ return path.startswith(self.base_url[2]) and not self.base_url[1] def file_path(self, url): """Return the relative path to the file on disk for the given URL.""" relative_url = url[len(self.base_url[2]):] return url2pathname(relative_url) def get_response(self, request): from django.http import Http404 if self._should_handle(request.path): try: return self.serve(request) except Http404: pass return super().get_response(request) def serve(self, request): os_rel_path = self.file_path(request.path) os_rel_path = posixpath.normpath(unquote(os_rel_path)) # Emulate behavior of django.contrib.staticfiles.views.serve() when it # invokes staticfiles' finders functionality. 
# TODO: Modify if/when that internal API is refactored final_rel_path = os_rel_path.replace('\\', '/').lstrip('/') return serve(request, final_rel_path, document_root=self.get_base_dir()) def __call__(self, environ, start_response): if not self._should_handle(get_path_info(environ)): return self.application(environ, start_response) return super().__call__(environ, start_response) class _StaticFilesHandler(FSFilesHandler): """ Handler for serving static files. A private class that is meant to be used solely as a convenience by LiveServerThread. """ def get_base_dir(self): return settings.STATIC_ROOT def get_base_url(self): return settings.STATIC_URL class _MediaFilesHandler(FSFilesHandler): """ Handler for serving the media files. A private class that is meant to be used solely as a convenience by LiveServerThread. """ def get_base_dir(self): return settings.MEDIA_ROOT def get_base_url(self): return settings.MEDIA_URL class LiveServerThread(threading.Thread): """Thread for running a live http server while the tests are running.""" server_class = ThreadedWSGIServer def __init__(self, host, static_handler, connections_override=None, port=0): self.host = host self.port = port self.is_ready = threading.Event() self.error = None self.static_handler = static_handler self.connections_override = connections_override super().__init__() def run(self): """ Set up the live server and databases, and then loop over handling HTTP requests. """ if self.connections_override: # Override this thread's database connections with the ones # provided by the main thread. for alias, conn in self.connections_override.items(): connections[alias] = conn try: # Create the handler for serving static and media files handler = self.static_handler(_MediaFilesHandler(WSGIHandler())) self.httpd = self._create_server() # If binding to port zero, assign the port allocated by the OS. if self.port == 0: self.port = self.httpd.server_address[1] self.httpd.set_app(handler) self.is_ready.set() self.httpd.serve_forever() except Exception as e: self.error = e self.is_ready.set() finally: connections.close_all() def _create_server(self, connections_override=None): return self.server_class( (self.host, self.port), QuietWSGIRequestHandler, allow_reuse_address=False, connections_override=connections_override, ) def terminate(self): if hasattr(self, 'httpd'): # Stop the WSGI server self.httpd.shutdown() self.httpd.server_close() self.join() class LiveServerTestCase(TransactionTestCase): """ Do basically the same as TransactionTestCase but also launch a live HTTP server in a separate thread so that the tests may use another testing framework, such as Selenium for example, instead of the built-in dummy client. It inherits from TransactionTestCase instead of TestCase because the threads don't share the same transactions (unless if using in-memory sqlite) and each thread needs to commit all their transactions so that the other thread can see the changes. """ host = 'localhost' port = 0 server_thread_class = LiveServerThread static_handler = _StaticFilesHandler @classproperty def live_server_url(cls): return 'http://%s:%s' % (cls.host, cls.server_thread.port) @classproperty def allowed_host(cls): return cls.host @classmethod def _make_connections_override(cls): connections_override = {} for conn in connections.all(): # If using in-memory sqlite databases, pass the connections to # the server thread. 
if conn.vendor == 'sqlite' and conn.is_in_memory_db(): connections_override[conn.alias] = conn return connections_override @classmethod def setUpClass(cls): super().setUpClass() cls._live_server_modified_settings = modify_settings( ALLOWED_HOSTS={'append': cls.allowed_host}, ) cls._live_server_modified_settings.enable() connections_override = cls._make_connections_override() for conn in connections_override.values(): # Explicitly enable thread-shareability for this connection. conn.inc_thread_sharing() cls.server_thread = cls._create_server_thread(connections_override) cls.server_thread.daemon = True cls.server_thread.start() # Wait for the live server to be ready cls.server_thread.is_ready.wait() if cls.server_thread.error: # Clean up behind ourselves, since tearDownClass won't get called in # case of errors. cls._tearDownClassInternal() raise cls.server_thread.error @classmethod def _create_server_thread(cls, connections_override): return cls.server_thread_class( cls.host, cls.static_handler, connections_override=connections_override, port=cls.port, ) @classmethod def _tearDownClassInternal(cls): # Terminate the live server's thread. cls.server_thread.terminate() # Restore shared connections' non-shareability. for conn in cls.server_thread.connections_override.values(): conn.dec_thread_sharing() cls._live_server_modified_settings.disable() super().tearDownClass() @classmethod def tearDownClass(cls): cls._tearDownClassInternal() class SerializeMixin: """ Enforce serialization of TestCases that share a common resource. Define a common 'lockfile' for each set of TestCases to serialize. This file must exist on the filesystem. Place it early in the MRO in order to isolate setUpClass()/tearDownClass(). """ lockfile = None @classmethod def setUpClass(cls): if cls.lockfile is None: raise ValueError( "{}.lockfile isn't set. Set it to a unique value " "in the base class.".format(cls.__name__)) cls._lockfile = open(cls.lockfile) locks.lock(cls._lockfile, locks.LOCK_EX) super().setUpClass() @classmethod def tearDownClass(cls): super().tearDownClass() cls._lockfile.close()
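# Illustrative usage sketch (not part of Django itself): how a project's test
# suite might exercise the TestCase helpers defined above. It uses the bundled
# django.contrib.auth User model so the example stays self-contained; the
# query count and the on_commit() hook are illustrative assumptions, not
# behaviour mandated by the classes above.
from django.contrib.auth.models import User
from django.db import transaction
from django.test import TestCase


class UserHelpersTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        # Runs once per class inside the class-wide atomic block opened in
        # setUpClass(); the TestData descriptor hands each test a deep copy,
        # so per-test mutations don't leak between tests.
        cls.user = User.objects.create_user('alice', password='pw')

    def test_lookup_uses_a_single_query(self):
        # _AssertNumQueriesContext counts the queries issued on the connection.
        with self.assertNumQueries(1):
            User.objects.get(pk=self.user.pk)

    def test_on_commit_callbacks_are_captured(self):
        # Inside a TestCase everything runs in an atomic block, so
        # transaction.on_commit() hooks would normally never fire;
        # captureOnCommitCallbacks() collects (and optionally runs) them.
        with self.captureOnCommitCallbacks(execute=True) as callbacks:
            transaction.on_commit(lambda: None)
        self.assertEqual(len(callbacks), 1)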
""" Default Django settings. Override these with settings in the module pointed to by the DJANGO_SETTINGS_MODULE environment variable. """ # This is defined here as a do-nothing function because we can't import # django.utils.translation -- that module depends on the settings. def gettext_noop(s): return s #################### # CORE # #################### DEBUG = False # Whether the framework should propagate raw exceptions rather than catching # them. This is useful under some testing situations and should never be used # on a live site. DEBUG_PROPAGATE_EXCEPTIONS = False # People who get code error notifications. # In the format [('Full Name', '[email protected]'), ('Full Name', '[email protected]')] ADMINS = [] # List of IP addresses, as strings, that: # * See debug comments, when DEBUG is true # * Receive x-headers INTERNAL_IPS = [] # Hosts/domain names that are valid for this site. # "*" matches anything, ".example.com" matches example.com and all subdomains ALLOWED_HOSTS = [] # Local time zone for this installation. All choices can be found here: # https://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all # systems may support all possibilities). When USE_TZ is True, this is # interpreted as the default user time zone. TIME_ZONE = 'America/Chicago' # If you set this to True, Django will use timezone-aware datetimes. USE_TZ = False # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' # Languages we provide translations for, out of the box. LANGUAGES = [ ('af', gettext_noop('Afrikaans')), ('ar', gettext_noop('Arabic')), ('ar-dz', gettext_noop('Algerian Arabic')), ('ast', gettext_noop('Asturian')), ('az', gettext_noop('Azerbaijani')), ('bg', gettext_noop('Bulgarian')), ('be', gettext_noop('Belarusian')), ('bn', gettext_noop('Bengali')), ('br', gettext_noop('Breton')), ('bs', gettext_noop('Bosnian')), ('ca', gettext_noop('Catalan')), ('cs', gettext_noop('Czech')), ('cy', gettext_noop('Welsh')), ('da', gettext_noop('Danish')), ('de', gettext_noop('German')), ('dsb', gettext_noop('Lower Sorbian')), ('el', gettext_noop('Greek')), ('en', gettext_noop('English')), ('en-au', gettext_noop('Australian English')), ('en-gb', gettext_noop('British English')), ('eo', gettext_noop('Esperanto')), ('es', gettext_noop('Spanish')), ('es-ar', gettext_noop('Argentinian Spanish')), ('es-co', gettext_noop('Colombian Spanish')), ('es-mx', gettext_noop('Mexican Spanish')), ('es-ni', gettext_noop('Nicaraguan Spanish')), ('es-ve', gettext_noop('Venezuelan Spanish')), ('et', gettext_noop('Estonian')), ('eu', gettext_noop('Basque')), ('fa', gettext_noop('Persian')), ('fi', gettext_noop('Finnish')), ('fr', gettext_noop('French')), ('fy', gettext_noop('Frisian')), ('ga', gettext_noop('Irish')), ('gd', gettext_noop('Scottish Gaelic')), ('gl', gettext_noop('Galician')), ('he', gettext_noop('Hebrew')), ('hi', gettext_noop('Hindi')), ('hr', gettext_noop('Croatian')), ('hsb', gettext_noop('Upper Sorbian')), ('hu', gettext_noop('Hungarian')), ('hy', gettext_noop('Armenian')), ('ia', gettext_noop('Interlingua')), ('id', gettext_noop('Indonesian')), ('ig', gettext_noop('Igbo')), ('io', gettext_noop('Ido')), ('is', gettext_noop('Icelandic')), ('it', gettext_noop('Italian')), ('ja', gettext_noop('Japanese')), ('ka', gettext_noop('Georgian')), ('kab', gettext_noop('Kabyle')), ('kk', gettext_noop('Kazakh')), ('km', gettext_noop('Khmer')), ('kn', gettext_noop('Kannada')), ('ko', gettext_noop('Korean')), ('ky', 
gettext_noop('Kyrgyz')), ('lb', gettext_noop('Luxembourgish')), ('lt', gettext_noop('Lithuanian')), ('lv', gettext_noop('Latvian')), ('mk', gettext_noop('Macedonian')), ('ml', gettext_noop('Malayalam')), ('mn', gettext_noop('Mongolian')), ('mr', gettext_noop('Marathi')), ('my', gettext_noop('Burmese')), ('nb', gettext_noop('Norwegian Bokmål')), ('ne', gettext_noop('Nepali')), ('nl', gettext_noop('Dutch')), ('nn', gettext_noop('Norwegian Nynorsk')), ('os', gettext_noop('Ossetic')), ('pa', gettext_noop('Punjabi')), ('pl', gettext_noop('Polish')), ('pt', gettext_noop('Portuguese')), ('pt-br', gettext_noop('Brazilian Portuguese')), ('ro', gettext_noop('Romanian')), ('ru', gettext_noop('Russian')), ('sk', gettext_noop('Slovak')), ('sl', gettext_noop('Slovenian')), ('sq', gettext_noop('Albanian')), ('sr', gettext_noop('Serbian')), ('sr-latn', gettext_noop('Serbian Latin')), ('sv', gettext_noop('Swedish')), ('sw', gettext_noop('Swahili')), ('ta', gettext_noop('Tamil')), ('te', gettext_noop('Telugu')), ('tg', gettext_noop('Tajik')), ('th', gettext_noop('Thai')), ('tk', gettext_noop('Turkmen')), ('tr', gettext_noop('Turkish')), ('tt', gettext_noop('Tatar')), ('udm', gettext_noop('Udmurt')), ('uk', gettext_noop('Ukrainian')), ('ur', gettext_noop('Urdu')), ('uz', gettext_noop('Uzbek')), ('vi', gettext_noop('Vietnamese')), ('zh-hans', gettext_noop('Simplified Chinese')), ('zh-hant', gettext_noop('Traditional Chinese')), ] # Languages using BiDi (right-to-left) layout LANGUAGES_BIDI = ["he", "ar", "ar-dz", "fa", "ur"] # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True LOCALE_PATHS = [] # Settings for language cookie LANGUAGE_COOKIE_NAME = 'django_language' LANGUAGE_COOKIE_AGE = None LANGUAGE_COOKIE_DOMAIN = None LANGUAGE_COOKIE_PATH = '/' LANGUAGE_COOKIE_SECURE = False LANGUAGE_COOKIE_HTTPONLY = False LANGUAGE_COOKIE_SAMESITE = None # If you set this to True, Django will format dates, numbers and calendars # according to user current locale. USE_L10N = False # Not-necessarily-technical managers of the site. They get broken link # notifications and other various emails. MANAGERS = ADMINS # Default charset to use for all HttpResponse objects, if a MIME type isn't # manually specified. It's used to construct the Content-Type header. DEFAULT_CHARSET = 'utf-8' # Email address that error messages come from. SERVER_EMAIL = 'root@localhost' # Database connection info. If left empty, will default to the dummy backend. DATABASES = {} # Classes used to implement DB routing behavior. DATABASE_ROUTERS = [] # The email backend to use. For possible shortcuts see django.core.mail. # The default is to use the SMTP backend. # Third-party backends can be specified by providing a Python path # to a module that defines an EmailBackend class. EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' # Host for sending email. EMAIL_HOST = 'localhost' # Port for sending email. EMAIL_PORT = 25 # Whether to send SMTP 'Date' header in the local time zone or in UTC. EMAIL_USE_LOCALTIME = False # Optional SMTP authentication information for EMAIL_HOST. EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' EMAIL_USE_TLS = False EMAIL_USE_SSL = False EMAIL_SSL_CERTFILE = None EMAIL_SSL_KEYFILE = None EMAIL_TIMEOUT = None # List of strings representing installed apps. INSTALLED_APPS = [] TEMPLATES = [] # Default form rendering class. 
FORM_RENDERER = 'django.forms.renderers.DjangoTemplates' # Default email address to use for various automated correspondence from # the site managers. DEFAULT_FROM_EMAIL = 'webmaster@localhost' # Subject-line prefix for email messages send with django.core.mail.mail_admins # or ...mail_managers. Make sure to include the trailing space. EMAIL_SUBJECT_PREFIX = '[Django] ' # Whether to append trailing slashes to URLs. APPEND_SLASH = True # Whether to prepend the "www." subdomain to URLs that don't have it. PREPEND_WWW = False # Override the server-derived value of SCRIPT_NAME FORCE_SCRIPT_NAME = None # List of compiled regular expression objects representing User-Agent strings # that are not allowed to visit any page, systemwide. Use this for bad # robots/crawlers. Here are a few examples: # import re # DISALLOWED_USER_AGENTS = [ # re.compile(r'^NaverBot.*'), # re.compile(r'^EmailSiphon.*'), # re.compile(r'^SiteSucker.*'), # re.compile(r'^sohu-search'), # ] DISALLOWED_USER_AGENTS = [] ABSOLUTE_URL_OVERRIDES = {} # List of compiled regular expression objects representing URLs that need not # be reported by BrokenLinkEmailsMiddleware. Here are a few examples: # import re # IGNORABLE_404_URLS = [ # re.compile(r'^/apple-touch-icon.*\.png$'), # re.compile(r'^/favicon.ico$'), # re.compile(r'^/robots.txt$'), # re.compile(r'^/phpmyadmin/'), # re.compile(r'\.(cgi|php|pl)$'), # ] IGNORABLE_404_URLS = [] # A secret key for this particular Django installation. Used in secret-key # hashing algorithms. Set this in your settings, or Django will complain # loudly. SECRET_KEY = '' # Default file storage mechanism that holds media. DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' # Absolute filesystem path to the directory that will hold user-uploaded files. # Example: "/var/www/example.com/media/" MEDIA_ROOT = '' # URL that handles the media served from MEDIA_ROOT. # Examples: "http://example.com/media/", "http://media.example.com/" MEDIA_URL = '' # Absolute path to the directory static files should be collected to. # Example: "/var/www/example.com/static/" STATIC_ROOT = None # URL that handles the static files served from STATIC_ROOT. # Example: "http://example.com/static/", "http://static.example.com/" STATIC_URL = None # List of upload handler classes to be applied in order. FILE_UPLOAD_HANDLERS = [ 'django.core.files.uploadhandler.MemoryFileUploadHandler', 'django.core.files.uploadhandler.TemporaryFileUploadHandler', ] # Maximum size, in bytes, of a request before it will be streamed to the # file system instead of into memory. FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB # Maximum size in bytes of request data (excluding file uploads) that will be # read before a SuspiciousOperation (RequestDataTooBig) is raised. DATA_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB # Maximum number of GET/POST parameters that will be read before a # SuspiciousOperation (TooManyFieldsSent) is raised. DATA_UPLOAD_MAX_NUMBER_FIELDS = 1000 # Directory in which upload streamed files will be temporarily saved. A value of # `None` will make Django use the operating system's default temporary directory # (i.e. "/tmp" on *nix systems). FILE_UPLOAD_TEMP_DIR = None # The numeric mode to set newly-uploaded files to. The value should be a mode # you'd pass directly to os.chmod; see https://docs.python.org/library/os.html#files-and-directories. FILE_UPLOAD_PERMISSIONS = 0o644 # The numeric mode to assign to newly-created directories, when uploading files. 
# The value should be a mode as you'd pass to os.chmod; # see https://docs.python.org/library/os.html#files-and-directories. FILE_UPLOAD_DIRECTORY_PERMISSIONS = None # Python module path where user will place custom format definition. # The directory where this setting is pointing should contain subdirectories # named as the locales, containing a formats.py file # (i.e. "myproject.locale" for myproject/locale/en/formats.py etc. use) FORMAT_MODULE_PATH = None # Default formatting for date objects. See all available format strings here: # https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date DATE_FORMAT = 'N j, Y' # Default formatting for datetime objects. See all available format strings here: # https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date DATETIME_FORMAT = 'N j, Y, P' # Default formatting for time objects. See all available format strings here: # https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date TIME_FORMAT = 'P' # Default formatting for date objects when only the year and month are relevant. # See all available format strings here: # https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date YEAR_MONTH_FORMAT = 'F Y' # Default formatting for date objects when only the month and day are relevant. # See all available format strings here: # https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date MONTH_DAY_FORMAT = 'F j' # Default short formatting for date objects. See all available format strings here: # https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date SHORT_DATE_FORMAT = 'm/d/Y' # Default short formatting for datetime objects. # See all available format strings here: # https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date SHORT_DATETIME_FORMAT = 'm/d/Y P' # Default formats to be used when parsing dates from input boxes, in order # See all available format string here: # https://docs.python.org/library/datetime.html#strftime-behavior # * Note that these format strings are different from the ones to display dates DATE_INPUT_FORMATS = [ '%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06' '%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006' '%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006' '%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006' '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006' ] # Default formats to be used when parsing times from input boxes, in order # See all available format string here: # https://docs.python.org/library/datetime.html#strftime-behavior # * Note that these format strings are different from the ones to display dates TIME_INPUT_FORMATS = [ '%H:%M:%S', # '14:30:59' '%H:%M:%S.%f', # '14:30:59.000200' '%H:%M', # '14:30' ] # Default formats to be used when parsing dates and times from input boxes, # in order # See all available format string here: # https://docs.python.org/library/datetime.html#strftime-behavior # * Note that these format strings are different from the ones to display dates DATETIME_INPUT_FORMATS = [ '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' '%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200' '%Y-%m-%d %H:%M', # '2006-10-25 14:30' '%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59' '%m/%d/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200' '%m/%d/%Y %H:%M', # '10/25/2006 14:30' '%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59' '%m/%d/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200' '%m/%d/%y %H:%M', # '10/25/06 14:30' ] # First day of week, to be used on calendars # 0 means Sunday, 1 means Monday... 
FIRST_DAY_OF_WEEK = 0 # Decimal separator symbol DECIMAL_SEPARATOR = '.' # Boolean that sets whether to add thousand separator when formatting numbers USE_THOUSAND_SEPARATOR = False # Number of digits that will be together, when splitting them by # THOUSAND_SEPARATOR. 0 means no grouping, 3 means splitting by thousands... NUMBER_GROUPING = 0 # Thousand separator symbol THOUSAND_SEPARATOR = ',' # The tablespaces to use for each model when not specified otherwise. DEFAULT_TABLESPACE = '' DEFAULT_INDEX_TABLESPACE = '' # Default primary key field type. DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' # Default X-Frame-Options header value X_FRAME_OPTIONS = 'DENY' USE_X_FORWARDED_HOST = False USE_X_FORWARDED_PORT = False # The Python dotted path to the WSGI application that Django's internal server # (runserver) will use. If `None`, the return value of # 'django.core.wsgi.get_wsgi_application' is used, thus preserving the same # behavior as previous versions of Django. Otherwise this should point to an # actual WSGI application object. WSGI_APPLICATION = None # If your Django app is behind a proxy that sets a header to specify secure # connections, AND that proxy ensures that user-submitted headers with the # same name are ignored (so that people can't spoof it), set this value to # a tuple of (header_name, header_value). For any requests that come in with # that header/value, request.is_secure() will return True. # WARNING! Only set this if you fully understand what you're doing. Otherwise, # you may be opening yourself up to a security risk. SECURE_PROXY_SSL_HEADER = None ############## # MIDDLEWARE # ############## # List of middleware to use. Order is important; in the request phase, these # middleware will be applied in the order given, and in the response # phase the middleware will be applied in reverse order. MIDDLEWARE = [] ############ # SESSIONS # ############ # Cache to store session data if using the cache session backend. SESSION_CACHE_ALIAS = 'default' # Cookie name. This can be whatever you want. SESSION_COOKIE_NAME = 'sessionid' # Age of cookie, in seconds (default: 2 weeks). SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2 # A string like "example.com", or None for standard domain cookie. SESSION_COOKIE_DOMAIN = None # Whether the session cookie should be secure (https:// only). SESSION_COOKIE_SECURE = False # The path of the session cookie. SESSION_COOKIE_PATH = '/' # Whether to use the HttpOnly flag. SESSION_COOKIE_HTTPONLY = True # Whether to set the flag restricting cookie leaks on cross-site requests. # This can be 'Lax', 'Strict', 'None', or False to disable the flag. SESSION_COOKIE_SAMESITE = 'Lax' # Whether to save the session data on every request. SESSION_SAVE_EVERY_REQUEST = False # Whether a user's session cookie expires when the Web browser is closed. SESSION_EXPIRE_AT_BROWSER_CLOSE = False # The module to store session data SESSION_ENGINE = 'django.contrib.sessions.backends.db' # Directory to store session files if using the file session module. If None, # the backend will use a sensible default. SESSION_FILE_PATH = None # class to serialize session data SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer' ######### # CACHE # ######### # The cache backends to use. 
CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', } } CACHE_MIDDLEWARE_KEY_PREFIX = '' CACHE_MIDDLEWARE_SECONDS = 600 CACHE_MIDDLEWARE_ALIAS = 'default' ################## # AUTHENTICATION # ################## AUTH_USER_MODEL = 'auth.User' AUTHENTICATION_BACKENDS = ['django.contrib.auth.backends.ModelBackend'] LOGIN_URL = '/accounts/login/' LOGIN_REDIRECT_URL = '/accounts/profile/' LOGOUT_REDIRECT_URL = None # The number of seconds a password reset link is valid for (default: 3 days). PASSWORD_RESET_TIMEOUT = 60 * 60 * 24 * 3 # the first hasher in this list is the preferred algorithm. any # password using different algorithms will be converted automatically # upon login PASSWORD_HASHERS = [ 'django.contrib.auth.hashers.PBKDF2PasswordHasher', 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher', 'django.contrib.auth.hashers.Argon2PasswordHasher', 'django.contrib.auth.hashers.BCryptSHA256PasswordHasher', ] AUTH_PASSWORD_VALIDATORS = [] ########### # SIGNING # ########### SIGNING_BACKEND = 'django.core.signing.TimestampSigner' ######## # CSRF # ######## # Dotted path to callable to be used as view when a request is # rejected by the CSRF middleware. CSRF_FAILURE_VIEW = 'django.views.csrf.csrf_failure' # Settings for CSRF cookie. CSRF_COOKIE_NAME = 'csrftoken' CSRF_COOKIE_AGE = 60 * 60 * 24 * 7 * 52 CSRF_COOKIE_DOMAIN = None CSRF_COOKIE_PATH = '/' CSRF_COOKIE_SECURE = False CSRF_COOKIE_HTTPONLY = False CSRF_COOKIE_SAMESITE = 'Lax' CSRF_HEADER_NAME = 'HTTP_X_CSRFTOKEN' CSRF_TRUSTED_ORIGINS = [] CSRF_USE_SESSIONS = False ############ # MESSAGES # ############ # Class to use as messages backend MESSAGE_STORAGE = 'django.contrib.messages.storage.fallback.FallbackStorage' # Default values of MESSAGE_LEVEL and MESSAGE_TAGS are defined within # django.contrib.messages to avoid imports in this settings file. ########### # LOGGING # ########### # The callable to use to configure logging LOGGING_CONFIG = 'logging.config.dictConfig' # Custom logging configuration. LOGGING = {} # Default exception reporter class used in case none has been # specifically assigned to the HttpRequest instance. DEFAULT_EXCEPTION_REPORTER = 'django.views.debug.ExceptionReporter' # Default exception reporter filter class used in case none has been # specifically assigned to the HttpRequest instance. DEFAULT_EXCEPTION_REPORTER_FILTER = 'django.views.debug.SafeExceptionReporterFilter' ########### # TESTING # ########### # The name of the class to use to run the test suite TEST_RUNNER = 'django.test.runner.DiscoverRunner' # Apps that don't need to be serialized at test database creation time # (only apps with migrations are to start with) TEST_NON_SERIALIZED_APPS = [] ############ # FIXTURES # ############ # The list of directories to search for fixtures FIXTURE_DIRS = [] ############### # STATICFILES # ############### # A list of locations of additional static files STATICFILES_DIRS = [] # The default file storage backend used during the build process STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage' # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = [ 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ] ############## # MIGRATIONS # ############## # Migration module overrides for apps, by app label. 
MIGRATION_MODULES = {} ################# # SYSTEM CHECKS # ################# # List of all issues generated by system checks that should be silenced. Light # issues like warnings, infos or debugs will not generate a message. Silencing # serious issues like errors and criticals does not result in hiding the # message, but Django will not stop you from e.g. running server. SILENCED_SYSTEM_CHECKS = [] ####################### # SECURITY MIDDLEWARE # ####################### SECURE_CONTENT_TYPE_NOSNIFF = True SECURE_CROSS_ORIGIN_OPENER_POLICY = 'same-origin' SECURE_HSTS_INCLUDE_SUBDOMAINS = False SECURE_HSTS_PRELOAD = False SECURE_HSTS_SECONDS = 0 SECURE_REDIRECT_EXEMPT = [] SECURE_REFERRER_POLICY = 'same-origin' SECURE_SSL_HOST = None SECURE_SSL_REDIRECT = False
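# Illustrative sketch (not part of Django itself): a minimal project settings
# module that overrides a handful of the defaults above. Django imports the
# module named by DJANGO_SETTINGS_MODULE and falls back to these
# global_settings values for anything the module doesn't define. The secret
# key, app list, and database name below are placeholders for illustration.
DEBUG = True
ALLOWED_HOSTS = ['localhost', '127.0.0.1']
SECRET_KEY = 'dev-only-placeholder-key'  # never reuse a checked-in key in production
INSTALLED_APPS = [
    'django.contrib.contenttypes',
    'django.contrib.auth',
]
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'db.sqlite3',
    }
}
USE_TZ = True  # opt in to timezone-aware datetimes (the default above is False)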
import functools import itertools import logging import os import signal import subprocess import sys import threading import time import traceback import weakref from collections import defaultdict from pathlib import Path from types import ModuleType from zipimport import zipimporter import django from django.apps import apps from django.core.signals import request_finished from django.dispatch import Signal from django.utils.functional import cached_property from django.utils.version import get_version_tuple autoreload_started = Signal() file_changed = Signal() DJANGO_AUTORELOAD_ENV = 'RUN_MAIN' logger = logging.getLogger('django.utils.autoreload') # If an error is raised while importing a file, it's not placed in sys.modules. # This means that any future modifications aren't caught. Keep a list of these # file paths to allow watching them in the future. _error_files = [] _exception = None try: import termios except ImportError: termios = None try: import pywatchman except ImportError: pywatchman = None def is_django_module(module): """Return True if the given module is nested under Django.""" return module.__name__.startswith('django.') def is_django_path(path): """Return True if the given file path is nested under Django.""" return Path(django.__file__).parent in Path(path).parents def check_errors(fn): @functools.wraps(fn) def wrapper(*args, **kwargs): global _exception try: fn(*args, **kwargs) except Exception: _exception = sys.exc_info() et, ev, tb = _exception if getattr(ev, 'filename', None) is None: # get the filename from the last item in the stack filename = traceback.extract_tb(tb)[-1][0] else: filename = ev.filename if filename not in _error_files: _error_files.append(filename) raise return wrapper def raise_last_exception(): global _exception if _exception is not None: raise _exception[1] def ensure_echo_on(): """ Ensure that echo mode is enabled. Some tools such as PDB disable it which causes usability issues after reload. """ if not termios or not sys.stdin.isatty(): return attr_list = termios.tcgetattr(sys.stdin) if not attr_list[3] & termios.ECHO: attr_list[3] |= termios.ECHO if hasattr(signal, 'SIGTTOU'): old_handler = signal.signal(signal.SIGTTOU, signal.SIG_IGN) else: old_handler = None termios.tcsetattr(sys.stdin, termios.TCSANOW, attr_list) if old_handler is not None: signal.signal(signal.SIGTTOU, old_handler) def iter_all_python_module_files(): # This is a hot path during reloading. Create a stable sorted list of # modules based on the module name and pass it to iter_modules_and_files(). # This ensures cached results are returned in the usual case that modules # aren't loaded on the fly. keys = sorted(sys.modules) modules = tuple(m for m in map(sys.modules.__getitem__, keys) if not isinstance(m, weakref.ProxyTypes)) return iter_modules_and_files(modules, frozenset(_error_files)) @functools.lru_cache(maxsize=1) def iter_modules_and_files(modules, extra_files): """Iterate through all modules needed to be watched.""" sys_file_paths = [] for module in modules: # During debugging (with PyDev) the 'typing.io' and 'typing.re' objects # are added to sys.modules, however they are types not modules and so # cause issues here. if not isinstance(module, ModuleType): continue if module.__name__ == '__main__': # __main__ (usually manage.py) doesn't always have a __spec__ set. # Handle this by falling back to using __file__, resolved below. # See https://docs.python.org/reference/import.html#main-spec # __file__ may not exists, e.g. when running ipdb debugger. 
if hasattr(module, '__file__'): sys_file_paths.append(module.__file__) continue if getattr(module, '__spec__', None) is None: continue spec = module.__spec__ # Modules could be loaded from places without a concrete location. If # this is the case, skip them. if spec.has_location: origin = spec.loader.archive if isinstance(spec.loader, zipimporter) else spec.origin sys_file_paths.append(origin) results = set() for filename in itertools.chain(sys_file_paths, extra_files): if not filename: continue path = Path(filename) try: if not path.exists(): # The module could have been removed, don't fail loudly if this # is the case. continue except ValueError as e: # Network filesystems may return null bytes in file paths. logger.debug('"%s" raised when resolving path: "%s"', e, path) continue resolved_path = path.resolve().absolute() results.add(resolved_path) return frozenset(results) @functools.lru_cache(maxsize=1) def common_roots(paths): """ Return a tuple of common roots that are shared between the given paths. File system watchers operate on directories and aren't cheap to create. Try to find the minimum set of directories to watch that encompass all of the files that need to be watched. """ # Inspired from Werkzeug: # https://github.com/pallets/werkzeug/blob/7477be2853df70a022d9613e765581b9411c3c39/werkzeug/_reloader.py # Create a sorted list of the path components, longest first. path_parts = sorted([x.parts for x in paths], key=len, reverse=True) tree = {} for chunks in path_parts: node = tree # Add each part of the path to the tree. for chunk in chunks: node = node.setdefault(chunk, {}) # Clear the last leaf in the tree. node.clear() # Turn the tree into a list of Path instances. def _walk(node, path): for prefix, child in node.items(): yield from _walk(child, path + (prefix,)) if not node: yield Path(*path) return tuple(_walk(tree, ())) def sys_path_directories(): """ Yield absolute directories from sys.path, ignoring entries that don't exist. """ for path in sys.path: path = Path(path) if not path.exists(): continue resolved_path = path.resolve().absolute() # If the path is a file (like a zip file), watch the parent directory. if resolved_path.is_file(): yield resolved_path.parent else: yield resolved_path def get_child_arguments(): """ Return the executable. This contains a workaround for Windows if the executable is reported to not have the .exe extension which can cause bugs on reloading. """ import __main__ py_script = Path(sys.argv[0]) args = [sys.executable] + ['-W%s' % o for o in sys.warnoptions] # __spec__ is set when the server was started with the `-m` option, # see https://docs.python.org/3/reference/import.html#main-spec # __spec__ may not exist, e.g. when running in a Conda env. if getattr(__main__, '__spec__', None) is not None and __main__.__spec__.parent: args += ['-m', __main__.__spec__.parent] args += sys.argv[1:] elif not py_script.exists(): # sys.argv[0] may not exist for several reasons on Windows. # It may exist with a .exe extension or have a -script.py suffix. exe_entrypoint = py_script.with_suffix('.exe') if exe_entrypoint.exists(): # Should be executed directly, ignoring sys.executable. return [exe_entrypoint, *sys.argv[1:]] script_entrypoint = py_script.with_name('%s-script.py' % py_script.name) if script_entrypoint.exists(): # Should be executed as usual. return [*args, script_entrypoint, *sys.argv[1:]] raise RuntimeError('Script %s does not exist.' 
% py_script) else: args += sys.argv return args def trigger_reload(filename): logger.info('%s changed, reloading.', filename) sys.exit(3) def restart_with_reloader(): new_environ = {**os.environ, DJANGO_AUTORELOAD_ENV: 'true'} args = get_child_arguments() while True: p = subprocess.run(args, env=new_environ, close_fds=False) if p.returncode != 3: return p.returncode class BaseReloader: def __init__(self): self.extra_files = set() self.directory_globs = defaultdict(set) self._stop_condition = threading.Event() def watch_dir(self, path, glob): path = Path(path) try: path = path.absolute() except FileNotFoundError: logger.debug( 'Unable to watch directory %s as it cannot be resolved.', path, exc_info=True, ) return logger.debug('Watching dir %s with glob %s.', path, glob) self.directory_globs[path].add(glob) def watched_files(self, include_globs=True): """ Yield all files that need to be watched, including module files and files within globs. """ yield from iter_all_python_module_files() yield from self.extra_files if include_globs: for directory, patterns in self.directory_globs.items(): for pattern in patterns: yield from directory.glob(pattern) def wait_for_apps_ready(self, app_reg, django_main_thread): """ Wait until Django reports that the apps have been loaded. If the given thread has terminated before the apps are ready, then a SyntaxError or other non-recoverable error has been raised. In that case, stop waiting for the apps_ready event and continue processing. Return True if the thread is alive and the ready event has been triggered, or False if the thread is terminated while waiting for the event. """ while django_main_thread.is_alive(): if app_reg.ready_event.wait(timeout=0.1): return True else: logger.debug('Main Django thread has terminated before apps are ready.') return False def run(self, django_main_thread): logger.debug('Waiting for apps ready_event.') self.wait_for_apps_ready(apps, django_main_thread) from django.urls import get_resolver # Prevent a race condition where URL modules aren't loaded when the # reloader starts by accessing the urlconf_module property. try: get_resolver().urlconf_module except Exception: # Loading the urlconf can result in errors during development. # If this occurs then swallow the error and continue. pass logger.debug('Apps ready_event triggered. Sending autoreload_started signal.') autoreload_started.send(sender=self) self.run_loop() def run_loop(self): ticker = self.tick() while not self.should_stop: try: next(ticker) except StopIteration: break self.stop() def tick(self): """ This generator is called in a loop from run_loop. It's important that the method takes care of pausing or otherwise waiting for a period of time. This split between run_loop() and tick() is to improve the testability of the reloader implementations by decoupling the work they do from the loop. """ raise NotImplementedError('subclasses must implement tick().') @classmethod def check_availability(cls): raise NotImplementedError('subclasses must implement check_availability().') def notify_file_changed(self, path): results = file_changed.send(sender=self, file_path=path) logger.debug('%s notified as changed. Signal results: %s.', path, results) if not any(res[1] for res in results): trigger_reload(path) # These are primarily used for testing. @property def should_stop(self): return self._stop_condition.is_set() def stop(self): self._stop_condition.set() class StatReloader(BaseReloader): SLEEP_TIME = 1 # Check for changes once per second. 
def tick(self): mtimes = {} while True: for filepath, mtime in self.snapshot_files(): old_time = mtimes.get(filepath) mtimes[filepath] = mtime if old_time is None: logger.debug('File %s first seen with mtime %s', filepath, mtime) continue elif mtime > old_time: logger.debug('File %s previous mtime: %s, current mtime: %s', filepath, old_time, mtime) self.notify_file_changed(filepath) time.sleep(self.SLEEP_TIME) yield def snapshot_files(self): # watched_files may produce duplicate paths if globs overlap. seen_files = set() for file in self.watched_files(): if file in seen_files: continue try: mtime = file.stat().st_mtime except OSError: # This is thrown when the file does not exist. continue seen_files.add(file) yield file, mtime @classmethod def check_availability(cls): return True class WatchmanUnavailable(RuntimeError): pass class WatchmanReloader(BaseReloader): def __init__(self): self.roots = defaultdict(set) self.processed_request = threading.Event() self.client_timeout = int(os.environ.get('DJANGO_WATCHMAN_TIMEOUT', 5)) super().__init__() @cached_property def client(self): return pywatchman.client(timeout=self.client_timeout) def _watch_root(self, root): # In practice this shouldn't occur, however, it's possible that a # directory that doesn't exist yet is being watched. If it's outside of # sys.path then this will end up a new root. How to handle this isn't # clear: Not adding the root will likely break when subscribing to the # changes, however, as this is currently an internal API, no files # will be being watched outside of sys.path. Fixing this by checking # inside watch_glob() and watch_dir() is expensive, instead this could # could fall back to the StatReloader if this case is detected? For # now, watching its parent, if possible, is sufficient. if not root.exists(): if not root.parent.exists(): logger.warning('Unable to watch root dir %s as neither it or its parent exist.', root) return root = root.parent result = self.client.query('watch-project', str(root.absolute())) if 'warning' in result: logger.warning('Watchman warning: %s', result['warning']) logger.debug('Watchman watch-project result: %s', result) return result['watch'], result.get('relative_path') @functools.lru_cache() def _get_clock(self, root): return self.client.query('clock', root)['clock'] def _subscribe(self, directory, name, expression): root, rel_path = self._watch_root(directory) # Only receive notifications of files changing, filtering out other types # like special files: https://facebook.github.io/watchman/docs/type only_files_expression = [ 'allof', ['anyof', ['type', 'f'], ['type', 'l']], expression ] query = { 'expression': only_files_expression, 'fields': ['name'], 'since': self._get_clock(root), 'dedup_results': True, } if rel_path: query['relative_root'] = rel_path logger.debug('Issuing watchman subscription %s, for root %s. 
Query: %s', name, root, query) self.client.query('subscribe', root, name, query) def _subscribe_dir(self, directory, filenames): if not directory.exists(): if not directory.parent.exists(): logger.warning('Unable to watch directory %s as neither it or its parent exist.', directory) return prefix = 'files-parent-%s' % directory.name filenames = ['%s/%s' % (directory.name, filename) for filename in filenames] directory = directory.parent expression = ['name', filenames, 'wholename'] else: prefix = 'files' expression = ['name', filenames] self._subscribe(directory, '%s:%s' % (prefix, directory), expression) def _watch_glob(self, directory, patterns): """ Watch a directory with a specific glob. If the directory doesn't yet exist, attempt to watch the parent directory and amend the patterns to include this. It's important this method isn't called more than one per directory when updating all subscriptions. Subsequent calls will overwrite the named subscription, so it must include all possible glob expressions. """ prefix = 'glob' if not directory.exists(): if not directory.parent.exists(): logger.warning('Unable to watch directory %s as neither it or its parent exist.', directory) return prefix = 'glob-parent-%s' % directory.name patterns = ['%s/%s' % (directory.name, pattern) for pattern in patterns] directory = directory.parent expression = ['anyof'] for pattern in patterns: expression.append(['match', pattern, 'wholename']) self._subscribe(directory, '%s:%s' % (prefix, directory), expression) def watched_roots(self, watched_files): extra_directories = self.directory_globs.keys() watched_file_dirs = [f.parent for f in watched_files] sys_paths = list(sys_path_directories()) return frozenset((*extra_directories, *watched_file_dirs, *sys_paths)) def _update_watches(self): watched_files = list(self.watched_files(include_globs=False)) found_roots = common_roots(self.watched_roots(watched_files)) logger.debug('Watching %s files', len(watched_files)) logger.debug('Found common roots: %s', found_roots) # Setup initial roots for performance, shortest roots first. for root in sorted(found_roots): self._watch_root(root) for directory, patterns in self.directory_globs.items(): self._watch_glob(directory, patterns) # Group sorted watched_files by their parent directory. sorted_files = sorted(watched_files, key=lambda p: p.parent) for directory, group in itertools.groupby(sorted_files, key=lambda p: p.parent): # These paths need to be relative to the parent directory. self._subscribe_dir(directory, [str(p.relative_to(directory)) for p in group]) def update_watches(self): try: self._update_watches() except Exception as ex: # If the service is still available, raise the original exception. if self.check_server_status(ex): raise def _check_subscription(self, sub): subscription = self.client.getSubscription(sub) if not subscription: return logger.debug('Watchman subscription %s has results.', sub) for result in subscription: # When using watch-project, it's not simple to get the relative # directory without storing some specific state. Store the full # path to the directory in the subscription name, prefixed by its # type (glob, files). root_directory = Path(result['subscription'].split(':', 1)[1]) logger.debug('Found root directory %s', root_directory) for file in result.get('files', []): self.notify_file_changed(root_directory / file) def request_processed(self, **kwargs): logger.debug('Request processed. 
Setting update_watches event.') self.processed_request.set() def tick(self): request_finished.connect(self.request_processed) self.update_watches() while True: if self.processed_request.is_set(): self.update_watches() self.processed_request.clear() try: self.client.receive() except pywatchman.SocketTimeout: pass except pywatchman.WatchmanError as ex: logger.debug('Watchman error: %s, checking server status.', ex) self.check_server_status(ex) else: for sub in list(self.client.subs.keys()): self._check_subscription(sub) yield # Protect against busy loops. time.sleep(0.1) def stop(self): self.client.close() super().stop() def check_server_status(self, inner_ex=None): """Return True if the server is available.""" try: self.client.query('version') except Exception: raise WatchmanUnavailable(str(inner_ex)) from inner_ex return True @classmethod def check_availability(cls): if not pywatchman: raise WatchmanUnavailable('pywatchman not installed.') client = pywatchman.client(timeout=0.1) try: result = client.capabilityCheck() except Exception: # The service is down? raise WatchmanUnavailable('Cannot connect to the watchman service.') version = get_version_tuple(result['version']) # Watchman 4.9 includes multiple improvements to watching project # directories as well as case insensitive filesystems. logger.debug('Watchman version %s', version) if version < (4, 9): raise WatchmanUnavailable('Watchman 4.9 or later is required.') def get_reloader(): """Return the most suitable reloader for this environment.""" try: WatchmanReloader.check_availability() except WatchmanUnavailable: return StatReloader() return WatchmanReloader() def start_django(reloader, main_func, *args, **kwargs): ensure_echo_on() main_func = check_errors(main_func) django_main_thread = threading.Thread(target=main_func, args=args, kwargs=kwargs, name='django-main-thread') django_main_thread.daemon = True django_main_thread.start() while not reloader.should_stop: try: reloader.run(django_main_thread) except WatchmanUnavailable as ex: # It's possible that the watchman service shuts down or otherwise # becomes unavailable. In that case, use the StatReloader. reloader = StatReloader() logger.error('Error connecting to Watchman: %s', ex) logger.info('Watching for file changes with %s', reloader.__class__.__name__) def run_with_reloader(main_func, *args, **kwargs): signal.signal(signal.SIGTERM, lambda *args: sys.exit(0)) try: if os.environ.get(DJANGO_AUTORELOAD_ENV) == 'true': reloader = get_reloader() logger.info('Watching for file changes with %s', reloader.__class__.__name__) start_django(reloader, main_func, *args, **kwargs) else: exit_code = restart_with_reloader() sys.exit(exit_code) except KeyboardInterrupt: pass
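# Hedged illustration (not part of the module above): common_roots() collapses
# a set of directories into the smallest set of ancestors that still covers
# them, which is what the file-system watchers are ultimately pointed at.
# The paths below are made up; on a POSIX filesystem this should print
# (PosixPath('/srv/app'),) because the other two directories are nested
# inside /srv/app.
if __name__ == '__main__':
    _demo_paths = frozenset({
        Path('/srv/app'),
        Path('/srv/app/project'),
        Path('/srv/app/project/templates'),
    })
    print(common_roots(_demo_paths))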
f5a03581321ee39d83cf75352a897eb5c0e9c443742248927e76dc68eb11ece1
import html.entities import re import unicodedata from gzip import GzipFile from io import BytesIO from django.core.exceptions import SuspiciousFileOperation from django.utils.functional import SimpleLazyObject, keep_lazy_text, lazy from django.utils.regex_helper import _lazy_re_compile from django.utils.translation import gettext as _, gettext_lazy, pgettext @keep_lazy_text def capfirst(x): """Capitalize the first letter of a string.""" if not x: return x if not isinstance(x, str): x = str(x) return x[0].upper() + x[1:] # Set up regular expressions re_words = _lazy_re_compile(r'<[^>]+?>|([^<>\s]+)', re.S) re_chars = _lazy_re_compile(r'<[^>]+?>|(.)', re.S) re_tag = _lazy_re_compile(r'<(/)?(\S+?)(?:(\s*/)|\s.*?)?>', re.S) re_newlines = _lazy_re_compile(r'\r\n|\r') # Used in normalize_newlines re_camel_case = _lazy_re_compile(r'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))') @keep_lazy_text def wrap(text, width): """ A word-wrap function that preserves existing line breaks. Expects that existing line breaks are posix newlines. Preserve all white space except added line breaks consume the space on which they break the line. Don't wrap long words, thus the output text may have lines longer than ``width``. """ def _generator(): for line in text.splitlines(True): # True keeps trailing linebreaks max_width = min((line.endswith('\n') and width + 1 or width), width) while len(line) > max_width: space = line[:max_width + 1].rfind(' ') + 1 if space == 0: space = line.find(' ') + 1 if space == 0: yield line line = '' break yield '%s\n' % line[:space - 1] line = line[space:] max_width = min((line.endswith('\n') and width + 1 or width), width) if line: yield line return ''.join(_generator()) class Truncator(SimpleLazyObject): """ An object used to truncate text, either by characters or words. """ def __init__(self, text): super().__init__(lambda: str(text)) def add_truncation_text(self, text, truncate=None): if truncate is None: truncate = pgettext( 'String to return when truncating text', '%(truncated_text)s…') if '%(truncated_text)s' in truncate: return truncate % {'truncated_text': text} # The truncation text didn't contain the %(truncated_text)s string # replacement argument so just append it to the text. if text.endswith(truncate): # But don't append the truncation text if the current text already # ends in this. return text return '%s%s' % (text, truncate) def chars(self, num, truncate=None, html=False): """ Return the text truncated to be no longer than the specified number of characters. `truncate` specifies what should be used to notify that the string has been truncated, defaulting to a translatable string of an ellipsis. 
""" self._setup() length = int(num) text = unicodedata.normalize('NFC', self._wrapped) # Calculate the length to truncate to (max length - end_text length) truncate_len = length for char in self.add_truncation_text('', truncate): if not unicodedata.combining(char): truncate_len -= 1 if truncate_len == 0: break if html: return self._truncate_html(length, truncate, text, truncate_len, False) return self._text_chars(length, truncate, text, truncate_len) def _text_chars(self, length, truncate, text, truncate_len): """Truncate a string after a certain number of chars.""" s_len = 0 end_index = None for i, char in enumerate(text): if unicodedata.combining(char): # Don't consider combining characters # as adding to the string length continue s_len += 1 if end_index is None and s_len > truncate_len: end_index = i if s_len > length: # Return the truncated string return self.add_truncation_text(text[:end_index or 0], truncate) # Return the original string since no truncation was necessary return text def words(self, num, truncate=None, html=False): """ Truncate a string after a certain number of words. `truncate` specifies what should be used to notify that the string has been truncated, defaulting to ellipsis. """ self._setup() length = int(num) if html: return self._truncate_html(length, truncate, self._wrapped, length, True) return self._text_words(length, truncate) def _text_words(self, length, truncate): """ Truncate a string after a certain number of words. Strip newlines in the string. """ words = self._wrapped.split() if len(words) > length: words = words[:length] return self.add_truncation_text(' '.join(words), truncate) return ' '.join(words) def _truncate_html(self, length, truncate, text, truncate_len, words): """ Truncate HTML to a certain number of chars (not counting tags and comments), or, if words is True, then to a certain number of words. Close opened tags if they were correctly closed in the given HTML. Preserve newlines in the HTML. 
""" if words and length <= 0: return '' html4_singlets = ( 'br', 'col', 'link', 'base', 'img', 'param', 'area', 'hr', 'input' ) # Count non-HTML chars/words and keep note of open tags pos = 0 end_text_pos = 0 current_len = 0 open_tags = [] regex = re_words if words else re_chars while current_len <= length: m = regex.search(text, pos) if not m: # Checked through whole string break pos = m.end(0) if m[1]: # It's an actual non-HTML word or char current_len += 1 if current_len == truncate_len: end_text_pos = pos continue # Check for tag tag = re_tag.match(m[0]) if not tag or current_len >= truncate_len: # Don't worry about non tags or tags after our truncate point continue closing_tag, tagname, self_closing = tag.groups() # Element names are always case-insensitive tagname = tagname.lower() if self_closing or tagname in html4_singlets: pass elif closing_tag: # Check for match in open tags list try: i = open_tags.index(tagname) except ValueError: pass else: # SGML: An end tag closes, back to the matching start tag, # all unclosed intervening start tags with omitted end tags open_tags = open_tags[i + 1:] else: # Add it to the start of the open tags list open_tags.insert(0, tagname) if current_len <= length: return text out = text[:end_text_pos] truncate_text = self.add_truncation_text('', truncate) if truncate_text: out += truncate_text # Close any tags still open for tag in open_tags: out += '</%s>' % tag # Return string return out @keep_lazy_text def get_valid_filename(name): """ Return the given string converted to a string that can be used for a clean filename. Remove leading and trailing spaces; convert other spaces to underscores; and remove anything that is not an alphanumeric, dash, underscore, or dot. >>> get_valid_filename("john's portrait in 2004.jpg") 'johns_portrait_in_2004.jpg' """ s = str(name).strip().replace(' ', '_') s = re.sub(r'(?u)[^-\w.]', '', s) if s in {'', '.', '..'}: raise SuspiciousFileOperation("Could not derive file name from '%s'" % name) return s @keep_lazy_text def get_text_list(list_, last_word=gettext_lazy('or')): """ >>> get_text_list(['a', 'b', 'c', 'd']) 'a, b, c or d' >>> get_text_list(['a', 'b', 'c'], 'and') 'a, b and c' >>> get_text_list(['a', 'b'], 'and') 'a and b' >>> get_text_list(['a']) 'a' >>> get_text_list([]) '' """ if not list_: return '' if len(list_) == 1: return str(list_[0]) return '%s %s %s' % ( # Translators: This string is used as a separator between list elements _(', ').join(str(i) for i in list_[:-1]), str(last_word), str(list_[-1]) ) @keep_lazy_text def normalize_newlines(text): """Normalize CRLF and CR newlines to just LF.""" return re_newlines.sub('\n', str(text)) @keep_lazy_text def phone2numeric(phone): """Convert a phone number with letters into its numeric equivalent.""" char2number = { 'a': '2', 'b': '2', 'c': '2', 'd': '3', 'e': '3', 'f': '3', 'g': '4', 'h': '4', 'i': '4', 'j': '5', 'k': '5', 'l': '5', 'm': '6', 'n': '6', 'o': '6', 'p': '7', 'q': '7', 'r': '7', 's': '7', 't': '8', 'u': '8', 'v': '8', 'w': '9', 'x': '9', 'y': '9', 'z': '9', } return ''.join(char2number.get(c, c) for c in phone.lower()) # From http://www.xhaus.com/alan/python/httpcomp.html#gzip # Used with permission. def compress_string(s): zbuf = BytesIO() with GzipFile(mode='wb', compresslevel=6, fileobj=zbuf, mtime=0) as zfile: zfile.write(s) return zbuf.getvalue() class StreamingBuffer(BytesIO): def read(self): ret = self.getvalue() self.seek(0) self.truncate() return ret # Like compress_string, but for iterators of strings. 
def compress_sequence(sequence): buf = StreamingBuffer() with GzipFile(mode='wb', compresslevel=6, fileobj=buf, mtime=0) as zfile: # Output headers... yield buf.read() for item in sequence: zfile.write(item) data = buf.read() if data: yield data yield buf.read() # Expression to match some_token and some_token="with spaces" (and similarly # for single-quoted strings). smart_split_re = _lazy_re_compile(r""" ((?: [^\s'"]* (?: (?:"(?:[^"\\]|\\.)*" | '(?:[^'\\]|\\.)*') [^\s'"]* )+ ) | \S+) """, re.VERBOSE) def smart_split(text): r""" Generator that splits a string by spaces, leaving quoted phrases together. Supports both single and double quotes, and supports escaping quotes with backslashes. In the output, strings will keep their initial and trailing quote marks and escaped quotes will remain escaped (the results can then be further processed with unescape_string_literal()). >>> list(smart_split(r'This is "a person\'s" test.')) ['This', 'is', '"a person\\\'s"', 'test.'] >>> list(smart_split(r"Another 'person\'s' test.")) ['Another', "'person\\'s'", 'test.'] >>> list(smart_split(r'A "\"funky\" style" test.')) ['A', '"\\"funky\\" style"', 'test.'] """ for bit in smart_split_re.finditer(str(text)): yield bit[0] def _replace_entity(match): text = match[1] if text[0] == '#': text = text[1:] try: if text[0] in 'xX': c = int(text[1:], 16) else: c = int(text) return chr(c) except ValueError: return match[0] else: try: return chr(html.entities.name2codepoint[text]) except KeyError: return match[0] _entity_re = _lazy_re_compile(r"&(#?[xX]?(?:[0-9a-fA-F]+|\w{1,8}));") @keep_lazy_text def unescape_string_literal(s): r""" Convert quoted string literals to unquoted strings with escaped quotes and backslashes unquoted:: >>> unescape_string_literal('"abc"') 'abc' >>> unescape_string_literal("'abc'") 'abc' >>> unescape_string_literal('"a \"bc\""') 'a "bc"' >>> unescape_string_literal("'\'ab\' c'") "'ab' c" """ if s[0] not in "\"'" or s[-1] != s[0]: raise ValueError("Not a string literal: %r" % s) quote = s[0] return s[1:-1].replace(r'\%s' % quote, quote).replace(r'\\', '\\') @keep_lazy_text def slugify(value, allow_unicode=False): """ Convert to ASCII if 'allow_unicode' is False. Convert spaces or repeated dashes to single dashes. Remove characters that aren't alphanumerics, underscores, or hyphens. Convert to lowercase. Also strip leading and trailing whitespace, dashes, and underscores. """ value = str(value) if allow_unicode: value = unicodedata.normalize('NFKC', value) else: value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii') value = re.sub(r'[^\w\s-]', '', value.lower()) return re.sub(r'[-\s]+', '-', value).strip('-_') def camel_case_to_spaces(value): """ Split CamelCase and convert to lowercase. Strip surrounding whitespace. """ return re_camel_case.sub(r' \1', value).strip().lower() def _format_lazy(format_string, *args, **kwargs): """ Apply str.format() on 'format_string' where format_string, args, and/or kwargs might be lazy. """ return format_string.format(*args, **kwargs) format_lazy = lazy(_format_lazy, str)
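# Hedged usage sketch (illustrative, not part of the module above). Truncator
# counts visible characters/words rather than bytes, and with html=True it
# re-closes any tags it had to cut open; slugify() normalises arbitrary text
# into a URL-safe slug. The sample strings are made up. The lazy translation
# machinery behind the default '…' suffix needs Django settings, so a
# throwaway configuration is created here; a real project already has one.
if __name__ == '__main__':
    from django.conf import settings
    if not settings.configured:
        settings.configure(USE_I18N=False)

    text = 'The quick brown fox jumped over the lazy dog.'
    print(Truncator(text).words(4))    # The quick brown fox…
    print(Truncator(text).chars(12))   # The quick b…
    print(Truncator('<p>The quick <b>brown fox</b></p>').words(3, html=True))
    # <p>The quick <b>brown…</b></p>
    print(slugify('Hello, Düsseldorf!'))  # hello-dusseldorf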
7fa19b9ae2d17ab3b35795717d6a46f0f87ce5a0dded1761642b3a78bbc16721
""" A class for storing a tree graph. Primarily used for filter constructs in the ORM. """ import copy from django.utils.hashable import make_hashable class Node: """ A single internal node in the tree graph. A Node should be viewed as a connection (the root) with the children being either leaf nodes or other Node instances. """ # Standard connector type. Clients usually won't use this at all and # subclasses will usually override the value. default = 'DEFAULT' def __init__(self, children=None, connector=None, negated=False): """Construct a new Node. If no connector is given, use the default.""" self.children = children[:] if children else [] self.connector = connector or self.default self.negated = negated # Required because django.db.models.query_utils.Q. Q. __init__() is # problematic, but it is a natural Node subclass in all other respects. @classmethod def _new_instance(cls, children=None, connector=None, negated=False): """ Create a new instance of this class when new Nodes (or subclasses) are needed in the internal code in this class. Normally, it just shadows __init__(). However, subclasses with an __init__ signature that aren't an extension of Node.__init__ might need to implement this method to allow a Node to create a new instance of them (if they have any extra setting up to do). """ obj = Node(children, connector, negated) obj.__class__ = cls return obj def __str__(self): template = '(NOT (%s: %s))' if self.negated else '(%s: %s)' return template % (self.connector, ', '.join(str(c) for c in self.children)) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def __deepcopy__(self, memodict): obj = Node(connector=self.connector, negated=self.negated) obj.__class__ = self.__class__ obj.children = copy.deepcopy(self.children, memodict) return obj def __len__(self): """Return the number of children this node has.""" return len(self.children) def __bool__(self): """Return whether or not this node has children.""" return bool(self.children) def __contains__(self, other): """Return True if 'other' is a direct child of this instance.""" return other in self.children def __eq__(self, other): return ( self.__class__ == other.__class__ and (self.connector, self.negated) == (other.connector, other.negated) and self.children == other.children ) def __hash__(self): return hash((self.__class__, self.connector, self.negated, *make_hashable(self.children))) def add(self, data, conn_type, squash=True): """ Combine this tree and the data represented by data using the connector conn_type. The combine is done by squashing the node other away if possible. This tree (self) will never be pushed to a child node of the combined tree, nor will the connector or negated properties change. Return a node which can be used in place of data regardless if the node other got squashed or not. If `squash` is False the data is prepared and added as a child to this tree without further logic. """ if self.connector == conn_type and data in self.children: return data if not squash: self.children.append(data) return data if self.connector == conn_type: # We can reuse self.children to append or squash the node other. if (isinstance(data, Node) and not data.negated and (data.connector == conn_type or len(data) == 1)): # We can squash the other node's children directly into this # node. We are just doing (AB)(CD) == (ABCD) here, with the # addition that if the length of the other node is 1 the # connector doesn't matter. 
However, for the len(self) == 1 # case we don't want to do the squashing, as it would alter # self.connector. self.children.extend(data.children) return self else: # We could perhaps use additional logic here to see if some # children could be used for pushdown. self.children.append(data) return data else: obj = self._new_instance(self.children, self.connector, self.negated) self.connector = conn_type self.children = [obj, data] return data def negate(self): """Negate the sense of the root connector.""" self.negated = not self.negated
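# Hedged sketch (illustrative, not part of the module above): add() merges a
# child node that uses the same connector directly into this node's children,
# and re-roots the tree when the connector differs. The connector names and
# leaf values below are arbitrary.
if __name__ == '__main__':
    parent = Node(['a', 'b'], connector='AND')
    parent.add(Node(['c', 'd'], connector='AND'), 'AND')
    print(parent)   # (AND: a, b, c, d)
    parent.add(Node(['e'], connector='OR'), 'OR')
    print(parent)   # (OR: (AND: a, b, c, d), (OR: e))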
55753641e4958f255d070410befd483f3af32a54848ba3e1aaed035eb3995843
""" PHP date() style date formatting See http://www.php.net/date for format strings Usage: >>> import datetime >>> d = datetime.datetime.now() >>> df = DateFormat(d) >>> print(df.format('jS F Y H:i')) 7th October 2003 11:39 >>> """ import calendar import datetime from email.utils import format_datetime as format_datetime_rfc5322 from django.utils.dates import ( MONTHS, MONTHS_3, MONTHS_ALT, MONTHS_AP, WEEKDAYS, WEEKDAYS_ABBR, ) from django.utils.regex_helper import _lazy_re_compile from django.utils.timezone import ( _datetime_ambiguous_or_imaginary, get_default_timezone, is_naive, make_aware, ) from django.utils.translation import gettext as _ re_formatchars = _lazy_re_compile(r'(?<!\\)([aAbcdDeEfFgGhHiIjlLmMnNoOPrsStTUuwWyYzZ])') re_escaped = _lazy_re_compile(r'\\(.)') class Formatter: def format(self, formatstr): pieces = [] for i, piece in enumerate(re_formatchars.split(str(formatstr))): if i % 2: if type(self.data) is datetime.date and hasattr(TimeFormat, piece): raise TypeError( "The format for date objects may not contain " "time-related format specifiers (found '%s')." % piece ) pieces.append(str(getattr(self, piece)())) elif piece: pieces.append(re_escaped.sub(r'\1', piece)) return ''.join(pieces) class TimeFormat(Formatter): def __init__(self, obj): self.data = obj self.timezone = None # We only support timezone when formatting datetime objects, # not date objects (timezone information not appropriate), # or time objects (against established django policy). if isinstance(obj, datetime.datetime): if is_naive(obj): self.timezone = get_default_timezone() else: self.timezone = obj.tzinfo def a(self): "'a.m.' or 'p.m.'" if self.data.hour > 11: return _('p.m.') return _('a.m.') def A(self): "'AM' or 'PM'" if self.data.hour > 11: return _('PM') return _('AM') def e(self): """ Timezone name. If timezone information is not available, return an empty string. """ if not self.timezone: return "" try: if hasattr(self.data, 'tzinfo') and self.data.tzinfo: return self.data.tzname() or '' except NotImplementedError: pass return "" def f(self): """ Time, in 12-hour hours and minutes, with minutes left off if they're zero. Examples: '1', '1:30', '2:05', '2' Proprietary extension. """ if self.data.minute == 0: return self.g() return '%s:%s' % (self.g(), self.i()) def g(self): "Hour, 12-hour format without leading zeros; i.e. '1' to '12'" return self.data.hour % 12 or 12 def G(self): "Hour, 24-hour format without leading zeros; i.e. '0' to '23'" return self.data.hour def h(self): "Hour, 12-hour format; i.e. '01' to '12'" return '%02d' % self.g() def H(self): "Hour, 24-hour format; i.e. '00' to '23'" return '%02d' % self.G() def i(self): "Minutes; i.e. '00' to '59'" return '%02d' % self.data.minute def O(self): # NOQA: E743, E741 """ Difference to Greenwich time in hours; e.g. '+0200', '-0430'. If timezone information is not available, return an empty string. """ if not self.timezone: return "" seconds = self.Z() if seconds == "": return "" sign = '-' if seconds < 0 else '+' seconds = abs(seconds) return "%s%02d%02d" % (sign, seconds // 3600, (seconds // 60) % 60) def P(self): """ Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off if they're zero and the strings 'midnight' and 'noon' if appropriate. Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.' Proprietary extension. 
""" if self.data.minute == 0 and self.data.hour == 0: return _('midnight') if self.data.minute == 0 and self.data.hour == 12: return _('noon') return '%s %s' % (self.f(), self.a()) def s(self): "Seconds; i.e. '00' to '59'" return '%02d' % self.data.second def T(self): """ Time zone of this machine; e.g. 'EST' or 'MDT'. If timezone information is not available, return an empty string. """ if not self.timezone: return "" if not _datetime_ambiguous_or_imaginary(self.data, self.timezone): name = self.timezone.tzname(self.data) else: name = self.format('O') return str(name) def u(self): "Microseconds; i.e. '000000' to '999999'" return '%06d' % self.data.microsecond def Z(self): """ Time zone offset in seconds (i.e. '-43200' to '43200'). The offset for timezones west of UTC is always negative, and for those east of UTC is always positive. If timezone information is not available, return an empty string. """ if ( not self.timezone or _datetime_ambiguous_or_imaginary(self.data, self.timezone) ): return "" offset = self.timezone.utcoffset(self.data) # `offset` is a datetime.timedelta. For negative values (to the west of # UTC) only days can be negative (days=-1) and seconds are always # positive. e.g. UTC-1 -> timedelta(days=-1, seconds=82800, microseconds=0) # Positive offsets have days=0 return offset.days * 86400 + offset.seconds class DateFormat(TimeFormat): def b(self): "Month, textual, 3 letters, lowercase; e.g. 'jan'" return MONTHS_3[self.data.month] def c(self): """ ISO 8601 Format Example : '2008-01-02T10:30:00.000123' """ return self.data.isoformat() def d(self): "Day of the month, 2 digits with leading zeros; i.e. '01' to '31'" return '%02d' % self.data.day def D(self): "Day of the week, textual, 3 letters; e.g. 'Fri'" return WEEKDAYS_ABBR[self.data.weekday()] def E(self): "Alternative month names as required by some locales. Proprietary extension." return MONTHS_ALT[self.data.month] def F(self): "Month, textual, long; e.g. 'January'" return MONTHS[self.data.month] def I(self): # NOQA: E743, E741 "'1' if Daylight Savings Time, '0' otherwise." if ( not self.timezone or _datetime_ambiguous_or_imaginary(self.data, self.timezone) ): return '' return '1' if self.timezone.dst(self.data) else '0' def j(self): "Day of the month without leading zeros; i.e. '1' to '31'" return self.data.day def l(self): # NOQA: E743, E741 "Day of the week, textual, long; e.g. 'Friday'" return WEEKDAYS[self.data.weekday()] def L(self): "Boolean for whether it is a leap year; i.e. True or False" return calendar.isleap(self.data.year) def m(self): "Month; i.e. '01' to '12'" return '%02d' % self.data.month def M(self): "Month, textual, 3 letters; e.g. 'Jan'" return MONTHS_3[self.data.month].title() def n(self): "Month without leading zeros; i.e. '1' to '12'" return self.data.month def N(self): "Month abbreviation in Associated Press style. Proprietary extension." return MONTHS_AP[self.data.month] def o(self): "ISO 8601 year number matching the ISO week number (W)" return self.data.isocalendar()[0] def r(self): "RFC 5322 formatted date; e.g. 'Thu, 21 Dec 2000 16:01:07 +0200'" if type(self.data) is datetime.date: raise TypeError( "The format for date objects may not contain time-related " "format specifiers (found 'r')." ) if is_naive(self.data): dt = make_aware(self.data, timezone=self.timezone) else: dt = self.data return format_datetime_rfc5322(dt) def S(self): "English ordinal suffix for the day of the month, 2 characters; i.e. 
'st', 'nd', 'rd' or 'th'" if self.data.day in (11, 12, 13): # Special case return 'th' last = self.data.day % 10 if last == 1: return 'st' if last == 2: return 'nd' if last == 3: return 'rd' return 'th' def t(self): "Number of days in the given month; i.e. '28' to '31'" return '%02d' % calendar.monthrange(self.data.year, self.data.month)[1] def U(self): "Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)" value = self.data if not isinstance(value, datetime.datetime): value = datetime.datetime.combine(value, datetime.time.min) return int(value.timestamp()) def w(self): "Day of the week, numeric, i.e. '0' (Sunday) to '6' (Saturday)" return (self.data.weekday() + 1) % 7 def W(self): "ISO-8601 week number of year, weeks starting on Monday" return self.data.isocalendar()[1] def y(self): """Year, 2 digits with leading zeros; e.g. '99'.""" return '%02d' % (self.data.year % 100) def Y(self): """Year, 4 digits with leading zeros; e.g. '1999'.""" return '%04d' % self.data.year def z(self): """Day of the year, i.e. 1 to 366.""" return self.data.timetuple().tm_yday def format(value, format_string): "Convenience function" df = DateFormat(value) return df.format(format_string) def time_format(value, format_string): "Convenience function" tf = TimeFormat(value) return tf.format(format_string)
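# Hedged usage sketch (illustrative, not part of the module above), mirroring
# the module docstring. Formatting needs Django settings for the default time
# zone and translations, so a throwaway configuration is created here; a real
# project already has one.
if __name__ == '__main__':
    from django.conf import settings
    if not settings.configured:
        settings.configure(USE_I18N=False, USE_TZ=False, TIME_ZONE='UTC')

    dt = datetime.datetime(2003, 10, 7, 11, 39)
    print(format(dt, 'jS F Y H:i'))         # 7th October 2003 11:39
    print(time_format(dt.time(), 'h:i A'))  # 11:39 AM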
711e4624739b7f4c09a5d8660ee08b652668886a3af1f4cd790c38f8a777872c
""" Syndication feed generation library -- used for generating RSS, etc. Sample usage: >>> from django.utils import feedgenerator >>> feed = feedgenerator.Rss201rev2Feed( ... title="Poynter E-Media Tidbits", ... link="http://www.poynter.org/column.asp?id=31", ... description="A group Weblog by the sharpest minds in online media/journalism/publishing.", ... language="en", ... ) >>> feed.add_item( ... title="Hello", ... link="http://www.holovaty.com/test/", ... description="Testing." ... ) >>> with open('test.rss', 'w') as fp: ... feed.write(fp, 'utf-8') For definitions of the different versions of RSS, see: https://web.archive.org/web/20110718035220/http://diveintomark.org/archives/2004/02/04/incompatible-rss """ import datetime import email from io import StringIO from urllib.parse import urlparse from django.utils.encoding import iri_to_uri from django.utils.timezone import utc from django.utils.xmlutils import SimplerXMLGenerator def rfc2822_date(date): if not isinstance(date, datetime.datetime): date = datetime.datetime.combine(date, datetime.time()) return email.utils.format_datetime(date) def rfc3339_date(date): if not isinstance(date, datetime.datetime): date = datetime.datetime.combine(date, datetime.time()) return date.isoformat() + ('Z' if date.utcoffset() is None else '') def get_tag_uri(url, date): """ Create a TagURI. See https://web.archive.org/web/20110514113830/http://diveintomark.org/archives/2004/05/28/howto-atom-id """ bits = urlparse(url) d = '' if date is not None: d = ',%s' % date.strftime('%Y-%m-%d') return 'tag:%s%s:%s/%s' % (bits.hostname, d, bits.path, bits.fragment) class SyndicationFeed: "Base class for all syndication feeds. Subclasses should provide write()" def __init__(self, title, link, description, language=None, author_email=None, author_name=None, author_link=None, subtitle=None, categories=None, feed_url=None, feed_copyright=None, feed_guid=None, ttl=None, **kwargs): def to_str(s): return str(s) if s is not None else s categories = categories and [str(c) for c in categories] self.feed = { 'title': to_str(title), 'link': iri_to_uri(link), 'description': to_str(description), 'language': to_str(language), 'author_email': to_str(author_email), 'author_name': to_str(author_name), 'author_link': iri_to_uri(author_link), 'subtitle': to_str(subtitle), 'categories': categories or (), 'feed_url': iri_to_uri(feed_url), 'feed_copyright': to_str(feed_copyright), 'id': feed_guid or link, 'ttl': to_str(ttl), **kwargs, } self.items = [] def add_item(self, title, link, description, author_email=None, author_name=None, author_link=None, pubdate=None, comments=None, unique_id=None, unique_id_is_permalink=None, categories=(), item_copyright=None, ttl=None, updateddate=None, enclosures=None, **kwargs): """ Add an item to the feed. All args are expected to be strings except pubdate and updateddate, which are datetime.datetime objects, and enclosures, which is an iterable of instances of the Enclosure class. 
""" def to_str(s): return str(s) if s is not None else s categories = categories and [to_str(c) for c in categories] self.items.append({ 'title': to_str(title), 'link': iri_to_uri(link), 'description': to_str(description), 'author_email': to_str(author_email), 'author_name': to_str(author_name), 'author_link': iri_to_uri(author_link), 'pubdate': pubdate, 'updateddate': updateddate, 'comments': to_str(comments), 'unique_id': to_str(unique_id), 'unique_id_is_permalink': unique_id_is_permalink, 'enclosures': enclosures or (), 'categories': categories or (), 'item_copyright': to_str(item_copyright), 'ttl': to_str(ttl), **kwargs, }) def num_items(self): return len(self.items) def root_attributes(self): """ Return extra attributes to place on the root (i.e. feed/channel) element. Called from write(). """ return {} def add_root_elements(self, handler): """ Add elements in the root (i.e. feed/channel) element. Called from write(). """ pass def item_attributes(self, item): """ Return extra attributes to place on each item (i.e. item/entry) element. """ return {} def add_item_elements(self, handler, item): """ Add elements on each item (i.e. item/entry) element. """ pass def write(self, outfile, encoding): """ Output the feed in the given encoding to outfile, which is a file-like object. Subclasses should override this. """ raise NotImplementedError('subclasses of SyndicationFeed must provide a write() method') def writeString(self, encoding): """ Return the feed in the given encoding as a string. """ s = StringIO() self.write(s, encoding) return s.getvalue() def latest_post_date(self): """ Return the latest item's pubdate or updateddate. If no items have either of these attributes this return the current UTC date/time. """ latest_date = None date_keys = ('updateddate', 'pubdate') for item in self.items: for date_key in date_keys: item_date = item.get(date_key) if item_date: if latest_date is None or item_date > latest_date: latest_date = item_date return latest_date or datetime.datetime.now(tz=utc) class Enclosure: """An RSS enclosure""" def __init__(self, url, length, mime_type): "All args are expected to be strings" self.length, self.mime_type = length, mime_type self.url = iri_to_uri(url) class RssFeed(SyndicationFeed): content_type = 'application/rss+xml; charset=utf-8' def write(self, outfile, encoding): handler = SimplerXMLGenerator(outfile, encoding) handler.startDocument() handler.startElement("rss", self.rss_attributes()) handler.startElement("channel", self.root_attributes()) self.add_root_elements(handler) self.write_items(handler) self.endChannelElement(handler) handler.endElement("rss") def rss_attributes(self): return { 'version': self._version, 'xmlns:atom': 'http://www.w3.org/2005/Atom', } def write_items(self, handler): for item in self.items: handler.startElement('item', self.item_attributes(item)) self.add_item_elements(handler, item) handler.endElement("item") def add_root_elements(self, handler): handler.addQuickElement("title", self.feed['title']) handler.addQuickElement("link", self.feed['link']) handler.addQuickElement("description", self.feed['description']) if self.feed['feed_url'] is not None: handler.addQuickElement("atom:link", None, {"rel": "self", "href": self.feed['feed_url']}) if self.feed['language'] is not None: handler.addQuickElement("language", self.feed['language']) for cat in self.feed['categories']: handler.addQuickElement("category", cat) if self.feed['feed_copyright'] is not None: handler.addQuickElement("copyright", self.feed['feed_copyright']) 
handler.addQuickElement("lastBuildDate", rfc2822_date(self.latest_post_date())) if self.feed['ttl'] is not None: handler.addQuickElement("ttl", self.feed['ttl']) def endChannelElement(self, handler): handler.endElement("channel") class RssUserland091Feed(RssFeed): _version = "0.91" def add_item_elements(self, handler, item): handler.addQuickElement("title", item['title']) handler.addQuickElement("link", item['link']) if item['description'] is not None: handler.addQuickElement("description", item['description']) class Rss201rev2Feed(RssFeed): # Spec: https://cyber.harvard.edu/rss/rss.html _version = "2.0" def add_item_elements(self, handler, item): handler.addQuickElement("title", item['title']) handler.addQuickElement("link", item['link']) if item['description'] is not None: handler.addQuickElement("description", item['description']) # Author information. if item["author_name"] and item["author_email"]: handler.addQuickElement("author", "%s (%s)" % (item['author_email'], item['author_name'])) elif item["author_email"]: handler.addQuickElement("author", item["author_email"]) elif item["author_name"]: handler.addQuickElement( "dc:creator", item["author_name"], {"xmlns:dc": "http://purl.org/dc/elements/1.1/"} ) if item['pubdate'] is not None: handler.addQuickElement("pubDate", rfc2822_date(item['pubdate'])) if item['comments'] is not None: handler.addQuickElement("comments", item['comments']) if item['unique_id'] is not None: guid_attrs = {} if isinstance(item.get('unique_id_is_permalink'), bool): guid_attrs['isPermaLink'] = str(item['unique_id_is_permalink']).lower() handler.addQuickElement("guid", item['unique_id'], guid_attrs) if item['ttl'] is not None: handler.addQuickElement("ttl", item['ttl']) # Enclosure. if item['enclosures']: enclosures = list(item['enclosures']) if len(enclosures) > 1: raise ValueError( "RSS feed items may only have one enclosure, see " "http://www.rssboard.org/rss-profile#element-channel-item-enclosure" ) enclosure = enclosures[0] handler.addQuickElement('enclosure', '', { 'url': enclosure.url, 'length': enclosure.length, 'type': enclosure.mime_type, }) # Categories. 
for cat in item['categories']: handler.addQuickElement("category", cat) class Atom1Feed(SyndicationFeed): # Spec: https://tools.ietf.org/html/rfc4287 content_type = 'application/atom+xml; charset=utf-8' ns = "http://www.w3.org/2005/Atom" def write(self, outfile, encoding): handler = SimplerXMLGenerator(outfile, encoding) handler.startDocument() handler.startElement('feed', self.root_attributes()) self.add_root_elements(handler) self.write_items(handler) handler.endElement("feed") def root_attributes(self): if self.feed['language'] is not None: return {"xmlns": self.ns, "xml:lang": self.feed['language']} else: return {"xmlns": self.ns} def add_root_elements(self, handler): handler.addQuickElement("title", self.feed['title']) handler.addQuickElement("link", "", {"rel": "alternate", "href": self.feed['link']}) if self.feed['feed_url'] is not None: handler.addQuickElement("link", "", {"rel": "self", "href": self.feed['feed_url']}) handler.addQuickElement("id", self.feed['id']) handler.addQuickElement("updated", rfc3339_date(self.latest_post_date())) if self.feed['author_name'] is not None: handler.startElement("author", {}) handler.addQuickElement("name", self.feed['author_name']) if self.feed['author_email'] is not None: handler.addQuickElement("email", self.feed['author_email']) if self.feed['author_link'] is not None: handler.addQuickElement("uri", self.feed['author_link']) handler.endElement("author") if self.feed['subtitle'] is not None: handler.addQuickElement("subtitle", self.feed['subtitle']) for cat in self.feed['categories']: handler.addQuickElement("category", "", {"term": cat}) if self.feed['feed_copyright'] is not None: handler.addQuickElement("rights", self.feed['feed_copyright']) def write_items(self, handler): for item in self.items: handler.startElement("entry", self.item_attributes(item)) self.add_item_elements(handler, item) handler.endElement("entry") def add_item_elements(self, handler, item): handler.addQuickElement("title", item['title']) handler.addQuickElement("link", "", {"href": item['link'], "rel": "alternate"}) if item['pubdate'] is not None: handler.addQuickElement('published', rfc3339_date(item['pubdate'])) if item['updateddate'] is not None: handler.addQuickElement('updated', rfc3339_date(item['updateddate'])) # Author information. if item['author_name'] is not None: handler.startElement("author", {}) handler.addQuickElement("name", item['author_name']) if item['author_email'] is not None: handler.addQuickElement("email", item['author_email']) if item['author_link'] is not None: handler.addQuickElement("uri", item['author_link']) handler.endElement("author") # Unique ID. if item['unique_id'] is not None: unique_id = item['unique_id'] else: unique_id = get_tag_uri(item['link'], item['pubdate']) handler.addQuickElement("id", unique_id) # Summary. if item['description'] is not None: handler.addQuickElement("summary", item['description'], {"type": "html"}) # Enclosures. for enclosure in item['enclosures']: handler.addQuickElement('link', '', { 'rel': 'enclosure', 'href': enclosure.url, 'length': enclosure.length, 'type': enclosure.mime_type, }) # Categories. for cat in item['categories']: handler.addQuickElement("category", "", {"term": cat}) # Rights. if item['item_copyright'] is not None: handler.addQuickElement("rights", item['item_copyright']) # This isolates the decision of what the system default is, so calling code can # do "feedgenerator.DefaultFeed" instead of "feedgenerator.Rss201rev2Feed". DefaultFeed = Rss201rev2Feed
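# Hedged sketch (illustrative, not part of the module above): the Atom
# serializer exposes the same interface as the RSS classes shown in the
# module docstring, and writeString() returns the document without needing a
# file. The URLs and titles below are made up.
if __name__ == '__main__':
    feed = Atom1Feed(
        title='Example feed',
        link='https://example.com/',
        description='Entries from example.com.',
        language='en',
    )
    feed.add_item(
        title='Hello',
        link='https://example.com/hello/',
        description='Testing.',
        pubdate=datetime.datetime(2021, 1, 1, tzinfo=utc),
    )
    print(feed.writeString('utf-8'))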
f7d85ae10a53520b259b8f608ed23c3a8e919c0d4c8bede479268525750a9660
import asyncio import functools import os from django.core.exceptions import SynchronousOnlyOperation def async_unsafe(message): """ Decorator to mark functions as async-unsafe. Someone trying to access the function while in an async context will get an error message. """ def decorator(func): @functools.wraps(func) def inner(*args, **kwargs): if not os.environ.get('DJANGO_ALLOW_ASYNC_UNSAFE'): # Detect a running event loop in this thread. try: asyncio.get_running_loop() except RuntimeError: pass else: raise SynchronousOnlyOperation(message) # Pass onwards. return func(*args, **kwargs) return inner # If the message is actually a function, then be a no-arguments decorator. if callable(message): func = message message = 'You cannot call this from an async context - use a thread or sync_to_async.' return decorator(func) else: return decorator
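# Hedged sketch (illustrative, not part of the module above): a hypothetical
# blocking helper marked as unsafe to call while an event loop is running.
# Assumes the DJANGO_ALLOW_ASYNC_UNSAFE environment variable is not set.
if __name__ == '__main__':
    @async_unsafe('blocking_io() cannot be called from an async context.')
    def blocking_io():
        return 'ok'

    print(blocking_io())  # No event loop running, so this simply calls through.

    async def main():
        try:
            blocking_io()
        except SynchronousOnlyOperation as exc:
            print('Rejected inside the event loop:', exc)

    asyncio.run(main())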
aaaa7711663d99400acb75b1d7f0c5768c1a98846d8631fdeb9d6b509cb1e080
import datetime import decimal import functools import re import unicodedata from importlib import import_module from django.conf import settings from django.utils import dateformat, numberformat from django.utils.functional import lazy from django.utils.translation import ( check_for_language, get_language, to_locale, ) # format_cache is a mapping from (format_type, lang) to the format string. # By using the cache, it is possible to avoid running get_format_modules # repeatedly. _format_cache = {} _format_modules_cache = {} ISO_INPUT_FORMATS = { 'DATE_INPUT_FORMATS': ['%Y-%m-%d'], 'TIME_INPUT_FORMATS': ['%H:%M:%S', '%H:%M:%S.%f', '%H:%M'], 'DATETIME_INPUT_FORMATS': [ '%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M:%S.%f', '%Y-%m-%d %H:%M', '%Y-%m-%d' ], } FORMAT_SETTINGS = frozenset([ 'DECIMAL_SEPARATOR', 'THOUSAND_SEPARATOR', 'NUMBER_GROUPING', 'FIRST_DAY_OF_WEEK', 'MONTH_DAY_FORMAT', 'TIME_FORMAT', 'DATE_FORMAT', 'DATETIME_FORMAT', 'SHORT_DATE_FORMAT', 'SHORT_DATETIME_FORMAT', 'YEAR_MONTH_FORMAT', 'DATE_INPUT_FORMATS', 'TIME_INPUT_FORMATS', 'DATETIME_INPUT_FORMATS', ]) def reset_format_cache(): """Clear any cached formats. This method is provided primarily for testing purposes, so that the effects of cached formats can be removed. """ global _format_cache, _format_modules_cache _format_cache = {} _format_modules_cache = {} def iter_format_modules(lang, format_module_path=None): """Find format modules.""" if not check_for_language(lang): return if format_module_path is None: format_module_path = settings.FORMAT_MODULE_PATH format_locations = [] if format_module_path: if isinstance(format_module_path, str): format_module_path = [format_module_path] for path in format_module_path: format_locations.append(path + '.%s') format_locations.append('django.conf.locale.%s') locale = to_locale(lang) locales = [locale] if '_' in locale: locales.append(locale.split('_')[0]) for location in format_locations: for loc in locales: try: yield import_module('%s.formats' % (location % loc)) except ImportError: pass def get_format_modules(lang=None, reverse=False): """Return a list of the format modules found.""" if lang is None: lang = get_language() if lang not in _format_modules_cache: _format_modules_cache[lang] = list(iter_format_modules(lang, settings.FORMAT_MODULE_PATH)) modules = _format_modules_cache[lang] if reverse: return list(reversed(modules)) return modules def get_format(format_type, lang=None, use_l10n=None): """ For a specific format type, return the format for the current language (locale). Default to the format in the settings. format_type is the name of the format, e.g. 'DATE_FORMAT'. If use_l10n is provided and is not None, it forces the value to be localized (or not), overriding the value of settings.USE_L10N. """ use_l10n = use_l10n or (use_l10n is None and settings.USE_L10N) if use_l10n and lang is None: lang = get_language() cache_key = (format_type, lang) try: return _format_cache[cache_key] except KeyError: pass # The requested format_type has not been cached yet. Try to find it in any # of the format_modules for the given lang if l10n is enabled. If it's not # there or if l10n is disabled, fall back to the project settings. 
val = None if use_l10n: for module in get_format_modules(lang): val = getattr(module, format_type, None) if val is not None: break if val is None: if format_type not in FORMAT_SETTINGS: return format_type val = getattr(settings, format_type) elif format_type in ISO_INPUT_FORMATS: # If a list of input formats from one of the format_modules was # retrieved, make sure the ISO_INPUT_FORMATS are in this list. val = list(val) for iso_input in ISO_INPUT_FORMATS.get(format_type, ()): if iso_input not in val: val.append(iso_input) _format_cache[cache_key] = val return val get_format_lazy = lazy(get_format, str, list, tuple) def date_format(value, format=None, use_l10n=None): """ Format a datetime.date or datetime.datetime object using a localizable format. If use_l10n is provided and is not None, that will force the value to be localized (or not), overriding the value of settings.USE_L10N. """ return dateformat.format(value, get_format(format or 'DATE_FORMAT', use_l10n=use_l10n)) def time_format(value, format=None, use_l10n=None): """ Format a datetime.time object using a localizable format. If use_l10n is provided and is not None, it forces the value to be localized (or not), overriding the value of settings.USE_L10N. """ return dateformat.time_format(value, get_format(format or 'TIME_FORMAT', use_l10n=use_l10n)) def number_format(value, decimal_pos=None, use_l10n=None, force_grouping=False): """ Format a numeric value using localization settings. If use_l10n is provided and is not None, it forces the value to be localized (or not), overriding the value of settings.USE_L10N. """ if use_l10n or (use_l10n is None and settings.USE_L10N): lang = get_language() else: lang = None return numberformat.format( value, get_format('DECIMAL_SEPARATOR', lang, use_l10n=use_l10n), decimal_pos, get_format('NUMBER_GROUPING', lang, use_l10n=use_l10n), get_format('THOUSAND_SEPARATOR', lang, use_l10n=use_l10n), force_grouping=force_grouping, use_l10n=use_l10n, ) def localize(value, use_l10n=None): """ Check if value is a localizable type (date, number...) and return it formatted as a string using current locale format. If use_l10n is provided and is not None, it forces the value to be localized (or not), overriding the value of settings.USE_L10N. """ if isinstance(value, str): # Handle strings first for performance reasons. return value elif isinstance(value, bool): # Make sure booleans don't get treated as numbers return str(value) elif isinstance(value, (decimal.Decimal, float, int)): if use_l10n is False: return str(value) return number_format(value, use_l10n=use_l10n) elif isinstance(value, datetime.datetime): return date_format(value, 'DATETIME_FORMAT', use_l10n=use_l10n) elif isinstance(value, datetime.date): return date_format(value, use_l10n=use_l10n) elif isinstance(value, datetime.time): return time_format(value, 'TIME_FORMAT', use_l10n=use_l10n) return value def localize_input(value, default=None): """ Check if an input value is a localizable type and return it formatted with the appropriate formatting string of the current locale. """ if isinstance(value, str): # Handle strings first for performance reasons. return value elif isinstance(value, bool): # Don't treat booleans as numbers. 
return str(value) elif isinstance(value, (decimal.Decimal, float, int)): return number_format(value) elif isinstance(value, datetime.datetime): format = default or get_format('DATETIME_INPUT_FORMATS')[0] format = sanitize_strftime_format(format) return value.strftime(format) elif isinstance(value, datetime.date): format = default or get_format('DATE_INPUT_FORMATS')[0] format = sanitize_strftime_format(format) return value.strftime(format) elif isinstance(value, datetime.time): format = default or get_format('TIME_INPUT_FORMATS')[0] return value.strftime(format) return value @functools.lru_cache() def sanitize_strftime_format(fmt): """ Ensure that certain specifiers are correctly padded with leading zeros. For years < 1000 specifiers %C, %F, %G, and %Y don't work as expected for strftime provided by glibc on Linux as they don't pad the year or century with leading zeros. Support for specifying the padding explicitly is available, however, which can be used to fix this issue. FreeBSD, macOS, and Windows do not support explicitly specifying the padding, but return four digit years (with leading zeros) as expected. This function checks whether the %Y produces a correctly padded string and, if not, makes the following substitutions: - %C → %02C - %F → %010F - %G → %04G - %Y → %04Y See https://bugs.python.org/issue13305 for more details. """ if datetime.date(1, 1, 1).strftime('%Y') == '0001': return fmt mapping = {'C': 2, 'F': 10, 'G': 4, 'Y': 4} return re.sub( r'((?:^|[^%])(?:%%)*)%([CFGY])', lambda m: r'%s%%0%s%s' % (m[1], mapping[m[2]], m[2]), fmt, ) def sanitize_separators(value): """ Sanitize a value according to the current decimal and thousand separator setting. Used with form field input. """ if isinstance(value, str): parts = [] decimal_separator = get_format('DECIMAL_SEPARATOR') if decimal_separator in value: value, decimals = value.split(decimal_separator, 1) parts.append(decimals) if settings.USE_THOUSAND_SEPARATOR: thousand_sep = get_format('THOUSAND_SEPARATOR') if thousand_sep == '.' and value.count('.') == 1 and len(value.split('.')[-1]) != 3: # Special case where we suspect a dot meant decimal separator (see #22171) pass else: for replacement in { thousand_sep, unicodedata.normalize('NFKD', thousand_sep)}: value = value.replace(replacement, '') parts.append(value) value = '.'.join(reversed(parts)) return value
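# A minimal, standalone sketch of the zero-padding substitution performed by
# sanitize_strftime_format() above. It is illustrative only: the helper name
# pad_year_specifiers() is not a Django API, and whether the substitution is
# needed at all depends on the platform's strftime() (glibc does not pad
# years < 1000; see https://bugs.python.org/issue13305).
import re


def pad_year_specifiers(fmt):
    # Prefix %C/%F/%G/%Y with an explicit width so small years render with
    # leading zeros, while leaving escaped '%%' sequences untouched.
    mapping = {'C': 2, 'F': 10, 'G': 4, 'Y': 4}
    return re.sub(
        r'((?:^|[^%])(?:%%)*)%([CFGY])',
        lambda m: '%s%%0%s%s' % (m[1], mapping[m[2]], m[2]),
        fmt,
    )


if __name__ == '__main__':
    assert pad_year_specifiers('%Y-%m-%d') == '%04Y-%m-%d'
    # A literal percent sign ('%%') does not trigger the substitution.
    assert pad_year_specifiers('%%Y %Y') == '%%Y %04Y'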
""" Cross Site Request Forgery Middleware. This module provides a middleware that implements protection against request forgeries from other sites. """ import logging import string from collections import defaultdict from urllib.parse import urlparse from django.conf import settings from django.core.exceptions import DisallowedHost, ImproperlyConfigured from django.urls import get_callable from django.utils.cache import patch_vary_headers from django.utils.crypto import constant_time_compare, get_random_string from django.utils.deprecation import MiddlewareMixin from django.utils.functional import cached_property from django.utils.http import is_same_domain from django.utils.log import log_response from django.utils.regex_helper import _lazy_re_compile logger = logging.getLogger('django.security.csrf') token_re = _lazy_re_compile('[^a-zA-Z0-9]') REASON_BAD_ORIGIN = "Origin checking failed - %s does not match any trusted origins." REASON_NO_REFERER = "Referer checking failed - no Referer." REASON_BAD_REFERER = "Referer checking failed - %s does not match any trusted origins." REASON_NO_CSRF_COOKIE = "CSRF cookie not set." REASON_BAD_TOKEN = "CSRF token missing or incorrect." REASON_MALFORMED_REFERER = "Referer checking failed - Referer is malformed." REASON_INSECURE_REFERER = "Referer checking failed - Referer is insecure while host is secure." CSRF_SECRET_LENGTH = 32 CSRF_TOKEN_LENGTH = 2 * CSRF_SECRET_LENGTH CSRF_ALLOWED_CHARS = string.ascii_letters + string.digits CSRF_SESSION_KEY = '_csrftoken' def _get_failure_view(): """Return the view to be used for CSRF rejections.""" return get_callable(settings.CSRF_FAILURE_VIEW) def _get_new_csrf_string(): return get_random_string(CSRF_SECRET_LENGTH, allowed_chars=CSRF_ALLOWED_CHARS) def _mask_cipher_secret(secret): """ Given a secret (assumed to be a string of CSRF_ALLOWED_CHARS), generate a token by adding a mask and applying it to the secret. """ mask = _get_new_csrf_string() chars = CSRF_ALLOWED_CHARS pairs = zip((chars.index(x) for x in secret), (chars.index(x) for x in mask)) cipher = ''.join(chars[(x + y) % len(chars)] for x, y in pairs) return mask + cipher def _unmask_cipher_token(token): """ Given a token (assumed to be a string of CSRF_ALLOWED_CHARS, of length CSRF_TOKEN_LENGTH, and that its first half is a mask), use it to decrypt the second half to produce the original secret. """ mask = token[:CSRF_SECRET_LENGTH] token = token[CSRF_SECRET_LENGTH:] chars = CSRF_ALLOWED_CHARS pairs = zip((chars.index(x) for x in token), (chars.index(x) for x in mask)) return ''.join(chars[x - y] for x, y in pairs) # Note negative values are ok def _get_new_csrf_token(): return _mask_cipher_secret(_get_new_csrf_string()) def get_token(request): """ Return the CSRF token required for a POST form. The token is an alphanumeric value. A new token is created if one is not already set. A side effect of calling this function is to make the csrf_protect decorator and the CsrfViewMiddleware add a CSRF cookie and a 'Vary: Cookie' header to the outgoing response. For this reason, you may need to use this function lazily, as is done by the csrf context processor. 
""" if "CSRF_COOKIE" not in request.META: csrf_secret = _get_new_csrf_string() request.META["CSRF_COOKIE"] = _mask_cipher_secret(csrf_secret) else: csrf_secret = _unmask_cipher_token(request.META["CSRF_COOKIE"]) request.META["CSRF_COOKIE_USED"] = True return _mask_cipher_secret(csrf_secret) def rotate_token(request): """ Change the CSRF token in use for a request - should be done on login for security purposes. """ request.META.update({ "CSRF_COOKIE_USED": True, "CSRF_COOKIE": _get_new_csrf_token(), }) request.csrf_cookie_needs_reset = True def _sanitize_token(token): # Allow only ASCII alphanumerics if token_re.search(token): return _get_new_csrf_token() elif len(token) == CSRF_TOKEN_LENGTH: return token elif len(token) == CSRF_SECRET_LENGTH: # Older Django versions set cookies to values of CSRF_SECRET_LENGTH # alphanumeric characters. For backwards compatibility, accept # such values as unmasked secrets. # It's easier to mask here and be consistent later, rather than add # different code paths in the checks, although that might be a tad more # efficient. return _mask_cipher_secret(token) return _get_new_csrf_token() def _compare_masked_tokens(request_csrf_token, csrf_token): # Assume both arguments are sanitized -- that is, strings of # length CSRF_TOKEN_LENGTH, all CSRF_ALLOWED_CHARS. return constant_time_compare( _unmask_cipher_token(request_csrf_token), _unmask_cipher_token(csrf_token), ) class CsrfViewMiddleware(MiddlewareMixin): """ Require a present and correct csrfmiddlewaretoken for POST requests that have a CSRF cookie, and set an outgoing CSRF cookie. This middleware should be used in conjunction with the {% csrf_token %} template tag. """ @cached_property def csrf_trusted_origins_hosts(self): return [ urlparse(origin).netloc.lstrip('*') for origin in settings.CSRF_TRUSTED_ORIGINS ] @cached_property def allowed_origins_exact(self): return { origin for origin in settings.CSRF_TRUSTED_ORIGINS if '*' not in origin } @cached_property def allowed_origin_subdomains(self): """ A mapping of allowed schemes to list of allowed netlocs, where all subdomains of the netloc are allowed. """ allowed_origin_subdomains = defaultdict(list) for parsed in (urlparse(origin) for origin in settings.CSRF_TRUSTED_ORIGINS if '*' in origin): allowed_origin_subdomains[parsed.scheme].append(parsed.netloc.lstrip('*')) return allowed_origin_subdomains # The _accept and _reject methods currently only exist for the sake of the # requires_csrf_token decorator. def _accept(self, request): # Avoid checking the request twice by adding a custom attribute to # request. This will be relevant when both decorator and middleware # are used. request.csrf_processing_done = True return None def _reject(self, request, reason): response = _get_failure_view()(request, reason=reason) log_response( 'Forbidden (%s): %s', reason, request.path, response=response, request=request, logger=logger, ) return response def _get_token(self, request): if settings.CSRF_USE_SESSIONS: try: return request.session.get(CSRF_SESSION_KEY) except AttributeError: raise ImproperlyConfigured( 'CSRF_USE_SESSIONS is enabled, but request.session is not ' 'set. SessionMiddleware must appear before CsrfViewMiddleware ' 'in MIDDLEWARE.' ) else: try: cookie_token = request.COOKIES[settings.CSRF_COOKIE_NAME] except KeyError: return None csrf_token = _sanitize_token(cookie_token) if csrf_token != cookie_token: # Cookie token needed to be replaced; # the cookie needs to be reset. 
request.csrf_cookie_needs_reset = True return csrf_token def _set_token(self, request, response): if settings.CSRF_USE_SESSIONS: if request.session.get(CSRF_SESSION_KEY) != request.META['CSRF_COOKIE']: request.session[CSRF_SESSION_KEY] = request.META['CSRF_COOKIE'] else: response.set_cookie( settings.CSRF_COOKIE_NAME, request.META['CSRF_COOKIE'], max_age=settings.CSRF_COOKIE_AGE, domain=settings.CSRF_COOKIE_DOMAIN, path=settings.CSRF_COOKIE_PATH, secure=settings.CSRF_COOKIE_SECURE, httponly=settings.CSRF_COOKIE_HTTPONLY, samesite=settings.CSRF_COOKIE_SAMESITE, ) # Set the Vary header since content varies with the CSRF cookie. patch_vary_headers(response, ('Cookie',)) def _origin_verified(self, request): request_origin = request.META['HTTP_ORIGIN'] try: good_host = request.get_host() except DisallowedHost: pass else: good_origin = '%s://%s' % ( 'https' if request.is_secure() else 'http', good_host, ) if request_origin == good_origin: return True if request_origin in self.allowed_origins_exact: return True try: parsed_origin = urlparse(request_origin) except ValueError: return False request_scheme = parsed_origin.scheme request_netloc = parsed_origin.netloc return any( is_same_domain(request_netloc, host) for host in self.allowed_origin_subdomains.get(request_scheme, ()) ) def process_request(self, request): csrf_token = self._get_token(request) if csrf_token is not None: # Use same token next time. request.META['CSRF_COOKIE'] = csrf_token def process_view(self, request, callback, callback_args, callback_kwargs): if getattr(request, 'csrf_processing_done', False): return None # Wait until request.META["CSRF_COOKIE"] has been manipulated before # bailing out, so that get_token still works if getattr(callback, 'csrf_exempt', False): return None # Assume that anything not defined as 'safe' by RFC7231 needs protection if request.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'): if getattr(request, '_dont_enforce_csrf_checks', False): # Mechanism to turn off CSRF checks for test suite. # It comes after the creation of CSRF cookies, so that # everything else continues to work exactly the same # (e.g. cookies are sent, etc.), but before any # branches that call reject(). return self._accept(request) # Reject the request if the Origin header doesn't match an allowed # value. if 'HTTP_ORIGIN' in request.META: if not self._origin_verified(request): return self._reject(request, REASON_BAD_ORIGIN % request.META['HTTP_ORIGIN']) elif request.is_secure(): # If the Origin header wasn't provided, reject HTTPS requests # if the Referer header doesn't match an allowed value. # # Suppose user visits http://example.com/ # An active network attacker (man-in-the-middle, MITM) sends a # POST form that targets https://example.com/detonate-bomb/ and # submits it via JavaScript. # # The attacker will need to provide a CSRF cookie and token, but # that's no problem for a MITM and the session-independent # secret we're using. So the MITM can circumvent the CSRF # protection. This is true for any HTTP connection, but anyone # using HTTPS expects better! For this reason, for # https://example.com/ we need additional protection that treats # http://example.com/ as completely untrusted. Under HTTPS, # Barth et al. found that the Referer header is missing for # same-domain requests in only about 0.2% of cases or less, so # we can use strict Referer checking. 
referer = request.META.get('HTTP_REFERER') if referer is None: return self._reject(request, REASON_NO_REFERER) try: referer = urlparse(referer) except ValueError: return self._reject(request, REASON_MALFORMED_REFERER) # Make sure we have a valid URL for Referer. if '' in (referer.scheme, referer.netloc): return self._reject(request, REASON_MALFORMED_REFERER) # Ensure that our Referer is also secure. if referer.scheme != 'https': return self._reject(request, REASON_INSECURE_REFERER) good_referer = ( settings.SESSION_COOKIE_DOMAIN if settings.CSRF_USE_SESSIONS else settings.CSRF_COOKIE_DOMAIN ) if good_referer is None: # If no cookie domain is configured, allow matching the # current host:port exactly if it's permitted by # ALLOWED_HOSTS. try: # request.get_host() includes the port. good_referer = request.get_host() except DisallowedHost: pass else: server_port = request.get_port() if server_port not in ('443', '80'): good_referer = '%s:%s' % (good_referer, server_port) # Create an iterable of all acceptable HTTP referers. good_hosts = self.csrf_trusted_origins_hosts if good_referer is not None: good_hosts = (*good_hosts, good_referer) if not any(is_same_domain(referer.netloc, host) for host in good_hosts): reason = REASON_BAD_REFERER % referer.geturl() return self._reject(request, reason) # Access csrf_token via self._get_token() as rotate_token() may # have been called by an authentication middleware during the # process_request() phase. csrf_token = self._get_token(request) if csrf_token is None: # No CSRF cookie. For POST requests, we insist on a CSRF cookie, # and in this way we can avoid all CSRF attacks, including login # CSRF. return self._reject(request, REASON_NO_CSRF_COOKIE) # Check non-cookie token for match. request_csrf_token = "" if request.method == "POST": try: request_csrf_token = request.POST.get('csrfmiddlewaretoken', '') except OSError: # Handle a broken connection before we've completed reading # the POST data. process_view shouldn't raise any # exceptions, so we'll ignore and serve the user a 403 # (assuming they're still listening, which they probably # aren't because of the error). pass if request_csrf_token == "": # Fall back to X-CSRFToken, to make things easier for AJAX, # and possible for PUT/DELETE. request_csrf_token = request.META.get(settings.CSRF_HEADER_NAME, '') request_csrf_token = _sanitize_token(request_csrf_token) if not _compare_masked_tokens(request_csrf_token, csrf_token): return self._reject(request, REASON_BAD_TOKEN) return self._accept(request) def process_response(self, request, response): if not getattr(request, 'csrf_cookie_needs_reset', False): if getattr(response, 'csrf_cookie_set', False): return response if not request.META.get("CSRF_COOKIE_USED", False): return response # Set the CSRF cookie even if it's already set, so we renew # the expiry timer. self._set_token(request, response) response.csrf_cookie_set = True return response
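# A minimal, standalone sketch (not part of Django) of the mask/unmask scheme
# used by _mask_cipher_secret() and _unmask_cipher_token() above: a token is
# mask + cipher, where the cipher is the secret shifted character by character
# by the mask over the allowed alphabet. Unmasking reverses the shift, so two
# different-looking tokens can carry the same secret. The constants below
# mirror CSRF_ALLOWED_CHARS and CSRF_SECRET_LENGTH for illustration.
import secrets
import string

ALLOWED_CHARS = string.ascii_letters + string.digits
SECRET_LENGTH = 32


def mask_secret(secret):
    mask = ''.join(secrets.choice(ALLOWED_CHARS) for _ in range(SECRET_LENGTH))
    pairs = zip((ALLOWED_CHARS.index(c) for c in secret),
                (ALLOWED_CHARS.index(c) for c in mask))
    cipher = ''.join(ALLOWED_CHARS[(x + y) % len(ALLOWED_CHARS)] for x, y in pairs)
    return mask + cipher


def unmask_token(token):
    mask, cipher = token[:SECRET_LENGTH], token[SECRET_LENGTH:]
    pairs = zip((ALLOWED_CHARS.index(c) for c in cipher),
                (ALLOWED_CHARS.index(c) for c in mask))
    # Negative indices wrap around, which is equivalent to modular subtraction.
    return ''.join(ALLOWED_CHARS[x - y] for x, y in pairs)


if __name__ == '__main__':
    secret = ''.join(secrets.choice(ALLOWED_CHARS) for _ in range(SECRET_LENGTH))
    token_a, token_b = mask_secret(secret), mask_secret(secret)
    # The two tokens (almost certainly) differ but unmask to the same secret.
    assert unmask_token(token_a) == unmask_token(token_b) == secret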
""" HTML Widget classes """ import copy import datetime import warnings from collections import defaultdict from itertools import chain from django.forms.utils import to_current_timezone from django.templatetags.static import static from django.utils import formats from django.utils.datastructures import OrderedSet from django.utils.dates import MONTHS from django.utils.formats import get_format from django.utils.html import format_html, html_safe from django.utils.regex_helper import _lazy_re_compile from django.utils.safestring import mark_safe from django.utils.topological_sort import ( CyclicDependencyError, stable_topological_sort, ) from django.utils.translation import gettext_lazy as _ from .renderers import get_default_renderer __all__ = ( 'Media', 'MediaDefiningClass', 'Widget', 'TextInput', 'NumberInput', 'EmailInput', 'URLInput', 'PasswordInput', 'HiddenInput', 'MultipleHiddenInput', 'FileInput', 'ClearableFileInput', 'Textarea', 'DateInput', 'DateTimeInput', 'TimeInput', 'CheckboxInput', 'Select', 'NullBooleanSelect', 'SelectMultiple', 'RadioSelect', 'CheckboxSelectMultiple', 'MultiWidget', 'SplitDateTimeWidget', 'SplitHiddenDateTimeWidget', 'SelectDateWidget', ) MEDIA_TYPES = ('css', 'js') class MediaOrderConflictWarning(RuntimeWarning): pass @html_safe class Media: def __init__(self, media=None, css=None, js=None): if media is not None: css = getattr(media, 'css', {}) js = getattr(media, 'js', []) else: if css is None: css = {} if js is None: js = [] self._css_lists = [css] self._js_lists = [js] def __repr__(self): return 'Media(css=%r, js=%r)' % (self._css, self._js) def __str__(self): return self.render() @property def _css(self): css = defaultdict(list) for css_list in self._css_lists: for medium, sublist in css_list.items(): css[medium].append(sublist) return {medium: self.merge(*lists) for medium, lists in css.items()} @property def _js(self): return self.merge(*self._js_lists) def render(self): return mark_safe('\n'.join(chain.from_iterable(getattr(self, 'render_' + name)() for name in MEDIA_TYPES))) def render_js(self): return [ format_html( '<script src="{}"></script>', self.absolute_path(path) ) for path in self._js ] def render_css(self): # To keep rendering order consistent, we can't just iterate over items(). # We need to sort the keys, and iterate over the sorted list. media = sorted(self._css) return chain.from_iterable([ format_html( '<link href="{}" type="text/css" media="{}" rel="stylesheet">', self.absolute_path(path), medium ) for path in self._css[medium] ] for medium in media) def absolute_path(self, path): """ Given a relative or absolute path to a static asset, return an absolute path. An absolute path will be returned unchanged while a relative path will be passed to django.templatetags.static.static(). """ if path.startswith(('http://', 'https://', '/')): return path return static(path) def __getitem__(self, name): """Return a Media object that only contains media of the given type.""" if name in MEDIA_TYPES: return Media(**{str(name): getattr(self, '_' + name)}) raise KeyError('Unknown media type "%s"' % name) @staticmethod def merge(*lists): """ Merge lists while trying to keep the relative order of the elements. Warn if the lists have the same elements in a different relative order. For static assets it can be important to have them included in the DOM in a certain order. In JavaScript you may not be able to reference a global or in CSS you might want to override a style. 
""" dependency_graph = defaultdict(set) all_items = OrderedSet() for list_ in filter(None, lists): head = list_[0] # The first items depend on nothing but have to be part of the # dependency graph to be included in the result. dependency_graph.setdefault(head, set()) for item in list_: all_items.add(item) # No self dependencies if head != item: dependency_graph[item].add(head) head = item try: return stable_topological_sort(all_items, dependency_graph) except CyclicDependencyError: warnings.warn( 'Detected duplicate Media files in an opposite order: {}'.format( ', '.join(repr(list_) for list_ in lists) ), MediaOrderConflictWarning, ) return list(all_items) def __add__(self, other): combined = Media() combined._css_lists = self._css_lists[:] combined._js_lists = self._js_lists[:] for item in other._css_lists: if item and item not in self._css_lists: combined._css_lists.append(item) for item in other._js_lists: if item and item not in self._js_lists: combined._js_lists.append(item) return combined def media_property(cls): def _media(self): # Get the media property of the superclass, if it exists sup_cls = super(cls, self) try: base = sup_cls.media except AttributeError: base = Media() # Get the media definition for this class definition = getattr(cls, 'Media', None) if definition: extend = getattr(definition, 'extend', True) if extend: if extend is True: m = base else: m = Media() for medium in extend: m = m + base[medium] return m + Media(definition) return Media(definition) return base return property(_media) class MediaDefiningClass(type): """ Metaclass for classes that can have media definitions. """ def __new__(mcs, name, bases, attrs): new_class = super().__new__(mcs, name, bases, attrs) if 'media' not in attrs: new_class.media = media_property(new_class) return new_class class Widget(metaclass=MediaDefiningClass): needs_multipart_form = False # Determines does this widget need multipart form is_localized = False is_required = False supports_microseconds = True def __init__(self, attrs=None): self.attrs = {} if attrs is None else attrs.copy() def __deepcopy__(self, memo): obj = copy.copy(self) obj.attrs = self.attrs.copy() memo[id(self)] = obj return obj @property def is_hidden(self): return self.input_type == 'hidden' if hasattr(self, 'input_type') else False def subwidgets(self, name, value, attrs=None): context = self.get_context(name, value, attrs) yield context['widget'] def format_value(self, value): """ Return a value as it should appear when rendered in a template. 
""" if value == '' or value is None: return None if self.is_localized: return formats.localize_input(value) return str(value) def get_context(self, name, value, attrs): return { 'widget': { 'name': name, 'is_hidden': self.is_hidden, 'required': self.is_required, 'value': self.format_value(value), 'attrs': self.build_attrs(self.attrs, attrs), 'template_name': self.template_name, }, } def render(self, name, value, attrs=None, renderer=None): """Render the widget as an HTML string.""" context = self.get_context(name, value, attrs) return self._render(self.template_name, context, renderer) def _render(self, template_name, context, renderer=None): if renderer is None: renderer = get_default_renderer() return mark_safe(renderer.render(template_name, context)) def build_attrs(self, base_attrs, extra_attrs=None): """Build an attribute dictionary.""" return {**base_attrs, **(extra_attrs or {})} def value_from_datadict(self, data, files, name): """ Given a dictionary of data and this widget's name, return the value of this widget or None if it's not provided. """ return data.get(name) def value_omitted_from_data(self, data, files, name): return name not in data def id_for_label(self, id_): """ Return the HTML ID attribute of this Widget for use by a <label>, given the ID of the field. Return None if no ID is available. This hook is necessary because some widgets have multiple HTML elements and, thus, multiple IDs. In that case, this method should return an ID value that corresponds to the first ID in the widget's tags. """ return id_ def use_required_attribute(self, initial): return not self.is_hidden class Input(Widget): """ Base class for all <input> widgets. """ input_type = None # Subclasses must define this. template_name = 'django/forms/widgets/input.html' def __init__(self, attrs=None): if attrs is not None: attrs = attrs.copy() self.input_type = attrs.pop('type', self.input_type) super().__init__(attrs) def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) context['widget']['type'] = self.input_type return context class TextInput(Input): input_type = 'text' template_name = 'django/forms/widgets/text.html' class NumberInput(Input): input_type = 'number' template_name = 'django/forms/widgets/number.html' class EmailInput(Input): input_type = 'email' template_name = 'django/forms/widgets/email.html' class URLInput(Input): input_type = 'url' template_name = 'django/forms/widgets/url.html' class PasswordInput(Input): input_type = 'password' template_name = 'django/forms/widgets/password.html' def __init__(self, attrs=None, render_value=False): super().__init__(attrs) self.render_value = render_value def get_context(self, name, value, attrs): if not self.render_value: value = None return super().get_context(name, value, attrs) class HiddenInput(Input): input_type = 'hidden' template_name = 'django/forms/widgets/hidden.html' class MultipleHiddenInput(HiddenInput): """ Handle <input type="hidden"> for fields that have a list of values. """ template_name = 'django/forms/widgets/multiple_hidden.html' def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) final_attrs = context['widget']['attrs'] id_ = context['widget']['attrs'].get('id') subwidgets = [] for index, value_ in enumerate(context['widget']['value']): widget_attrs = final_attrs.copy() if id_: # An ID attribute was given. Add a numeric index as a suffix # so that the inputs don't all have the same ID attribute. 
widget_attrs['id'] = '%s_%s' % (id_, index) widget = HiddenInput() widget.is_required = self.is_required subwidgets.append(widget.get_context(name, value_, widget_attrs)['widget']) context['widget']['subwidgets'] = subwidgets return context def value_from_datadict(self, data, files, name): try: getter = data.getlist except AttributeError: getter = data.get return getter(name) def format_value(self, value): return [] if value is None else value class FileInput(Input): input_type = 'file' needs_multipart_form = True template_name = 'django/forms/widgets/file.html' def format_value(self, value): """File input never renders a value.""" return def value_from_datadict(self, data, files, name): "File widgets take data from FILES, not POST" return files.get(name) def value_omitted_from_data(self, data, files, name): return name not in files def use_required_attribute(self, initial): return super().use_required_attribute(initial) and not initial FILE_INPUT_CONTRADICTION = object() class ClearableFileInput(FileInput): clear_checkbox_label = _('Clear') initial_text = _('Currently') input_text = _('Change') template_name = 'django/forms/widgets/clearable_file_input.html' def clear_checkbox_name(self, name): """ Given the name of the file input, return the name of the clear checkbox input. """ return name + '-clear' def clear_checkbox_id(self, name): """ Given the name of the clear checkbox input, return the HTML id for it. """ return name + '_id' def is_initial(self, value): """ Return whether value is considered to be initial value. """ return bool(value and getattr(value, 'url', False)) def format_value(self, value): """ Return the file object if it has a defined url attribute. """ if self.is_initial(value): return value def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) checkbox_name = self.clear_checkbox_name(name) checkbox_id = self.clear_checkbox_id(checkbox_name) context['widget'].update({ 'checkbox_name': checkbox_name, 'checkbox_id': checkbox_id, 'is_initial': self.is_initial(value), 'input_text': self.input_text, 'initial_text': self.initial_text, 'clear_checkbox_label': self.clear_checkbox_label, }) return context def value_from_datadict(self, data, files, name): upload = super().value_from_datadict(data, files, name) if not self.is_required and CheckboxInput().value_from_datadict( data, files, self.clear_checkbox_name(name)): if upload: # If the user contradicts themselves (uploads a new file AND # checks the "clear" checkbox), we return a unique marker # object that FileField will turn into a ValidationError. 
return FILE_INPUT_CONTRADICTION # False signals to clear any existing value, as opposed to just None return False return upload def value_omitted_from_data(self, data, files, name): return ( super().value_omitted_from_data(data, files, name) and self.clear_checkbox_name(name) not in data ) class Textarea(Widget): template_name = 'django/forms/widgets/textarea.html' def __init__(self, attrs=None): # Use slightly better defaults than HTML's 20x2 box default_attrs = {'cols': '40', 'rows': '10'} if attrs: default_attrs.update(attrs) super().__init__(default_attrs) class DateTimeBaseInput(TextInput): format_key = '' supports_microseconds = False def __init__(self, attrs=None, format=None): super().__init__(attrs) self.format = format or None def format_value(self, value): return formats.localize_input(value, self.format or formats.get_format(self.format_key)[0]) class DateInput(DateTimeBaseInput): format_key = 'DATE_INPUT_FORMATS' template_name = 'django/forms/widgets/date.html' class DateTimeInput(DateTimeBaseInput): format_key = 'DATETIME_INPUT_FORMATS' template_name = 'django/forms/widgets/datetime.html' class TimeInput(DateTimeBaseInput): format_key = 'TIME_INPUT_FORMATS' template_name = 'django/forms/widgets/time.html' # Defined at module level so that CheckboxInput is picklable (#17976) def boolean_check(v): return not (v is False or v is None or v == '') class CheckboxInput(Input): input_type = 'checkbox' template_name = 'django/forms/widgets/checkbox.html' def __init__(self, attrs=None, check_test=None): super().__init__(attrs) # check_test is a callable that takes a value and returns True # if the checkbox should be checked for that value. self.check_test = boolean_check if check_test is None else check_test def format_value(self, value): """Only return the 'value' attribute if value isn't empty.""" if value is True or value is False or value is None or value == '': return return str(value) def get_context(self, name, value, attrs): if self.check_test(value): attrs = {**(attrs or {}), 'checked': True} return super().get_context(name, value, attrs) def value_from_datadict(self, data, files, name): if name not in data: # A missing value means False because HTML form submission does not # send results for unselected checkboxes. return False value = data.get(name) # Translate true and false strings to boolean values. values = {'true': True, 'false': False} if isinstance(value, str): value = values.get(value.lower(), value) return bool(value) def value_omitted_from_data(self, data, files, name): # HTML checkboxes don't appear in POST data if not checked, so it's # never known if the value is actually omitted. return False class ChoiceWidget(Widget): allow_multiple_selected = False input_type = None template_name = None option_template_name = None add_id_index = True checked_attribute = {'checked': True} option_inherits_attrs = True def __init__(self, attrs=None, choices=()): super().__init__(attrs) # choices can be any iterable, but we may need to render this widget # multiple times. Thus, collapse it into a list so it can be consumed # more than once. self.choices = list(choices) def __deepcopy__(self, memo): obj = copy.copy(self) obj.attrs = self.attrs.copy() obj.choices = copy.copy(self.choices) memo[id(self)] = obj return obj def subwidgets(self, name, value, attrs=None): """ Yield all "subwidgets" of this widget. Used to enable iterating options from a BoundField for choice widgets. 
""" value = self.format_value(value) yield from self.options(name, value, attrs) def options(self, name, value, attrs=None): """Yield a flat list of options for this widgets.""" for group in self.optgroups(name, value, attrs): yield from group[1] def optgroups(self, name, value, attrs=None): """Return a list of optgroups for this widget.""" groups = [] has_selected = False for index, (option_value, option_label) in enumerate(self.choices): if option_value is None: option_value = '' subgroup = [] if isinstance(option_label, (list, tuple)): group_name = option_value subindex = 0 choices = option_label else: group_name = None subindex = None choices = [(option_value, option_label)] groups.append((group_name, subgroup, index)) for subvalue, sublabel in choices: selected = ( (not has_selected or self.allow_multiple_selected) and str(subvalue) in value ) has_selected |= selected subgroup.append(self.create_option( name, subvalue, sublabel, selected, index, subindex=subindex, attrs=attrs, )) if subindex is not None: subindex += 1 return groups def create_option(self, name, value, label, selected, index, subindex=None, attrs=None): index = str(index) if subindex is None else "%s_%s" % (index, subindex) option_attrs = self.build_attrs(self.attrs, attrs) if self.option_inherits_attrs else {} if selected: option_attrs.update(self.checked_attribute) if 'id' in option_attrs: option_attrs['id'] = self.id_for_label(option_attrs['id'], index) return { 'name': name, 'value': value, 'label': label, 'selected': selected, 'index': index, 'attrs': option_attrs, 'type': self.input_type, 'template_name': self.option_template_name, 'wrap_label': True, } def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) context['widget']['optgroups'] = self.optgroups(name, context['widget']['value'], attrs) return context def id_for_label(self, id_, index='0'): """ Use an incremented id for each option where the main widget references the zero index. """ if id_ and self.add_id_index: id_ = '%s_%s' % (id_, index) return id_ def value_from_datadict(self, data, files, name): getter = data.get if self.allow_multiple_selected: try: getter = data.getlist except AttributeError: pass return getter(name) def format_value(self, value): """Return selected values as a list.""" if value is None and self.allow_multiple_selected: return [] if not isinstance(value, (tuple, list)): value = [value] return [str(v) if v is not None else '' for v in value] class Select(ChoiceWidget): input_type = 'select' template_name = 'django/forms/widgets/select.html' option_template_name = 'django/forms/widgets/select_option.html' add_id_index = False checked_attribute = {'selected': True} option_inherits_attrs = False def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) if self.allow_multiple_selected: context['widget']['attrs']['multiple'] = True return context @staticmethod def _choice_has_empty_value(choice): """Return True if the choice's value is empty string or None.""" value, _ = choice return value is None or value == '' def use_required_attribute(self, initial): """ Don't render 'required' if the first <option> has a value, as that's invalid HTML. """ use_required_attribute = super().use_required_attribute(initial) # 'required' is always okay for <select multiple>. 
if self.allow_multiple_selected: return use_required_attribute first_choice = next(iter(self.choices), None) return use_required_attribute and first_choice is not None and self._choice_has_empty_value(first_choice) class NullBooleanSelect(Select): """ A Select Widget intended to be used with NullBooleanField. """ def __init__(self, attrs=None): choices = ( ('unknown', _('Unknown')), ('true', _('Yes')), ('false', _('No')), ) super().__init__(attrs, choices) def format_value(self, value): try: return { True: 'true', False: 'false', 'true': 'true', 'false': 'false', # For backwards compatibility with Django < 2.2. '2': 'true', '3': 'false', }[value] except KeyError: return 'unknown' def value_from_datadict(self, data, files, name): value = data.get(name) return { True: True, 'True': True, 'False': False, False: False, 'true': True, 'false': False, # For backwards compatibility with Django < 2.2. '2': True, '3': False, }.get(value) class SelectMultiple(Select): allow_multiple_selected = True def value_from_datadict(self, data, files, name): try: getter = data.getlist except AttributeError: getter = data.get return getter(name) def value_omitted_from_data(self, data, files, name): # An unselected <select multiple> doesn't appear in POST data, so it's # never known if the value is actually omitted. return False class RadioSelect(ChoiceWidget): input_type = 'radio' template_name = 'django/forms/widgets/radio.html' option_template_name = 'django/forms/widgets/radio_option.html' class CheckboxSelectMultiple(ChoiceWidget): allow_multiple_selected = True input_type = 'checkbox' template_name = 'django/forms/widgets/checkbox_select.html' option_template_name = 'django/forms/widgets/checkbox_option.html' def use_required_attribute(self, initial): # Don't use the 'required' attribute because browser validation would # require all checkboxes to be checked instead of at least one. return False def value_omitted_from_data(self, data, files, name): # HTML checkboxes don't appear in POST data if not checked, so it's # never known if the value is actually omitted. return False def id_for_label(self, id_, index=None): """ Don't include for="field_0" in <label> because clicking such a label would toggle the first checkbox. """ if index is None: return '' return super().id_for_label(id_, index) class MultiWidget(Widget): """ A widget that is composed of multiple widgets. In addition to the values added by Widget.get_context(), this widget adds a list of subwidgets to the context as widget['subwidgets']. These can be looped over and rendered like normal widgets. You'll probably want to use this class with MultiValueField. """ template_name = 'django/forms/widgets/multiwidget.html' def __init__(self, widgets, attrs=None): if isinstance(widgets, dict): self.widgets_names = [ ('_%s' % name) if name else '' for name in widgets ] widgets = widgets.values() else: self.widgets_names = ['_%s' % i for i in range(len(widgets))] self.widgets = [w() if isinstance(w, type) else w for w in widgets] super().__init__(attrs) @property def is_hidden(self): return all(w.is_hidden for w in self.widgets) def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) if self.is_localized: for widget in self.widgets: widget.is_localized = self.is_localized # value is a list of values, each corresponding to a widget # in self.widgets. 
if not isinstance(value, list): value = self.decompress(value) final_attrs = context['widget']['attrs'] input_type = final_attrs.pop('type', None) id_ = final_attrs.get('id') subwidgets = [] for i, (widget_name, widget) in enumerate(zip(self.widgets_names, self.widgets)): if input_type is not None: widget.input_type = input_type widget_name = name + widget_name try: widget_value = value[i] except IndexError: widget_value = None if id_: widget_attrs = final_attrs.copy() widget_attrs['id'] = '%s_%s' % (id_, i) else: widget_attrs = final_attrs subwidgets.append(widget.get_context(widget_name, widget_value, widget_attrs)['widget']) context['widget']['subwidgets'] = subwidgets return context def id_for_label(self, id_): if id_: id_ += '_0' return id_ def value_from_datadict(self, data, files, name): return [ widget.value_from_datadict(data, files, name + widget_name) for widget_name, widget in zip(self.widgets_names, self.widgets) ] def value_omitted_from_data(self, data, files, name): return all( widget.value_omitted_from_data(data, files, name + widget_name) for widget_name, widget in zip(self.widgets_names, self.widgets) ) def decompress(self, value): """ Return a list of decompressed values for the given compressed value. The given value can be assumed to be valid, but not necessarily non-empty. """ raise NotImplementedError('Subclasses must implement this method.') def _get_media(self): """ Media for a multiwidget is the combination of all media of the subwidgets. """ media = Media() for w in self.widgets: media = media + w.media return media media = property(_get_media) def __deepcopy__(self, memo): obj = super().__deepcopy__(memo) obj.widgets = copy.deepcopy(self.widgets) return obj @property def needs_multipart_form(self): return any(w.needs_multipart_form for w in self.widgets) class SplitDateTimeWidget(MultiWidget): """ A widget that splits datetime input into two <input type="text"> boxes. """ supports_microseconds = False template_name = 'django/forms/widgets/splitdatetime.html' def __init__(self, attrs=None, date_format=None, time_format=None, date_attrs=None, time_attrs=None): widgets = ( DateInput( attrs=attrs if date_attrs is None else date_attrs, format=date_format, ), TimeInput( attrs=attrs if time_attrs is None else time_attrs, format=time_format, ), ) super().__init__(widgets) def decompress(self, value): if value: value = to_current_timezone(value) return [value.date(), value.time()] return [None, None] class SplitHiddenDateTimeWidget(SplitDateTimeWidget): """ A widget that splits datetime input into two <input type="hidden"> inputs. """ template_name = 'django/forms/widgets/splithiddendatetime.html' def __init__(self, attrs=None, date_format=None, time_format=None, date_attrs=None, time_attrs=None): super().__init__(attrs, date_format, time_format, date_attrs, time_attrs) for widget in self.widgets: widget.input_type = 'hidden' class SelectDateWidget(Widget): """ A widget that splits date input into three <select> boxes. This also serves as an example of a Widget that has more than one HTML element and hence implements value_from_datadict. """ none_value = ('', '---') month_field = '%s_month' day_field = '%s_day' year_field = '%s_year' template_name = 'django/forms/widgets/select_date.html' input_type = 'select' select_widget = Select date_re = _lazy_re_compile(r'(\d{4}|0)-(\d\d?)-(\d\d?)$') def __init__(self, attrs=None, years=None, months=None, empty_label=None): self.attrs = attrs or {} # Optional list or tuple of years to use in the "year" select box. 
if years: self.years = years else: this_year = datetime.date.today().year self.years = range(this_year, this_year + 10) # Optional dict of months to use in the "month" select box. if months: self.months = months else: self.months = MONTHS # Optional string, list, or tuple to use as empty_label. if isinstance(empty_label, (list, tuple)): if not len(empty_label) == 3: raise ValueError('empty_label list/tuple must have 3 elements.') self.year_none_value = ('', empty_label[0]) self.month_none_value = ('', empty_label[1]) self.day_none_value = ('', empty_label[2]) else: if empty_label is not None: self.none_value = ('', empty_label) self.year_none_value = self.none_value self.month_none_value = self.none_value self.day_none_value = self.none_value def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) date_context = {} year_choices = [(i, str(i)) for i in self.years] if not self.is_required: year_choices.insert(0, self.year_none_value) year_name = self.year_field % name date_context['year'] = self.select_widget(attrs, choices=year_choices).get_context( name=year_name, value=context['widget']['value']['year'], attrs={**context['widget']['attrs'], 'id': 'id_%s' % year_name}, ) month_choices = list(self.months.items()) if not self.is_required: month_choices.insert(0, self.month_none_value) month_name = self.month_field % name date_context['month'] = self.select_widget(attrs, choices=month_choices).get_context( name=month_name, value=context['widget']['value']['month'], attrs={**context['widget']['attrs'], 'id': 'id_%s' % month_name}, ) day_choices = [(i, i) for i in range(1, 32)] if not self.is_required: day_choices.insert(0, self.day_none_value) day_name = self.day_field % name date_context['day'] = self.select_widget(attrs, choices=day_choices,).get_context( name=day_name, value=context['widget']['value']['day'], attrs={**context['widget']['attrs'], 'id': 'id_%s' % day_name}, ) subwidgets = [] for field in self._parse_date_fmt(): subwidgets.append(date_context[field]['widget']) context['widget']['subwidgets'] = subwidgets return context def format_value(self, value): """ Return a dict containing the year, month, and day of the current value. Use dict instead of a datetime to allow invalid dates such as February 31 to display correctly. """ year, month, day = None, None, None if isinstance(value, (datetime.date, datetime.datetime)): year, month, day = value.year, value.month, value.day elif isinstance(value, str): match = self.date_re.match(value) if match: # Convert any zeros in the date to empty strings to match the # empty option value. 
year, month, day = [int(val) or '' for val in match.groups()] else: input_format = get_format('DATE_INPUT_FORMATS')[0] try: d = datetime.datetime.strptime(value, input_format) except ValueError: pass else: year, month, day = d.year, d.month, d.day return {'year': year, 'month': month, 'day': day} @staticmethod def _parse_date_fmt(): fmt = get_format('DATE_FORMAT') escaped = False for char in fmt: if escaped: escaped = False elif char == '\\': escaped = True elif char in 'Yy': yield 'year' elif char in 'bEFMmNn': yield 'month' elif char in 'dj': yield 'day' def id_for_label(self, id_): for first_select in self._parse_date_fmt(): return '%s_%s' % (id_, first_select) return '%s_month' % id_ def value_from_datadict(self, data, files, name): y = data.get(self.year_field % name) m = data.get(self.month_field % name) d = data.get(self.day_field % name) if y == m == d == '': return None if y is not None and m is not None and d is not None: input_format = get_format('DATE_INPUT_FORMATS')[0] input_format = formats.sanitize_strftime_format(input_format) try: date_value = datetime.date(int(y), int(m), int(d)) except ValueError: # Return pseudo-ISO dates with zeros for any unselected values, # e.g. '2017-0-23'. return '%s-%s-%s' % (y or 0, m or 0, d or 0) return date_value.strftime(input_format) return data.get(name) def value_omitted_from_data(self, data, files, name): return not any( ('{}_{}'.format(name, interval) in data) for interval in ('year', 'month', 'day') )
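# A minimal, standalone sketch (not part of Django) of how
# SelectDateWidget._parse_date_fmt() above maps a Django DATE_FORMAT string to
# the on-screen order of the year/month/day selects. The format string in the
# usage check is 'N j, Y' (Django's default DATE_FORMAT), used here purely as
# an example rather than read from project settings.
def parse_date_fmt(fmt):
    escaped = False
    for char in fmt:
        if escaped:
            escaped = False
        elif char == '\\':
            escaped = True
        elif char in 'Yy':
            yield 'year'
        elif char in 'bEFMmNn':
            yield 'month'
        elif char in 'dj':
            yield 'day'


if __name__ == '__main__':
    # "Jan. 1, 2000" style formats put the month select first, then day, year.
    assert list(parse_date_fmt('N j, Y')) == ['month', 'day', 'year']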
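# A minimal, standalone sketch (not part of Django) of the idea behind
# Media.merge() near the top of this module: every asset list contributes
# "comes after" edges to a dependency graph, which is then topologically
# sorted so the relative order within each input list is preserved. The
# stdlib graphlib is used here instead of Django's stable_topological_sort,
# so tie-breaking between unrelated assets may differ from the real widget
# machinery; merge_asset_lists() is an illustrative name, not a Django API.
from collections import defaultdict
from graphlib import CycleError, TopologicalSorter


def merge_asset_lists(*lists):
    graph = defaultdict(set)  # asset -> set of assets that must come before it
    for list_ in filter(None, lists):
        head = list_[0]
        graph.setdefault(head, set())
        for item in list_:
            if item != head:
                graph[item].add(head)
            head = item
    try:
        return list(TopologicalSorter(graph).static_order())
    except CycleError:
        # Conflicting relative orders; Media.merge() warns and falls back to
        # first-seen order in the equivalent situation.
        return list(dict.fromkeys(item for list_ in lists for item in list_))


if __name__ == '__main__':
    merged = merge_asset_lists(['jquery.js', 'widget.js'], ['jquery.js', 'calendar.js'])
    assert merged.index('jquery.js') < merged.index('widget.js')
    assert merged.index('jquery.js') < merged.index('calendar.js')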
import ipaddress import re import warnings from pathlib import Path from urllib.parse import urlsplit, urlunsplit from django.core.exceptions import ValidationError from django.utils.deconstruct import deconstructible from django.utils.deprecation import RemovedInDjango41Warning from django.utils.encoding import punycode from django.utils.ipv6 import is_valid_ipv6_address from django.utils.regex_helper import _lazy_re_compile from django.utils.translation import gettext_lazy as _, ngettext_lazy # These values, if given to validate(), will trigger the self.required check. EMPTY_VALUES = (None, '', [], (), {}) @deconstructible class RegexValidator: regex = '' message = _('Enter a valid value.') code = 'invalid' inverse_match = False flags = 0 def __init__(self, regex=None, message=None, code=None, inverse_match=None, flags=None): if regex is not None: self.regex = regex if message is not None: self.message = message if code is not None: self.code = code if inverse_match is not None: self.inverse_match = inverse_match if flags is not None: self.flags = flags if self.flags and not isinstance(self.regex, str): raise TypeError("If the flags are set, regex must be a regular expression string.") self.regex = _lazy_re_compile(self.regex, self.flags) def __call__(self, value): """ Validate that the input contains (or does *not* contain, if inverse_match is True) a match for the regular expression. """ regex_matches = self.regex.search(str(value)) invalid_input = regex_matches if self.inverse_match else not regex_matches if invalid_input: raise ValidationError(self.message, code=self.code, params={'value': value}) def __eq__(self, other): return ( isinstance(other, RegexValidator) and self.regex.pattern == other.regex.pattern and self.regex.flags == other.regex.flags and (self.message == other.message) and (self.code == other.code) and (self.inverse_match == other.inverse_match) ) @deconstructible class URLValidator(RegexValidator): ul = '\u00a1-\uffff' # Unicode letters range (must not be a raw string). # IP patterns ipv4_re = r'(?:25[0-5]|2[0-4]\d|[0-1]?\d?\d)(?:\.(?:25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}' ipv6_re = r'\[[0-9a-f:.]+\]' # (simple regex, validated later) # Host patterns hostname_re = r'[a-z' + ul + r'0-9](?:[a-z' + ul + r'0-9-]{0,61}[a-z' + ul + r'0-9])?' # Max length for domain name labels is 63 characters per RFC 1034 sec. 3.1 domain_re = r'(?:\.(?!-)[a-z' + ul + r'0-9-]{1,63}(?<!-))*' tld_re = ( r'\.' # dot r'(?!-)' # can't start with a dash r'(?:[a-z' + ul + '-]{2,63}' # domain label r'|xn--[a-z0-9]{1,59})' # or punycode label r'(?<!-)' # can't end with a dash r'\.?' # may have a trailing dot ) host_re = '(' + hostname_re + domain_re + tld_re + '|localhost)' regex = _lazy_re_compile( r'^(?:[a-z0-9.+-]*)://' # scheme is validated separately r'(?:[^\s:@/]+(?::[^\s:@/]*)?@)?' # user:pass authentication r'(?:' + ipv4_re + '|' + ipv6_re + '|' + host_re + ')' r'(?::\d{2,5})?' # port r'(?:[/?#][^\s]*)?' # resource path r'\Z', re.IGNORECASE) message = _('Enter a valid URL.') schemes = ['http', 'https', 'ftp', 'ftps'] unsafe_chars = frozenset('\t\r\n') def __init__(self, schemes=None, **kwargs): super().__init__(**kwargs) if schemes is not None: self.schemes = schemes def __call__(self, value): if not isinstance(value, str): raise ValidationError(self.message, code=self.code, params={'value': value}) if self.unsafe_chars.intersection(value): raise ValidationError(self.message, code=self.code, params={'value': value}) # Check if the scheme is valid. 
scheme = value.split('://')[0].lower() if scheme not in self.schemes: raise ValidationError(self.message, code=self.code, params={'value': value}) # Then check full URL try: super().__call__(value) except ValidationError as e: # Trivial case failed. Try for possible IDN domain if value: try: scheme, netloc, path, query, fragment = urlsplit(value) except ValueError: # for example, "Invalid IPv6 URL" raise ValidationError(self.message, code=self.code, params={'value': value}) try: netloc = punycode(netloc) # IDN -> ACE except UnicodeError: # invalid domain part raise e url = urlunsplit((scheme, netloc, path, query, fragment)) super().__call__(url) else: raise else: # Now verify IPv6 in the netloc part host_match = re.search(r'^\[(.+)\](?::\d{2,5})?$', urlsplit(value).netloc) if host_match: potential_ip = host_match[1] try: validate_ipv6_address(potential_ip) except ValidationError: raise ValidationError(self.message, code=self.code, params={'value': value}) # The maximum length of a full host name is 253 characters per RFC 1034 # section 3.1. It's defined to be 255 bytes or less, but this includes # one byte for the length of the name and one byte for the trailing dot # that's used to indicate absolute names in DNS. if len(urlsplit(value).hostname) > 253: raise ValidationError(self.message, code=self.code, params={'value': value}) integer_validator = RegexValidator( _lazy_re_compile(r'^-?\d+\Z'), message=_('Enter a valid integer.'), code='invalid', ) def validate_integer(value): return integer_validator(value) @deconstructible class EmailValidator: message = _('Enter a valid email address.') code = 'invalid' user_regex = _lazy_re_compile( r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z" # dot-atom r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"\Z)', # quoted-string re.IGNORECASE) domain_regex = _lazy_re_compile( # max length for domain name labels is 63 characters per RFC 1034 r'((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+)(?:[A-Z0-9-]{2,63}(?<!-))\Z', re.IGNORECASE) literal_regex = _lazy_re_compile( # literal form, ipv4 or ipv6 address (SMTP 4.1.3) r'\[([A-f0-9:.]+)\]\Z', re.IGNORECASE) domain_allowlist = ['localhost'] @property def domain_whitelist(self): warnings.warn( 'The domain_whitelist attribute is deprecated in favor of ' 'domain_allowlist.', RemovedInDjango41Warning, stacklevel=2, ) return self.domain_allowlist @domain_whitelist.setter def domain_whitelist(self, allowlist): warnings.warn( 'The domain_whitelist attribute is deprecated in favor of ' 'domain_allowlist.', RemovedInDjango41Warning, stacklevel=2, ) self.domain_allowlist = allowlist def __init__(self, message=None, code=None, allowlist=None, *, whitelist=None): if whitelist is not None: allowlist = whitelist warnings.warn( 'The whitelist argument is deprecated in favor of allowlist.', RemovedInDjango41Warning, stacklevel=2, ) if message is not None: self.message = message if code is not None: self.code = code if allowlist is not None: self.domain_allowlist = allowlist def __call__(self, value): if not value or '@' not in value: raise ValidationError(self.message, code=self.code, params={'value': value}) user_part, domain_part = value.rsplit('@', 1) if not self.user_regex.match(user_part): raise ValidationError(self.message, code=self.code, params={'value': value}) if (domain_part not in self.domain_allowlist and not self.validate_domain_part(domain_part)): # Try for possible IDN domain-part try: domain_part = punycode(domain_part) except UnicodeError: pass else: if 
self.validate_domain_part(domain_part): return raise ValidationError(self.message, code=self.code, params={'value': value}) def validate_domain_part(self, domain_part): if self.domain_regex.match(domain_part): return True literal_match = self.literal_regex.match(domain_part) if literal_match: ip_address = literal_match[1] try: validate_ipv46_address(ip_address) return True except ValidationError: pass return False def __eq__(self, other): return ( isinstance(other, EmailValidator) and (self.domain_allowlist == other.domain_allowlist) and (self.message == other.message) and (self.code == other.code) ) validate_email = EmailValidator() slug_re = _lazy_re_compile(r'^[-a-zA-Z0-9_]+\Z') validate_slug = RegexValidator( slug_re, # Translators: "letters" means latin letters: a-z and A-Z. _('Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.'), 'invalid' ) slug_unicode_re = _lazy_re_compile(r'^[-\w]+\Z') validate_unicode_slug = RegexValidator( slug_unicode_re, _('Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.'), 'invalid' ) def validate_ipv4_address(value): try: ipaddress.IPv4Address(value) except ValueError: raise ValidationError(_('Enter a valid IPv4 address.'), code='invalid', params={'value': value}) def validate_ipv6_address(value): if not is_valid_ipv6_address(value): raise ValidationError(_('Enter a valid IPv6 address.'), code='invalid', params={'value': value}) def validate_ipv46_address(value): try: validate_ipv4_address(value) except ValidationError: try: validate_ipv6_address(value) except ValidationError: raise ValidationError(_('Enter a valid IPv4 or IPv6 address.'), code='invalid', params={'value': value}) ip_address_validator_map = { 'both': ([validate_ipv46_address], _('Enter a valid IPv4 or IPv6 address.')), 'ipv4': ([validate_ipv4_address], _('Enter a valid IPv4 address.')), 'ipv6': ([validate_ipv6_address], _('Enter a valid IPv6 address.')), } def ip_address_validators(protocol, unpack_ipv4): """ Depending on the given parameters, return the appropriate validators for the GenericIPAddressField. """ if protocol != 'both' and unpack_ipv4: raise ValueError( "You can only use `unpack_ipv4` if `protocol` is set to 'both'") try: return ip_address_validator_map[protocol.lower()] except KeyError: raise ValueError("The protocol '%s' is unknown. Supported: %s" % (protocol, list(ip_address_validator_map))) def int_list_validator(sep=',', message=None, code='invalid', allow_negative=False): regexp = _lazy_re_compile(r'^%(neg)s\d+(?:%(sep)s%(neg)s\d+)*\Z' % { 'neg': '(-)?' 
if allow_negative else '', 'sep': re.escape(sep), }) return RegexValidator(regexp, message=message, code=code) validate_comma_separated_integer_list = int_list_validator( message=_('Enter only digits separated by commas.'), ) @deconstructible class BaseValidator: message = _('Ensure this value is %(limit_value)s (it is %(show_value)s).') code = 'limit_value' def __init__(self, limit_value, message=None): self.limit_value = limit_value if message: self.message = message def __call__(self, value): cleaned = self.clean(value) limit_value = self.limit_value() if callable(self.limit_value) else self.limit_value params = {'limit_value': limit_value, 'show_value': cleaned, 'value': value} if self.compare(cleaned, limit_value): raise ValidationError(self.message, code=self.code, params=params) def __eq__(self, other): if not isinstance(other, self.__class__): return NotImplemented return ( self.limit_value == other.limit_value and self.message == other.message and self.code == other.code ) def compare(self, a, b): return a is not b def clean(self, x): return x @deconstructible class MaxValueValidator(BaseValidator): message = _('Ensure this value is less than or equal to %(limit_value)s.') code = 'max_value' def compare(self, a, b): return a > b @deconstructible class MinValueValidator(BaseValidator): message = _('Ensure this value is greater than or equal to %(limit_value)s.') code = 'min_value' def compare(self, a, b): return a < b @deconstructible class MinLengthValidator(BaseValidator): message = ngettext_lazy( 'Ensure this value has at least %(limit_value)d character (it has %(show_value)d).', 'Ensure this value has at least %(limit_value)d characters (it has %(show_value)d).', 'limit_value') code = 'min_length' def compare(self, a, b): return a < b def clean(self, x): return len(x) @deconstructible class MaxLengthValidator(BaseValidator): message = ngettext_lazy( 'Ensure this value has at most %(limit_value)d character (it has %(show_value)d).', 'Ensure this value has at most %(limit_value)d characters (it has %(show_value)d).', 'limit_value') code = 'max_length' def compare(self, a, b): return a > b def clean(self, x): return len(x) @deconstructible class DecimalValidator: """ Validate that the input does not exceed the maximum number of digits expected, otherwise raise ValidationError. """ messages = { 'invalid': _('Enter a number.'), 'max_digits': ngettext_lazy( 'Ensure that there are no more than %(max)s digit in total.', 'Ensure that there are no more than %(max)s digits in total.', 'max' ), 'max_decimal_places': ngettext_lazy( 'Ensure that there are no more than %(max)s decimal place.', 'Ensure that there are no more than %(max)s decimal places.', 'max' ), 'max_whole_digits': ngettext_lazy( 'Ensure that there are no more than %(max)s digit before the decimal point.', 'Ensure that there are no more than %(max)s digits before the decimal point.', 'max' ), } def __init__(self, max_digits, decimal_places): self.max_digits = max_digits self.decimal_places = decimal_places def __call__(self, value): digit_tuple, exponent = value.as_tuple()[1:] if exponent in {'F', 'n', 'N'}: raise ValidationError(self.messages['invalid'], code='invalid', params={'value': value}) if exponent >= 0: # A positive exponent adds that many trailing zeros. 
digits = len(digit_tuple) + exponent decimals = 0 else: # If the absolute value of the negative exponent is larger than the # number of digits, then it's the same as the number of digits, # because it'll consume all of the digits in digit_tuple and then # add abs(exponent) - len(digit_tuple) leading zeros after the # decimal point. if abs(exponent) > len(digit_tuple): digits = decimals = abs(exponent) else: digits = len(digit_tuple) decimals = abs(exponent) whole_digits = digits - decimals if self.max_digits is not None and digits > self.max_digits: raise ValidationError( self.messages['max_digits'], code='max_digits', params={'max': self.max_digits, 'value': value}, ) if self.decimal_places is not None and decimals > self.decimal_places: raise ValidationError( self.messages['max_decimal_places'], code='max_decimal_places', params={'max': self.decimal_places, 'value': value}, ) if (self.max_digits is not None and self.decimal_places is not None and whole_digits > (self.max_digits - self.decimal_places)): raise ValidationError( self.messages['max_whole_digits'], code='max_whole_digits', params={'max': (self.max_digits - self.decimal_places), 'value': value}, ) def __eq__(self, other): return ( isinstance(other, self.__class__) and self.max_digits == other.max_digits and self.decimal_places == other.decimal_places ) @deconstructible class FileExtensionValidator: message = _( 'File extension “%(extension)s” is not allowed. ' 'Allowed extensions are: %(allowed_extensions)s.' ) code = 'invalid_extension' def __init__(self, allowed_extensions=None, message=None, code=None): if allowed_extensions is not None: allowed_extensions = [allowed_extension.lower() for allowed_extension in allowed_extensions] self.allowed_extensions = allowed_extensions if message is not None: self.message = message if code is not None: self.code = code def __call__(self, value): extension = Path(value.name).suffix[1:].lower() if self.allowed_extensions is not None and extension not in self.allowed_extensions: raise ValidationError( self.message, code=self.code, params={ 'extension': extension, 'allowed_extensions': ', '.join(self.allowed_extensions), 'value': value, } ) def __eq__(self, other): return ( isinstance(other, self.__class__) and self.allowed_extensions == other.allowed_extensions and self.message == other.message and self.code == other.code ) def get_available_image_extensions(): try: from PIL import Image except ImportError: return [] else: Image.init() return [ext.lower()[1:] for ext in Image.EXTENSION] def validate_image_file_extension(value): return FileExtensionValidator(allowed_extensions=get_available_image_extensions())(value) @deconstructible class ProhibitNullCharactersValidator: """Validate that the string doesn't contain the null character.""" message = _('Null characters are not allowed.') code = 'null_characters_not_allowed' def __init__(self, message=None, code=None): if message is not None: self.message = message if code is not None: self.code = code def __call__(self, value): if '\x00' in str(value): raise ValidationError(self.message, code=self.code, params={'value': value}) def __eq__(self, other): return ( isinstance(other, self.__class__) and self.message == other.message and self.code == other.code )
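# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of Django's source): a hedged illustration
# of how the validators above compose. Every validator is a plain callable
# that raises ValidationError for bad input and returns None otherwise. The
# literal inputs below are illustrative only; the lazily translated messages
# are evaluated only if rendered, which would additionally require configured
# settings.
if __name__ == '__main__':
    from decimal import Decimal

    # validate_email is the module-level EmailValidator() defined above.
    validate_email('user@example.com')

    # MaxLengthValidator compares len(value) against limit_value.
    try:
        MaxLengthValidator(5)('too long!')
    except ValidationError as e:
        print(e.code)  # 'max_length'

    # DecimalValidator checks total digits and decimal places.
    DecimalValidator(max_digits=4, decimal_places=2)(Decimal('12.34'))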
3a97d51040285eacd98bd7967577275d5c2eda2bef990c6f995b62c214bb3fa5
import datetime import json import mimetypes import os import re import sys import time from collections.abc import Mapping from email.header import Header from http.client import responses from urllib.parse import quote, urlparse from django.conf import settings from django.core import signals, signing from django.core.exceptions import DisallowedRedirect from django.core.serializers.json import DjangoJSONEncoder from django.http.cookie import SimpleCookie from django.utils import timezone from django.utils.datastructures import ( CaseInsensitiveMapping, _destruct_iterable_mapping_values, ) from django.utils.encoding import iri_to_uri from django.utils.http import http_date from django.utils.regex_helper import _lazy_re_compile _charset_from_content_type_re = _lazy_re_compile(r';\s*charset=(?P<charset>[^\s;]+)', re.I) class ResponseHeaders(CaseInsensitiveMapping): def __init__(self, data): """ Populate the initial data using __setitem__ to ensure values are correctly encoded. """ if not isinstance(data, Mapping): data = {k: v for k, v in _destruct_iterable_mapping_values(data)} self._store = {} for header, value in data.items(): self[header] = value def _convert_to_charset(self, value, charset, mime_encode=False): """ Convert headers key/value to ascii/latin-1 native strings. `charset` must be 'ascii' or 'latin-1'. If `mime_encode` is True and `value` can't be represented in the given charset, apply MIME-encoding. """ if not isinstance(value, (bytes, str)): value = str(value) if ( (isinstance(value, bytes) and (b'\n' in value or b'\r' in value)) or (isinstance(value, str) and ('\n' in value or '\r' in value)) ): raise BadHeaderError("Header values can't contain newlines (got %r)" % value) try: if isinstance(value, str): # Ensure string is valid in given charset value.encode(charset) else: # Convert bytestring using given charset value = value.decode(charset) except UnicodeError as e: if mime_encode: value = Header(value, 'utf-8', maxlinelen=sys.maxsize).encode() else: e.reason += ', HTTP response headers must be in %s format' % charset raise return value def __delitem__(self, key): self.pop(key) def __setitem__(self, key, value): key = self._convert_to_charset(key, 'ascii') value = self._convert_to_charset(value, 'latin-1', mime_encode=True) self._store[key.lower()] = (key, value) def pop(self, key, default=None): return self._store.pop(key.lower(), default) def setdefault(self, key, value): if key not in self: self[key] = value class BadHeaderError(ValueError): pass class HttpResponseBase: """ An HTTP response base class with dictionary-accessed headers. This class doesn't handle content. It should not be used directly. Use the HttpResponse and StreamingHttpResponse subclasses instead. """ status_code = 200 def __init__(self, content_type=None, status=None, reason=None, charset=None, headers=None): self.headers = ResponseHeaders(headers or {}) self._charset = charset if content_type and 'Content-Type' in self.headers: raise ValueError( "'headers' must not contain 'Content-Type' when the " "'content_type' parameter is provided." ) if 'Content-Type' not in self.headers: if content_type is None: content_type = 'text/html; charset=%s' % self.charset self.headers['Content-Type'] = content_type self._resource_closers = [] # This parameter is set by the handler. It's necessary to preserve the # historical behavior of request_finished. 
self._handler_class = None self.cookies = SimpleCookie() self.closed = False if status is not None: try: self.status_code = int(status) except (ValueError, TypeError): raise TypeError('HTTP status code must be an integer.') if not 100 <= self.status_code <= 599: raise ValueError('HTTP status code must be an integer from 100 to 599.') self._reason_phrase = reason @property def reason_phrase(self): if self._reason_phrase is not None: return self._reason_phrase # Leave self._reason_phrase unset in order to use the default # reason phrase for status code. return responses.get(self.status_code, 'Unknown Status Code') @reason_phrase.setter def reason_phrase(self, value): self._reason_phrase = value @property def charset(self): if self._charset is not None: return self._charset content_type = self.get('Content-Type', '') matched = _charset_from_content_type_re.search(content_type) if matched: # Extract the charset and strip its double quotes return matched['charset'].replace('"', '') return settings.DEFAULT_CHARSET @charset.setter def charset(self, value): self._charset = value def serialize_headers(self): """HTTP headers as a bytestring.""" def to_bytes(val, encoding): return val if isinstance(val, bytes) else val.encode(encoding) headers = [ (to_bytes(key, 'ascii') + b': ' + to_bytes(value, 'latin-1')) for key, value in self.headers.items() ] return b'\r\n'.join(headers) __bytes__ = serialize_headers @property def _content_type_for_repr(self): return ', "%s"' % self.headers['Content-Type'] if 'Content-Type' in self.headers else '' def __setitem__(self, header, value): self.headers[header] = value def __delitem__(self, header): del self.headers[header] def __getitem__(self, header): return self.headers[header] def has_header(self, header): """Case-insensitive check for a header.""" return header in self.headers __contains__ = has_header def items(self): return self.headers.items() def get(self, header, alternate=None): return self.headers.get(header, alternate) def set_cookie(self, key, value='', max_age=None, expires=None, path='/', domain=None, secure=False, httponly=False, samesite=None): """ Set a cookie. ``expires`` can be: - a string in the correct format, - a naive ``datetime.datetime`` object in UTC, - an aware ``datetime.datetime`` object in any time zone. If it is a ``datetime.datetime`` object then calculate ``max_age``. """ self.cookies[key] = value if expires is not None: if isinstance(expires, datetime.datetime): if timezone.is_naive(expires): expires = timezone.make_aware(expires, timezone.utc) delta = expires - datetime.datetime.now(tz=timezone.utc) # Add one second so the date matches exactly (a fraction of # time gets lost between converting to a timedelta and # then the date string). delta = delta + datetime.timedelta(seconds=1) # Just set max_age - the max_age logic will set expires. expires = None max_age = max(0, delta.days * 86400 + delta.seconds) else: self.cookies[key]['expires'] = expires else: self.cookies[key]['expires'] = '' if max_age is not None: self.cookies[key]['max-age'] = int(max_age) # IE requires expires, so set it if hasn't been already. 
if not expires: self.cookies[key]['expires'] = http_date(time.time() + max_age) if path is not None: self.cookies[key]['path'] = path if domain is not None: self.cookies[key]['domain'] = domain if secure: self.cookies[key]['secure'] = True if httponly: self.cookies[key]['httponly'] = True if samesite: if samesite.lower() not in ('lax', 'none', 'strict'): raise ValueError('samesite must be "lax", "none", or "strict".') self.cookies[key]['samesite'] = samesite def setdefault(self, key, value): """Set a header unless it has already been set.""" self.headers.setdefault(key, value) def set_signed_cookie(self, key, value, salt='', **kwargs): value = signing.get_cookie_signer(salt=key + salt).sign(value) return self.set_cookie(key, value, **kwargs) def delete_cookie(self, key, path='/', domain=None, samesite=None): # Browsers can ignore the Set-Cookie header if the cookie doesn't use # the secure flag and: # - the cookie name starts with "__Host-" or "__Secure-", or # - the samesite is "none". secure = ( key.startswith(('__Secure-', '__Host-')) or (samesite and samesite.lower() == 'none') ) self.set_cookie( key, max_age=0, path=path, domain=domain, secure=secure, expires='Thu, 01 Jan 1970 00:00:00 GMT', samesite=samesite, ) # Common methods used by subclasses def make_bytes(self, value): """Turn a value into a bytestring encoded in the output charset.""" # Per PEP 3333, this response body must be bytes. To avoid returning # an instance of a subclass, this function returns `bytes(value)`. # This doesn't make a copy when `value` already contains bytes. # Handle string types -- we can't rely on force_bytes here because: # - Python attempts str conversion first # - when self._charset != 'utf-8' it re-encodes the content if isinstance(value, (bytes, memoryview)): return bytes(value) if isinstance(value, str): return bytes(value.encode(self.charset)) # Handle non-string types. return str(value).encode(self.charset) # These methods partially implement the file-like object interface. # See https://docs.python.org/library/io.html#io.IOBase # The WSGI server must call this method upon completion of the request. # See http://blog.dscpl.com.au/2012/10/obligations-for-calling-close-on.html def close(self): for closer in self._resource_closers: try: closer() except Exception: pass # Free resources that were still referenced. self._resource_closers.clear() self.closed = True signals.request_finished.send(sender=self._handler_class) def write(self, content): raise OSError('This %s instance is not writable' % self.__class__.__name__) def flush(self): pass def tell(self): raise OSError('This %s instance cannot tell its position' % self.__class__.__name__) # These methods partially implement a stream-like object interface. # See https://docs.python.org/library/io.html#io.IOBase def readable(self): return False def seekable(self): return False def writable(self): return False def writelines(self, lines): raise OSError('This %s instance is not writable' % self.__class__.__name__) class HttpResponse(HttpResponseBase): """ An HTTP response class with a string as content. This content can be read, appended to, or replaced. """ streaming = False def __init__(self, content=b'', *args, **kwargs): super().__init__(*args, **kwargs) # Content is a bytestring. See the `content` property methods. 
self.content = content def __repr__(self): return '<%(cls)s status_code=%(status_code)d%(content_type)s>' % { 'cls': self.__class__.__name__, 'status_code': self.status_code, 'content_type': self._content_type_for_repr, } def serialize(self): """Full HTTP message, including headers, as a bytestring.""" return self.serialize_headers() + b'\r\n\r\n' + self.content __bytes__ = serialize @property def content(self): return b''.join(self._container) @content.setter def content(self, value): # Consume iterators upon assignment to allow repeated iteration. if ( hasattr(value, '__iter__') and not isinstance(value, (bytes, memoryview, str)) ): content = b''.join(self.make_bytes(chunk) for chunk in value) if hasattr(value, 'close'): try: value.close() except Exception: pass else: content = self.make_bytes(value) # Create a list of properly encoded bytestrings to support write(). self._container = [content] def __iter__(self): return iter(self._container) def write(self, content): self._container.append(self.make_bytes(content)) def tell(self): return len(self.content) def getvalue(self): return self.content def writable(self): return True def writelines(self, lines): for line in lines: self.write(line) class StreamingHttpResponse(HttpResponseBase): """ A streaming HTTP response class with an iterator as content. This should only be iterated once, when the response is streamed to the client. However, it can be appended to or replaced with a new iterator that wraps the original content (or yields entirely new content). """ streaming = True def __init__(self, streaming_content=(), *args, **kwargs): super().__init__(*args, **kwargs) # `streaming_content` should be an iterable of bytestrings. # See the `streaming_content` property methods. self.streaming_content = streaming_content @property def content(self): raise AttributeError( "This %s instance has no `content` attribute. Use " "`streaming_content` instead." % self.__class__.__name__ ) @property def streaming_content(self): return map(self.make_bytes, self._iterator) @streaming_content.setter def streaming_content(self, value): self._set_streaming_content(value) def _set_streaming_content(self, value): # Ensure we can never iterate on "value" more than once. self._iterator = iter(value) if hasattr(value, 'close'): self._resource_closers.append(value.close) def __iter__(self): return self.streaming_content def getvalue(self): return b''.join(self.streaming_content) class FileResponse(StreamingHttpResponse): """ A streaming HTTP response class optimized for files. """ block_size = 4096 def __init__(self, *args, as_attachment=False, filename='', **kwargs): self.as_attachment = as_attachment self.filename = filename super().__init__(*args, **kwargs) def _set_streaming_content(self, value): if not hasattr(value, 'read'): self.file_to_stream = None return super()._set_streaming_content(value) self.file_to_stream = filelike = value if hasattr(filelike, 'close'): self._resource_closers.append(filelike.close) value = iter(lambda: filelike.read(self.block_size), b'') self.set_headers(filelike) super()._set_streaming_content(value) def set_headers(self, filelike): """ Set some common response headers (Content-Length, Content-Type, and Content-Disposition) based on the `filelike` response content. 
""" encoding_map = { 'bzip2': 'application/x-bzip', 'gzip': 'application/gzip', 'xz': 'application/x-xz', } filename = getattr(filelike, 'name', None) filename = filename if (isinstance(filename, str) and filename) else self.filename if os.path.isabs(filename): self.headers['Content-Length'] = os.path.getsize(filelike.name) elif hasattr(filelike, 'getbuffer'): self.headers['Content-Length'] = filelike.getbuffer().nbytes if self.headers.get('Content-Type', '').startswith('text/html'): if filename: content_type, encoding = mimetypes.guess_type(filename) # Encoding isn't set to prevent browsers from automatically # uncompressing files. content_type = encoding_map.get(encoding, content_type) self.headers['Content-Type'] = content_type or 'application/octet-stream' else: self.headers['Content-Type'] = 'application/octet-stream' filename = self.filename or os.path.basename(filename) if filename: disposition = 'attachment' if self.as_attachment else 'inline' try: filename.encode('ascii') file_expr = 'filename="{}"'.format(filename) except UnicodeEncodeError: file_expr = "filename*=utf-8''{}".format(quote(filename)) self.headers['Content-Disposition'] = '{}; {}'.format(disposition, file_expr) elif self.as_attachment: self.headers['Content-Disposition'] = 'attachment' class HttpResponseRedirectBase(HttpResponse): allowed_schemes = ['http', 'https', 'ftp'] def __init__(self, redirect_to, *args, **kwargs): super().__init__(*args, **kwargs) self['Location'] = iri_to_uri(redirect_to) parsed = urlparse(str(redirect_to)) if parsed.scheme and parsed.scheme not in self.allowed_schemes: raise DisallowedRedirect("Unsafe redirect to URL with protocol '%s'" % parsed.scheme) url = property(lambda self: self['Location']) def __repr__(self): return '<%(cls)s status_code=%(status_code)d%(content_type)s, url="%(url)s">' % { 'cls': self.__class__.__name__, 'status_code': self.status_code, 'content_type': self._content_type_for_repr, 'url': self.url, } class HttpResponseRedirect(HttpResponseRedirectBase): status_code = 302 class HttpResponsePermanentRedirect(HttpResponseRedirectBase): status_code = 301 class HttpResponseNotModified(HttpResponse): status_code = 304 def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) del self['content-type'] @HttpResponse.content.setter def content(self, value): if value: raise AttributeError("You cannot set content to a 304 (Not Modified) response") self._container = [] class HttpResponseBadRequest(HttpResponse): status_code = 400 class HttpResponseNotFound(HttpResponse): status_code = 404 class HttpResponseForbidden(HttpResponse): status_code = 403 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, permitted_methods, *args, **kwargs): super().__init__(*args, **kwargs) self['Allow'] = ', '.join(permitted_methods) def __repr__(self): return '<%(cls)s [%(methods)s] status_code=%(status_code)d%(content_type)s>' % { 'cls': self.__class__.__name__, 'status_code': self.status_code, 'content_type': self._content_type_for_repr, 'methods': self['Allow'], } class HttpResponseGone(HttpResponse): status_code = 410 class HttpResponseServerError(HttpResponse): status_code = 500 class Http404(Exception): pass class JsonResponse(HttpResponse): """ An HTTP response class that consumes data to be serialized to JSON. :param data: Data to be dumped into json. By default only ``dict`` objects are allowed to be passed due to a security flaw before ECMAScript 5. See the ``safe`` parameter for more information. :param encoder: Should be a json encoder class. 
Defaults to ``django.core.serializers.json.DjangoJSONEncoder``. :param safe: Controls if only ``dict`` objects may be serialized. Defaults to ``True``. :param json_dumps_params: A dictionary of kwargs passed to json.dumps(). """ def __init__(self, data, encoder=DjangoJSONEncoder, safe=True, json_dumps_params=None, **kwargs): if safe and not isinstance(data, dict): raise TypeError( 'In order to allow non-dict objects to be serialized set the ' 'safe parameter to False.' ) if json_dumps_params is None: json_dumps_params = {} kwargs.setdefault('content_type', 'application/json') data = json.dumps(data, cls=encoder, **json_dumps_params) super().__init__(content=data, **kwargs)
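# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of Django's source): a hedged illustration
# of the response classes defined above. Building a response reads
# settings.DEFAULT_CHARSET, so the settings.configure() call is an assumption
# needed only to run this sketch standalone; the header and body values are
# illustrative.
if __name__ == '__main__':
    if not settings.configured:
        settings.configure()

    response = HttpResponse('hello', content_type='text/plain')
    response['X-Answer'] = '42'           # headers are stored case-insensitively
    response.write(' world')              # appends to the internal container
    print(response.content)               # b'hello world'
    print(response.serialize_headers())   # all headers as a bytestring

    data = JsonResponse({'ok': True})
    print(data['Content-Type'])           # 'application/json'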
9621721d8e4b886eeb9f96d9940e4ba80d79668f52555a90d6f6ce4d43b8f574
from urllib.parse import quote, urljoin from django import template from django.apps import apps from django.utils.encoding import iri_to_uri from django.utils.html import conditional_escape register = template.Library() class PrefixNode(template.Node): def __repr__(self): return "<PrefixNode for %r>" % self.name def __init__(self, varname=None, name=None): if name is None: raise template.TemplateSyntaxError( "Prefix nodes must be given a name to return.") self.varname = varname self.name = name @classmethod def handle_token(cls, parser, token, name): """ Class method to parse prefix node and return a Node. """ # token.split_contents() isn't useful here because tags using this method don't accept variable as arguments tokens = token.contents.split() if len(tokens) > 1 and tokens[1] != 'as': raise template.TemplateSyntaxError( "First argument in '%s' must be 'as'" % tokens[0]) if len(tokens) > 1: varname = tokens[2] else: varname = None return cls(varname, name) @classmethod def handle_simple(cls, name): try: from django.conf import settings except ImportError: prefix = '' else: prefix = iri_to_uri(getattr(settings, name, '')) return prefix def render(self, context): prefix = self.handle_simple(self.name) if self.varname is None: return prefix context[self.varname] = prefix return '' @register.tag def get_static_prefix(parser, token): """ Populate a template variable with the static prefix, ``settings.STATIC_URL``. Usage:: {% get_static_prefix [as varname] %} Examples:: {% get_static_prefix %} {% get_static_prefix as static_prefix %} """ return PrefixNode.handle_token(parser, token, "STATIC_URL") @register.tag def get_media_prefix(parser, token): """ Populate a template variable with the media prefix, ``settings.MEDIA_URL``. Usage:: {% get_media_prefix [as varname] %} Examples:: {% get_media_prefix %} {% get_media_prefix as media_prefix %} """ return PrefixNode.handle_token(parser, token, "MEDIA_URL") class StaticNode(template.Node): def __init__(self, varname=None, path=None): if path is None: raise template.TemplateSyntaxError( "Static template nodes must be given a path to return.") self.path = path self.varname = varname def __repr__(self): return ( f'{self.__class__.__name__}(varname={self.varname!r}, path={self.path!r})' ) def url(self, context): path = self.path.resolve(context) return self.handle_simple(path) def render(self, context): url = self.url(context) if context.autoescape: url = conditional_escape(url) if self.varname is None: return url context[self.varname] = url return '' @classmethod def handle_simple(cls, path): if apps.is_installed('django.contrib.staticfiles'): from django.contrib.staticfiles.storage import staticfiles_storage return staticfiles_storage.url(path) else: return urljoin(PrefixNode.handle_simple("STATIC_URL"), quote(path)) @classmethod def handle_token(cls, parser, token): """ Class method to parse prefix node and return a Node. """ bits = token.split_contents() if len(bits) < 2: raise template.TemplateSyntaxError( "'%s' takes at least one argument (path to file)" % bits[0]) path = parser.compile_filter(bits[1]) if len(bits) >= 2 and bits[-2] == 'as': varname = bits[3] else: varname = None return cls(varname, path) @register.tag('static') def do_static(parser, token): """ Join the given path with the STATIC_URL setting. 
Usage:: {% static path [as varname] %} Examples:: {% static "myapp/css/base.css" %} {% static variable_with_path %} {% static "myapp/css/base.css" as admin_base_css %} {% static variable_with_path as varname %} """ return StaticNode.handle_token(parser, token) def static(path): """ Given a relative path to a static asset, return the absolute path to the asset. """ return StaticNode.handle_simple(path)
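# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of Django's source): a hedged illustration
# of the static() helper above. Without django.contrib.staticfiles installed
# it simply joins STATIC_URL with the quoted path. The settings.configure()
# and django.setup() calls, and the example paths, are assumptions needed
# only to run this sketch standalone.
if __name__ == '__main__':
    import django
    from django.conf import settings

    if not settings.configured:
        settings.configure(INSTALLED_APPS=[], STATIC_URL='/static/')
    django.setup()

    print(PrefixNode.handle_simple('STATIC_URL'))  # '/static/'
    print(static('myapp/css/base.css'))            # '/static/myapp/css/base.css'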
b365b9d601bbb504c79dd5a1645784b22ca9dd17f66defe9a2269fd7de53091a
import copy from collections import defaultdict from contextlib import contextmanager from functools import partial from django.apps import AppConfig from django.apps.registry import Apps, apps as global_apps from django.conf import settings from django.db import models from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT from django.db.models.options import DEFAULT_NAMES, normalize_together from django.db.models.utils import make_model_tuple from django.utils.functional import cached_property from django.utils.module_loading import import_string from django.utils.version import get_docs_version from .exceptions import InvalidBasesError from .utils import resolve_relation def _get_app_label_and_model_name(model, app_label=''): if isinstance(model, str): split = model.split('.', 1) return tuple(split) if len(split) == 2 else (app_label, split[0]) else: return model._meta.app_label, model._meta.model_name def _get_related_models(m): """Return all models that have a direct relationship to the given model.""" related_models = [ subclass for subclass in m.__subclasses__() if issubclass(subclass, models.Model) ] related_fields_models = set() for f in m._meta.get_fields(include_parents=True, include_hidden=True): if f.is_relation and f.related_model is not None and not isinstance(f.related_model, str): related_fields_models.add(f.model) related_models.append(f.related_model) # Reverse accessors of foreign keys to proxy models are attached to their # concrete proxied model. opts = m._meta if opts.proxy and m in related_fields_models: related_models.append(opts.concrete_model) return related_models def get_related_models_tuples(model): """ Return a list of typical (app_label, model_name) tuples for all related models for the given model. """ return { (rel_mod._meta.app_label, rel_mod._meta.model_name) for rel_mod in _get_related_models(model) } def get_related_models_recursive(model): """ Return all models that have a direct or indirect relationship to the given model. Relationships are either defined by explicit relational fields, like ForeignKey, ManyToManyField or OneToOneField, or by inheriting from another model (a superclass is related to its subclasses, but not vice versa). Note, however, that a model inheriting from a concrete model is also related to its superclass through the implicit *_ptr OneToOneField on the subclass. """ seen = set() queue = _get_related_models(model) for rel_mod in queue: rel_app_label, rel_model_name = rel_mod._meta.app_label, rel_mod._meta.model_name if (rel_app_label, rel_model_name) in seen: continue seen.add((rel_app_label, rel_model_name)) queue.extend(_get_related_models(rel_mod)) return seen - {(model._meta.app_label, model._meta.model_name)} class ProjectState: """ Represent the entire project's overall state. This is the item that is passed around - do it here rather than at the app level so that cross-app FKs/etc. resolve properly. 
""" def __init__(self, models=None, real_apps=None): self.models = models or {} # Apps to include from main registry, usually unmigrated ones self.real_apps = real_apps or [] self.is_delayed = False # {remote_model_key: {model_key: [(field_name, field)]}} self.relations = None def add_model(self, model_state): app_label, model_name = model_state.app_label, model_state.name_lower self.models[(app_label, model_name)] = model_state if 'apps' in self.__dict__: # hasattr would cache the property self.reload_model(app_label, model_name) def remove_model(self, app_label, model_name): del self.models[app_label, model_name] if 'apps' in self.__dict__: # hasattr would cache the property self.apps.unregister_model(app_label, model_name) # Need to do this explicitly since unregister_model() doesn't clear # the cache automatically (#24513) self.apps.clear_cache() def _find_reload_model(self, app_label, model_name, delay=False): if delay: self.is_delayed = True related_models = set() try: old_model = self.apps.get_model(app_label, model_name) except LookupError: pass else: # Get all relations to and from the old model before reloading, # as _meta.apps may change if delay: related_models = get_related_models_tuples(old_model) else: related_models = get_related_models_recursive(old_model) # Get all outgoing references from the model to be rendered model_state = self.models[(app_label, model_name)] # Directly related models are the models pointed to by ForeignKeys, # OneToOneFields, and ManyToManyFields. direct_related_models = set() for field in model_state.fields.values(): if field.is_relation: if field.remote_field.model == RECURSIVE_RELATIONSHIP_CONSTANT: continue rel_app_label, rel_model_name = _get_app_label_and_model_name(field.related_model, app_label) direct_related_models.add((rel_app_label, rel_model_name.lower())) # For all direct related models recursively get all related models. related_models.update(direct_related_models) for rel_app_label, rel_model_name in direct_related_models: try: rel_model = self.apps.get_model(rel_app_label, rel_model_name) except LookupError: pass else: if delay: related_models.update(get_related_models_tuples(rel_model)) else: related_models.update(get_related_models_recursive(rel_model)) # Include the model itself related_models.add((app_label, model_name)) return related_models def reload_model(self, app_label, model_name, delay=False): if 'apps' in self.__dict__: # hasattr would cache the property related_models = self._find_reload_model(app_label, model_name, delay) self._reload(related_models) def reload_models(self, models, delay=True): if 'apps' in self.__dict__: # hasattr would cache the property related_models = set() for app_label, model_name in models: related_models.update(self._find_reload_model(app_label, model_name, delay)) self._reload(related_models) def _reload(self, related_models): # Unregister all related models with self.apps.bulk_update(): for rel_app_label, rel_model_name in related_models: self.apps.unregister_model(rel_app_label, rel_model_name) states_to_be_rendered = [] # Gather all models states of those models that will be rerendered. # This includes: # 1. All related models of unmigrated apps for model_state in self.apps.real_models: if (model_state.app_label, model_state.name_lower) in related_models: states_to_be_rendered.append(model_state) # 2. 
All related models of migrated apps for rel_app_label, rel_model_name in related_models: try: model_state = self.models[rel_app_label, rel_model_name] except KeyError: pass else: states_to_be_rendered.append(model_state) # Render all models self.apps.render_multiple(states_to_be_rendered) def resolve_fields_and_relations(self): # Resolve fields. for model_state in self.models.values(): for field_name, field in model_state.fields.items(): field.name = field_name # Resolve relations. # {remote_model_key: {model_key: [(field_name, field)]}} self.relations = defaultdict(partial(defaultdict, list)) concretes, proxies = self._get_concrete_models_mapping_and_proxy_models() real_apps = set(self.real_apps) for model_key in concretes: model_state = self.models[model_key] for field_name, field in model_state.fields.items(): remote_field = field.remote_field if not remote_field: continue remote_model_key = resolve_relation(remote_field.model, *model_key) if remote_model_key[0] not in real_apps and remote_model_key in concretes: remote_model_key = concretes[remote_model_key] self.relations[remote_model_key][model_key].append((field_name, field)) through = getattr(remote_field, 'through', None) if not through: continue through_model_key = resolve_relation(through, *model_key) if through_model_key[0] not in real_apps and through_model_key in concretes: through_model_key = concretes[through_model_key] self.relations[through_model_key][model_key].append((field_name, field)) for model_key in proxies: self.relations[model_key] = self.relations[concretes[model_key]] def get_concrete_model_key(self, model): concrete_models_mapping, _ = self._get_concrete_models_mapping_and_proxy_models() model_key = make_model_tuple(model) return concrete_models_mapping[model_key] def _get_concrete_models_mapping_and_proxy_models(self): concrete_models_mapping = {} proxy_models = {} # Split models to proxy and concrete models. for model_key, model_state in self.models.items(): if model_state.options.get('proxy'): proxy_models[model_key] = model_state # Find a concrete model for the proxy. concrete_models_mapping[model_key] = self._find_concrete_model_from_proxy( proxy_models, model_state, ) else: concrete_models_mapping[model_key] = model_key return concrete_models_mapping, proxy_models def _find_concrete_model_from_proxy(self, proxy_models, model_state): for base in model_state.bases: base_key = make_model_tuple(base) base_state = proxy_models.get(base_key) if not base_state: # Concrete model found, stop looking at bases. 
return base_key return self._find_concrete_model_from_proxy(proxy_models, base_state) def clone(self): """Return an exact copy of this ProjectState.""" new_state = ProjectState( models={k: v.clone() for k, v in self.models.items()}, real_apps=self.real_apps, ) if 'apps' in self.__dict__: new_state.apps = self.apps.clone() new_state.is_delayed = self.is_delayed return new_state def clear_delayed_apps_cache(self): if self.is_delayed and 'apps' in self.__dict__: del self.__dict__['apps'] @cached_property def apps(self): return StateApps(self.real_apps, self.models) @classmethod def from_apps(cls, apps): """Take an Apps and return a ProjectState matching it.""" app_models = {} for model in apps.get_models(include_swapped=True): model_state = ModelState.from_model(model) app_models[(model_state.app_label, model_state.name_lower)] = model_state return cls(app_models) def __eq__(self, other): return self.models == other.models and set(self.real_apps) == set(other.real_apps) class AppConfigStub(AppConfig): """Stub of an AppConfig. Only provides a label and a dict of models.""" def __init__(self, label): self.apps = None self.models = {} # App-label and app-name are not the same thing, so technically passing # in the label here is wrong. In practice, migrations don't care about # the app name, but we need something unique, and the label works fine. self.label = label self.name = label def import_models(self): self.models = self.apps.all_models[self.label] class StateApps(Apps): """ Subclass of the global Apps registry class to better handle dynamic model additions and removals. """ def __init__(self, real_apps, models, ignore_swappable=False): # Any apps in self.real_apps should have all their models included # in the render. We don't use the original model instances as there # are some variables that refer to the Apps object. # FKs/M2Ms from real apps are also not included as they just # mess things up with partial states (due to lack of dependencies) self.real_models = [] for app_label in real_apps: app = global_apps.get_app_config(app_label) for model in app.get_models(): self.real_models.append(ModelState.from_model(model, exclude_rels=True)) # Populate the app registry with a stub for each application. app_labels = {model_state.app_label for model_state in models.values()} app_configs = [AppConfigStub(label) for label in sorted([*real_apps, *app_labels])] super().__init__(app_configs) # These locks get in the way of copying as implemented in clone(), # which is called whenever Django duplicates a StateApps before # updating it. self._lock = None self.ready_event = None self.render_multiple([*models.values(), *self.real_models]) # There shouldn't be any operations pending at this point. from django.core.checks.model_checks import _check_lazy_references ignore = {make_model_tuple(settings.AUTH_USER_MODEL)} if ignore_swappable else set() errors = _check_lazy_references(self, ignore=ignore) if errors: raise ValueError("\n".join(error.msg for error in errors)) @contextmanager def bulk_update(self): # Avoid clearing each model's cache for each change. Instead, clear # all caches when we're finished updating the model instances. ready = self.ready self.ready = False try: yield finally: self.ready = ready self.clear_cache() def render_multiple(self, model_states): # We keep trying to render the models in a loop, ignoring invalid # base errors, until the size of the unrendered models doesn't # decrease by at least one, meaning there's a base dependency loop/ # missing base. 
if not model_states: return # Prevent that all model caches are expired for each render. with self.bulk_update(): unrendered_models = model_states while unrendered_models: new_unrendered_models = [] for model in unrendered_models: try: model.render(self) except InvalidBasesError: new_unrendered_models.append(model) if len(new_unrendered_models) == len(unrendered_models): raise InvalidBasesError( "Cannot resolve bases for %r\nThis can happen if you are inheriting models from an " "app with migrations (e.g. contrib.auth)\n in an app with no migrations; see " "https://docs.djangoproject.com/en/%s/topics/migrations/#dependencies " "for more" % (new_unrendered_models, get_docs_version()) ) unrendered_models = new_unrendered_models def clone(self): """Return a clone of this registry.""" clone = StateApps([], {}) clone.all_models = copy.deepcopy(self.all_models) clone.app_configs = copy.deepcopy(self.app_configs) # Set the pointer to the correct app registry. for app_config in clone.app_configs.values(): app_config.apps = clone # No need to actually clone them, they'll never change clone.real_models = self.real_models return clone def register_model(self, app_label, model): self.all_models[app_label][model._meta.model_name] = model if app_label not in self.app_configs: self.app_configs[app_label] = AppConfigStub(app_label) self.app_configs[app_label].apps = self self.app_configs[app_label].models[model._meta.model_name] = model self.do_pending_operations(model) self.clear_cache() def unregister_model(self, app_label, model_name): try: del self.all_models[app_label][model_name] del self.app_configs[app_label].models[model_name] except KeyError: pass class ModelState: """ Represent a Django Model. Don't use the actual Model class as it's not designed to have its options changed - instead, mutate this one and then render it into a Model as required. Note that while you are allowed to mutate .fields, you are not allowed to mutate the Field instances inside there themselves - you must instead assign new ones, as these are not detached during a clone. """ def __init__(self, app_label, name, fields, options=None, bases=None, managers=None): self.app_label = app_label self.name = name self.fields = dict(fields) self.options = options or {} self.options.setdefault('indexes', []) self.options.setdefault('constraints', []) self.bases = bases or (models.Model,) self.managers = managers or [] for name, field in self.fields.items(): # Sanity-check that fields are NOT already bound to a model. if hasattr(field, 'model'): raise ValueError( 'ModelState.fields cannot be bound to a model - "%s" is.' % name ) # Sanity-check that relation fields are NOT referring to a model class. if field.is_relation and hasattr(field.related_model, '_meta'): raise ValueError( 'ModelState.fields cannot refer to a model class - "%s.to" does. ' 'Use a string reference instead.' % name ) if field.many_to_many and hasattr(field.remote_field.through, '_meta'): raise ValueError( 'ModelState.fields cannot refer to a model class - "%s.through" does. ' 'Use a string reference instead.' % name ) # Sanity-check that indexes have their name set. for index in self.options['indexes']: if not index.name: raise ValueError( "Indexes passed to ModelState require a name attribute. " "%r doesn't have one." 
% index ) @cached_property def name_lower(self): return self.name.lower() def get_field(self, field_name): field_name = ( self.options['order_with_respect_to'] if field_name == '_order' else field_name ) return self.fields[field_name] @classmethod def from_model(cls, model, exclude_rels=False): """Given a model, return a ModelState representing it.""" # Deconstruct the fields fields = [] for field in model._meta.local_fields: if getattr(field, "remote_field", None) and exclude_rels: continue if isinstance(field, models.OrderWrt): continue name = field.name try: fields.append((name, field.clone())) except TypeError as e: raise TypeError("Couldn't reconstruct field %s on %s: %s" % ( name, model._meta.label, e, )) if not exclude_rels: for field in model._meta.local_many_to_many: name = field.name try: fields.append((name, field.clone())) except TypeError as e: raise TypeError("Couldn't reconstruct m2m field %s on %s: %s" % ( name, model._meta.object_name, e, )) # Extract the options options = {} for name in DEFAULT_NAMES: # Ignore some special options if name in ["apps", "app_label"]: continue elif name in model._meta.original_attrs: if name == "unique_together": ut = model._meta.original_attrs["unique_together"] options[name] = set(normalize_together(ut)) elif name == "index_together": it = model._meta.original_attrs["index_together"] options[name] = set(normalize_together(it)) elif name == "indexes": indexes = [idx.clone() for idx in model._meta.indexes] for index in indexes: if not index.name: index.set_name_with_model(model) options['indexes'] = indexes elif name == 'constraints': options['constraints'] = [con.clone() for con in model._meta.constraints] else: options[name] = model._meta.original_attrs[name] # If we're ignoring relationships, remove all field-listing model # options (that option basically just means "make a stub model") if exclude_rels: for key in ["unique_together", "index_together", "order_with_respect_to"]: if key in options: del options[key] # Private fields are ignored, so remove options that refer to them. elif options.get('order_with_respect_to') in {field.name for field in model._meta.private_fields}: del options['order_with_respect_to'] def flatten_bases(model): bases = [] for base in model.__bases__: if hasattr(base, "_meta") and base._meta.abstract: bases.extend(flatten_bases(base)) else: bases.append(base) return bases # We can't rely on __mro__ directly because we only want to flatten # abstract models and not the whole tree. However by recursing on # __bases__ we may end up with duplicates and ordering issues, we # therefore discard any duplicates and reorder the bases according # to their index in the MRO. flattened_bases = sorted(set(flatten_bases(model)), key=lambda x: model.__mro__.index(x)) # Make our record bases = tuple( ( base._meta.label_lower if hasattr(base, "_meta") else base ) for base in flattened_bases ) # Ensure at least one base inherits from models.Model if not any((isinstance(base, str) or issubclass(base, models.Model)) for base in bases): bases = (models.Model,) managers = [] manager_names = set() default_manager_shim = None for manager in model._meta.managers: if manager.name in manager_names: # Skip overridden managers. continue elif manager.use_in_migrations: # Copy managers usable in migrations. new_manager = copy.copy(manager) new_manager._set_creation_counter() elif manager is model._base_manager or manager is model._default_manager: # Shim custom managers used as default and base managers. 
new_manager = models.Manager() new_manager.model = manager.model new_manager.name = manager.name if manager is model._default_manager: default_manager_shim = new_manager else: continue manager_names.add(manager.name) managers.append((manager.name, new_manager)) # Ignore a shimmed default manager called objects if it's the only one. if managers == [('objects', default_manager_shim)]: managers = [] # Construct the new ModelState return cls( model._meta.app_label, model._meta.object_name, fields, options, bases, managers, ) def construct_managers(self): """Deep-clone the managers using deconstruction.""" # Sort all managers by their creation counter sorted_managers = sorted(self.managers, key=lambda v: v[1].creation_counter) for mgr_name, manager in sorted_managers: as_manager, manager_path, qs_path, args, kwargs = manager.deconstruct() if as_manager: qs_class = import_string(qs_path) yield mgr_name, qs_class.as_manager() else: manager_class = import_string(manager_path) yield mgr_name, manager_class(*args, **kwargs) def clone(self): """Return an exact copy of this ModelState.""" return self.__class__( app_label=self.app_label, name=self.name, fields=dict(self.fields), # Since options are shallow-copied here, operations such as # AddIndex must replace their option (e.g 'indexes') rather # than mutating it. options=dict(self.options), bases=self.bases, managers=list(self.managers), ) def render(self, apps): """Create a Model object from our current state into the given apps.""" # First, make a Meta object meta_contents = {'app_label': self.app_label, 'apps': apps, **self.options} meta = type("Meta", (), meta_contents) # Then, work out our bases try: bases = tuple( (apps.get_model(base) if isinstance(base, str) else base) for base in self.bases ) except LookupError: raise InvalidBasesError("Cannot resolve one or more bases from %r" % (self.bases,)) # Clone fields for the body, add other bits. body = {name: field.clone() for name, field in self.fields.items()} body['Meta'] = meta body['__module__'] = "__fake__" # Restore managers body.update(self.construct_managers()) # Then, make a Model object (apps.register_model is called in __new__) return type(self.name, bases, body) def get_index_by_name(self, name): for index in self.options['indexes']: if index.name == name: return index raise ValueError("No index named %s on model %s" % (name, self.name)) def get_constraint_by_name(self, name): for constraint in self.options['constraints']: if constraint.name == name: return constraint raise ValueError('No constraint named %s on model %s' % (name, self.name)) def __repr__(self): return "<%s: '%s.%s'>" % (self.__class__.__name__, self.app_label, self.name) def __eq__(self, other): return ( (self.app_label == other.app_label) and (self.name == other.name) and (len(self.fields) == len(other.fields)) and all( k1 == k2 and f1.deconstruct()[1:] == f2.deconstruct()[1:] for (k1, f1), (k2, f2) in zip( sorted(self.fields.items()), sorted(other.fields.items()), ) ) and (self.options == other.options) and (self.bases == other.bases) and (self.managers == other.managers) )
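# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of Django's source): a hedged illustration
# of ModelState/ProjectState above. The global app registry must be ready
# before model classes can be rendered, so the settings.configure() and
# django.setup() calls are assumptions needed only to execute this sketch
# (e.g. via ``python -m django.db.migrations.state``); the 'library'/'Author'
# names are illustrative.
if __name__ == '__main__':
    import django

    if not settings.configured:
        settings.configure(INSTALLED_APPS=[])
    django.setup()

    # Describe a model in memory without defining a real model class.
    author_state = ModelState('library', 'Author', [
        ('id', models.AutoField(primary_key=True)),
        ('name', models.CharField(max_length=100)),
    ])
    project_state = ProjectState()
    project_state.add_model(author_state)

    # Accessing .apps renders the ModelStates into model classes inside an
    # isolated StateApps registry.
    Author = project_state.apps.get_model('library', 'Author')
    print(Author._meta.get_field('name').max_length)  # 100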
1a6bfbd7cb66801e205ca750e9ffaab8ed101f5acc11e0ac979f2bf9c9a0f6c6
import functools import re from itertools import chain from django.conf import settings from django.db import models from django.db.migrations import operations from django.db.migrations.migration import Migration from django.db.migrations.operations.models import AlterModelOptions from django.db.migrations.optimizer import MigrationOptimizer from django.db.migrations.questioner import MigrationQuestioner from django.db.migrations.utils import ( COMPILED_REGEX_TYPE, RegexObject, resolve_relation, ) from django.utils.topological_sort import stable_topological_sort class MigrationAutodetector: """ Take a pair of ProjectStates and compare them to see what the first would need doing to make it match the second (the second usually being the project's current state). Note that this naturally operates on entire projects at a time, as it's likely that changes interact (for example, you can't add a ForeignKey without having a migration to add the table it depends on first). A user interface may offer single-app usage if it wishes, with the caveat that it may not always be possible. """ def __init__(self, from_state, to_state, questioner=None): self.from_state = from_state self.to_state = to_state self.questioner = questioner or MigrationQuestioner() self.existing_apps = {app for app, model in from_state.models} def changes(self, graph, trim_to_apps=None, convert_apps=None, migration_name=None): """ Main entry point to produce a list of applicable changes. Take a graph to base names on and an optional set of apps to try and restrict to (restriction is not guaranteed) """ changes = self._detect_changes(convert_apps, graph) changes = self.arrange_for_graph(changes, graph, migration_name) if trim_to_apps: changes = self._trim_to_apps(changes, trim_to_apps) return changes def deep_deconstruct(self, obj): """ Recursive deconstruction for a field and its arguments. Used for full comparison for rename/alter; sometimes a single-level deconstruction will not compare correctly. """ if isinstance(obj, list): return [self.deep_deconstruct(value) for value in obj] elif isinstance(obj, tuple): return tuple(self.deep_deconstruct(value) for value in obj) elif isinstance(obj, dict): return { key: self.deep_deconstruct(value) for key, value in obj.items() } elif isinstance(obj, functools.partial): return (obj.func, self.deep_deconstruct(obj.args), self.deep_deconstruct(obj.keywords)) elif isinstance(obj, COMPILED_REGEX_TYPE): return RegexObject(obj) elif isinstance(obj, type): # If this is a type that implements 'deconstruct' as an instance method, # avoid treating this as being deconstructible itself - see #22951 return obj elif hasattr(obj, 'deconstruct'): deconstructed = obj.deconstruct() if isinstance(obj, models.Field): # we have a field which also returns a name deconstructed = deconstructed[1:] path, args, kwargs = deconstructed return ( path, [self.deep_deconstruct(value) for value in args], { key: self.deep_deconstruct(value) for key, value in kwargs.items() }, ) else: return obj def only_relation_agnostic_fields(self, fields): """ Return a definition of the fields that ignores field names and what related fields actually relate to. Used for detecting renames (as the related fields change during renames). 
""" fields_def = [] for name, field in sorted(fields.items()): deconstruction = self.deep_deconstruct(field) if field.remote_field and field.remote_field.model: del deconstruction[2]['to'] fields_def.append(deconstruction) return fields_def def _detect_changes(self, convert_apps=None, graph=None): """ Return a dict of migration plans which will achieve the change from from_state to to_state. The dict has app labels as keys and a list of migrations as values. The resulting migrations aren't specially named, but the names do matter for dependencies inside the set. convert_apps is the list of apps to convert to use migrations (i.e. to make initial migrations for, in the usual case) graph is an optional argument that, if provided, can help improve dependency generation and avoid potential circular dependencies. """ # The first phase is generating all the operations for each app # and gathering them into a big per-app list. # Then go through that list, order it, and split into migrations to # resolve dependencies caused by M2Ms and FKs. self.generated_operations = {} self.altered_indexes = {} self.altered_constraints = {} # Prepare some old/new state and model lists, separating # proxy models and ignoring unmigrated apps. self.old_model_keys = set() self.old_proxy_keys = set() self.old_unmanaged_keys = set() self.new_model_keys = set() self.new_proxy_keys = set() self.new_unmanaged_keys = set() for (app_label, model_name), model_state in self.from_state.models.items(): if not model_state.options.get('managed', True): self.old_unmanaged_keys.add((app_label, model_name)) elif app_label not in self.from_state.real_apps: if model_state.options.get('proxy'): self.old_proxy_keys.add((app_label, model_name)) else: self.old_model_keys.add((app_label, model_name)) for (app_label, model_name), model_state in self.to_state.models.items(): if not model_state.options.get('managed', True): self.new_unmanaged_keys.add((app_label, model_name)) elif ( app_label not in self.from_state.real_apps or (convert_apps and app_label in convert_apps) ): if model_state.options.get('proxy'): self.new_proxy_keys.add((app_label, model_name)) else: self.new_model_keys.add((app_label, model_name)) self.from_state.resolve_fields_and_relations() self.to_state.resolve_fields_and_relations() # Renames have to come first self.generate_renamed_models() # Prepare lists of fields and generate through model map self._prepare_field_lists() self._generate_through_model_map() # Generate non-rename model operations self.generate_deleted_models() self.generate_created_models() self.generate_deleted_proxies() self.generate_created_proxies() self.generate_altered_options() self.generate_altered_managers() # Create the altered indexes and store them in self.altered_indexes. # This avoids the same computation in generate_removed_indexes() # and generate_added_indexes(). 
self.create_altered_indexes() self.create_altered_constraints() # Generate index removal operations before field is removed self.generate_removed_constraints() self.generate_removed_indexes() # Generate field operations self.generate_renamed_fields() self.generate_removed_fields() self.generate_added_fields() self.generate_altered_fields() self.generate_altered_order_with_respect_to() self.generate_altered_unique_together() self.generate_altered_index_together() self.generate_added_indexes() self.generate_added_constraints() self.generate_altered_db_table() self._sort_migrations() self._build_migration_list(graph) self._optimize_migrations() return self.migrations def _prepare_field_lists(self): """ Prepare field lists and a list of the fields that used through models in the old state so dependencies can be made from the through model deletion to the field that uses it. """ self.kept_model_keys = self.old_model_keys & self.new_model_keys self.kept_proxy_keys = self.old_proxy_keys & self.new_proxy_keys self.kept_unmanaged_keys = self.old_unmanaged_keys & self.new_unmanaged_keys self.through_users = {} self.old_field_keys = { (app_label, model_name, field_name) for app_label, model_name in self.kept_model_keys for field_name in self.from_state.models[ app_label, self.renamed_models.get((app_label, model_name), model_name) ].fields } self.new_field_keys = { (app_label, model_name, field_name) for app_label, model_name in self.kept_model_keys for field_name in self.to_state.models[app_label, model_name].fields } def _generate_through_model_map(self): """Through model map generation.""" for app_label, model_name in sorted(self.old_model_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] for field_name, field in old_model_state.fields.items(): if hasattr(field, 'remote_field') and getattr(field.remote_field, 'through', None): through_key = resolve_relation(field.remote_field.through, app_label, model_name) self.through_users[through_key] = (app_label, old_model_name, field_name) @staticmethod def _resolve_dependency(dependency): """ Return the resolved dependency and a boolean denoting whether or not it was swappable. """ if dependency[0] != '__setting__': return dependency, False resolved_app_label, resolved_object_name = getattr(settings, dependency[1]).split('.') return (resolved_app_label, resolved_object_name.lower()) + dependency[2:], True def _build_migration_list(self, graph=None): """ Chop the lists of operations up into migrations with dependencies on each other. Do this by going through an app's list of operations until one is found that has an outgoing dependency that isn't in another app's migration yet (hasn't been chopped off its list). Then chop off the operations before it into a migration and move onto the next app. If the loops completes without doing anything, there's a circular dependency (which _should_ be impossible as the operations are all split at this point so they can't depend and be depended on). """ self.migrations = {} num_ops = sum(len(x) for x in self.generated_operations.values()) chop_mode = False while num_ops: # On every iteration, we step through all the apps and see if there # is a completed set of operations. # If we find that a subset of the operations are complete we can # try to chop it off from the rest and continue, but we only # do this if we've already been through the list once before # without any chopping and nothing has changed. 
for app_label in sorted(self.generated_operations): chopped = [] dependencies = set() for operation in list(self.generated_operations[app_label]): deps_satisfied = True operation_dependencies = set() for dep in operation._auto_deps: # Temporarily resolve the swappable dependency to # prevent circular references. While keeping the # dependency checks on the resolved model, add the # swappable dependencies. original_dep = dep dep, is_swappable_dep = self._resolve_dependency(dep) if dep[0] != app_label: # External app dependency. See if it's not yet # satisfied. for other_operation in self.generated_operations.get(dep[0], []): if self.check_dependency(other_operation, dep): deps_satisfied = False break if not deps_satisfied: break else: if is_swappable_dep: operation_dependencies.add((original_dep[0], original_dep[1])) elif dep[0] in self.migrations: operation_dependencies.add((dep[0], self.migrations[dep[0]][-1].name)) else: # If we can't find the other app, we add a first/last dependency, # but only if we've already been through once and checked everything if chop_mode: # If the app already exists, we add a dependency on the last migration, # as we don't know which migration contains the target field. # If it's not yet migrated or has no migrations, we use __first__ if graph and graph.leaf_nodes(dep[0]): operation_dependencies.add(graph.leaf_nodes(dep[0])[0]) else: operation_dependencies.add((dep[0], "__first__")) else: deps_satisfied = False if deps_satisfied: chopped.append(operation) dependencies.update(operation_dependencies) del self.generated_operations[app_label][0] else: break # Make a migration! Well, only if there's stuff to put in it if dependencies or chopped: if not self.generated_operations[app_label] or chop_mode: subclass = type("Migration", (Migration,), {"operations": [], "dependencies": []}) instance = subclass("auto_%i" % (len(self.migrations.get(app_label, [])) + 1), app_label) instance.dependencies = list(dependencies) instance.operations = chopped instance.initial = app_label not in self.existing_apps self.migrations.setdefault(app_label, []).append(instance) chop_mode = False else: self.generated_operations[app_label] = chopped + self.generated_operations[app_label] new_num_ops = sum(len(x) for x in self.generated_operations.values()) if new_num_ops == num_ops: if not chop_mode: chop_mode = True else: raise ValueError("Cannot resolve operation dependencies: %r" % self.generated_operations) num_ops = new_num_ops def _sort_migrations(self): """ Reorder to make things possible. Reordering may be needed so FKs work nicely inside the same app. """ for app_label, ops in sorted(self.generated_operations.items()): # construct a dependency graph for intra-app dependencies dependency_graph = {op: set() for op in ops} for op in ops: for dep in op._auto_deps: # Resolve intra-app dependencies to handle circular # references involving a swappable model. 
dep = self._resolve_dependency(dep)[0] if dep[0] == app_label: for op2 in ops: if self.check_dependency(op2, dep): dependency_graph[op].add(op2) # we use a stable sort for deterministic tests & general behavior self.generated_operations[app_label] = stable_topological_sort(ops, dependency_graph) def _optimize_migrations(self): # Add in internal dependencies among the migrations for app_label, migrations in self.migrations.items(): for m1, m2 in zip(migrations, migrations[1:]): m2.dependencies.append((app_label, m1.name)) # De-dupe dependencies for migrations in self.migrations.values(): for migration in migrations: migration.dependencies = list(set(migration.dependencies)) # Optimize migrations for app_label, migrations in self.migrations.items(): for migration in migrations: migration.operations = MigrationOptimizer().optimize(migration.operations, app_label) def check_dependency(self, operation, dependency): """ Return True if the given operation depends on the given dependency, False otherwise. """ # Created model if dependency[2] is None and dependency[3] is True: return ( isinstance(operation, operations.CreateModel) and operation.name_lower == dependency[1].lower() ) # Created field elif dependency[2] is not None and dependency[3] is True: return ( ( isinstance(operation, operations.CreateModel) and operation.name_lower == dependency[1].lower() and any(dependency[2] == x for x, y in operation.fields) ) or ( isinstance(operation, operations.AddField) and operation.model_name_lower == dependency[1].lower() and operation.name_lower == dependency[2].lower() ) ) # Removed field elif dependency[2] is not None and dependency[3] is False: return ( isinstance(operation, operations.RemoveField) and operation.model_name_lower == dependency[1].lower() and operation.name_lower == dependency[2].lower() ) # Removed model elif dependency[2] is None and dependency[3] is False: return ( isinstance(operation, operations.DeleteModel) and operation.name_lower == dependency[1].lower() ) # Field being altered elif dependency[2] is not None and dependency[3] == "alter": return ( isinstance(operation, operations.AlterField) and operation.model_name_lower == dependency[1].lower() and operation.name_lower == dependency[2].lower() ) # order_with_respect_to being unset for a field elif dependency[2] is not None and dependency[3] == "order_wrt_unset": return ( isinstance(operation, operations.AlterOrderWithRespectTo) and operation.name_lower == dependency[1].lower() and (operation.order_with_respect_to or "").lower() != dependency[2].lower() ) # Field is removed and part of an index/unique_together elif dependency[2] is not None and dependency[3] == "foo_together_change": return ( isinstance(operation, (operations.AlterUniqueTogether, operations.AlterIndexTogether)) and operation.name_lower == dependency[1].lower() ) # Unknown dependency. Raise an error. else: raise ValueError("Can't handle dependency %r" % (dependency,)) def add_operation(self, app_label, operation, dependencies=None, beginning=False): # Dependencies are (app_label, model_name, field_name, create/delete as True/False) operation._auto_deps = dependencies or [] if beginning: self.generated_operations.setdefault(app_label, []).insert(0, operation) else: self.generated_operations.setdefault(app_label, []).append(operation) def swappable_first_key(self, item): """ Place potential swappable models first in lists of created models (only real way to solve #22783). 
""" try: model_state = self.to_state.models[item] base_names = { base if isinstance(base, str) else base.__name__ for base in model_state.bases } string_version = "%s.%s" % (item[0], item[1]) if ( model_state.options.get('swappable') or "AbstractUser" in base_names or "AbstractBaseUser" in base_names or settings.AUTH_USER_MODEL.lower() == string_version.lower() ): return ("___" + item[0], "___" + item[1]) except LookupError: pass return item def generate_renamed_models(self): """ Find any renamed models, generate the operations for them, and remove the old entry from the model lists. Must be run before other model-level generation. """ self.renamed_models = {} self.renamed_models_rel = {} added_models = self.new_model_keys - self.old_model_keys for app_label, model_name in sorted(added_models): model_state = self.to_state.models[app_label, model_name] model_fields_def = self.only_relation_agnostic_fields(model_state.fields) removed_models = self.old_model_keys - self.new_model_keys for rem_app_label, rem_model_name in removed_models: if rem_app_label == app_label: rem_model_state = self.from_state.models[rem_app_label, rem_model_name] rem_model_fields_def = self.only_relation_agnostic_fields(rem_model_state.fields) if model_fields_def == rem_model_fields_def: if self.questioner.ask_rename_model(rem_model_state, model_state): dependencies = [] fields = list(model_state.fields.values()) + [ field.remote_field for relations in self.to_state.relations[app_label, model_name].values() for _, field in relations ] for field in fields: if field.is_relation: dependencies.extend( self._get_dependencies_for_foreign_key( app_label, model_name, field, self.to_state, ) ) self.add_operation( app_label, operations.RenameModel( old_name=rem_model_state.name, new_name=model_state.name, ), dependencies=dependencies, ) self.renamed_models[app_label, model_name] = rem_model_name renamed_models_rel_key = '%s.%s' % ( rem_model_state.app_label, rem_model_state.name_lower, ) self.renamed_models_rel[renamed_models_rel_key] = '%s.%s' % ( model_state.app_label, model_state.name_lower, ) self.old_model_keys.remove((rem_app_label, rem_model_name)) self.old_model_keys.add((app_label, model_name)) break def generate_created_models(self): """ Find all new models (both managed and unmanaged) and make create operations for them as well as separate operations to create any foreign key or M2M relationships (these are optimized later, if possible). Defer any model options that refer to collections of fields that might be deferred (e.g. unique_together, index_together). """ old_keys = self.old_model_keys | self.old_unmanaged_keys added_models = self.new_model_keys - old_keys added_unmanaged_models = self.new_unmanaged_keys - old_keys all_added_models = chain( sorted(added_models, key=self.swappable_first_key, reverse=True), sorted(added_unmanaged_models, key=self.swappable_first_key, reverse=True) ) for app_label, model_name in all_added_models: model_state = self.to_state.models[app_label, model_name] # Gather related fields related_fields = {} primary_key_rel = None for field_name, field in model_state.fields.items(): if field.remote_field: if field.remote_field.model: if field.primary_key: primary_key_rel = field.remote_field.model elif not field.remote_field.parent_link: related_fields[field_name] = field if getattr(field.remote_field, 'through', None): related_fields[field_name] = field # Are there indexes/unique|index_together to defer? 
indexes = model_state.options.pop('indexes') constraints = model_state.options.pop('constraints') unique_together = model_state.options.pop('unique_together', None) index_together = model_state.options.pop('index_together', None) order_with_respect_to = model_state.options.pop('order_with_respect_to', None) # Depend on the deletion of any possible proxy version of us dependencies = [ (app_label, model_name, None, False), ] # Depend on all bases for base in model_state.bases: if isinstance(base, str) and "." in base: base_app_label, base_name = base.split(".", 1) dependencies.append((base_app_label, base_name, None, True)) # Depend on the removal of base fields if the new model has # a field with the same name. old_base_model_state = self.from_state.models.get((base_app_label, base_name)) new_base_model_state = self.to_state.models.get((base_app_label, base_name)) if old_base_model_state and new_base_model_state: removed_base_fields = set(old_base_model_state.fields).difference( new_base_model_state.fields, ).intersection(model_state.fields) for removed_base_field in removed_base_fields: dependencies.append((base_app_label, base_name, removed_base_field, False)) # Depend on the other end of the primary key if it's a relation if primary_key_rel: dependencies.append( resolve_relation( primary_key_rel, app_label, model_name, ) + (None, True) ) # Generate creation operation self.add_operation( app_label, operations.CreateModel( name=model_state.name, fields=[d for d in model_state.fields.items() if d[0] not in related_fields], options=model_state.options, bases=model_state.bases, managers=model_state.managers, ), dependencies=dependencies, beginning=True, ) # Don't add operations which modify the database for unmanaged models if not model_state.options.get('managed', True): continue # Generate operations for each related field for name, field in sorted(related_fields.items()): dependencies = self._get_dependencies_for_foreign_key( app_label, model_name, field, self.to_state, ) # Depend on our own model being created dependencies.append((app_label, model_name, None, True)) # Make operation self.add_operation( app_label, operations.AddField( model_name=model_name, name=name, field=field, ), dependencies=list(set(dependencies)), ) # Generate other opns if order_with_respect_to: self.add_operation( app_label, operations.AlterOrderWithRespectTo( name=model_name, order_with_respect_to=order_with_respect_to, ), dependencies=[ (app_label, model_name, order_with_respect_to, True), (app_label, model_name, None, True), ] ) related_dependencies = [ (app_label, model_name, name, True) for name in sorted(related_fields) ] related_dependencies.append((app_label, model_name, None, True)) for index in indexes: self.add_operation( app_label, operations.AddIndex( model_name=model_name, index=index, ), dependencies=related_dependencies, ) for constraint in constraints: self.add_operation( app_label, operations.AddConstraint( model_name=model_name, constraint=constraint, ), dependencies=related_dependencies, ) if unique_together: self.add_operation( app_label, operations.AlterUniqueTogether( name=model_name, unique_together=unique_together, ), dependencies=related_dependencies ) if index_together: self.add_operation( app_label, operations.AlterIndexTogether( name=model_name, index_together=index_together, ), dependencies=related_dependencies ) # Fix relationships if the model changed from a proxy model to a # concrete model. 
relations = self.to_state.relations if (app_label, model_name) in self.old_proxy_keys: for related_model_key, related_fields in relations[app_label, model_name].items(): related_model_state = self.to_state.models[related_model_key] for related_field_name, related_field in related_fields: self.add_operation( related_model_state.app_label, operations.AlterField( model_name=related_model_state.name, name=related_field_name, field=related_field, ), dependencies=[(app_label, model_name, None, True)], ) def generate_created_proxies(self): """ Make CreateModel statements for proxy models. Use the same statements as that way there's less code duplication, but for proxy models it's safe to skip all the pointless field stuff and chuck out an operation. """ added = self.new_proxy_keys - self.old_proxy_keys for app_label, model_name in sorted(added): model_state = self.to_state.models[app_label, model_name] assert model_state.options.get("proxy") # Depend on the deletion of any possible non-proxy version of us dependencies = [ (app_label, model_name, None, False), ] # Depend on all bases for base in model_state.bases: if isinstance(base, str) and "." in base: base_app_label, base_name = base.split(".", 1) dependencies.append((base_app_label, base_name, None, True)) # Generate creation operation self.add_operation( app_label, operations.CreateModel( name=model_state.name, fields=[], options=model_state.options, bases=model_state.bases, managers=model_state.managers, ), # Depend on the deletion of any possible non-proxy version of us dependencies=dependencies, ) def generate_deleted_models(self): """ Find all deleted models (managed and unmanaged) and make delete operations for them as well as separate operations to delete any foreign key or M2M relationships (these are optimized later, if possible). Also bring forward removal of any model options that refer to collections of fields - the inverse of generate_created_models(). """ new_keys = self.new_model_keys | self.new_unmanaged_keys deleted_models = self.old_model_keys - new_keys deleted_unmanaged_models = self.old_unmanaged_keys - new_keys all_deleted_models = chain(sorted(deleted_models), sorted(deleted_unmanaged_models)) for app_label, model_name in all_deleted_models: model_state = self.from_state.models[app_label, model_name] # Gather related fields related_fields = {} for field_name, field in model_state.fields.items(): if field.remote_field: if field.remote_field.model: related_fields[field_name] = field if getattr(field.remote_field, 'through', None): related_fields[field_name] = field # Generate option removal first unique_together = model_state.options.pop('unique_together', None) index_together = model_state.options.pop('index_together', None) if unique_together: self.add_operation( app_label, operations.AlterUniqueTogether( name=model_name, unique_together=None, ) ) if index_together: self.add_operation( app_label, operations.AlterIndexTogether( name=model_name, index_together=None, ) ) # Then remove each related field for name in sorted(related_fields): self.add_operation( app_label, operations.RemoveField( model_name=model_name, name=name, ) ) # Finally, remove the model. # This depends on both the removal/alteration of all incoming fields # and the removal of all its own related fields, and if it's # a through model the field that references it. 
dependencies = [] relations = self.from_state.relations for (related_object_app_label, object_name), relation_related_fields in ( relations[app_label, model_name].items() ): for field_name, field in relation_related_fields: dependencies.append( (related_object_app_label, object_name, field_name, False), ) if not field.many_to_many: dependencies.append( (related_object_app_label, object_name, field_name, 'alter'), ) for name in sorted(related_fields): dependencies.append((app_label, model_name, name, False)) # We're referenced in another field's through= through_user = self.through_users.get((app_label, model_state.name_lower)) if through_user: dependencies.append((through_user[0], through_user[1], through_user[2], False)) # Finally, make the operation, deduping any dependencies self.add_operation( app_label, operations.DeleteModel( name=model_state.name, ), dependencies=list(set(dependencies)), ) def generate_deleted_proxies(self): """Make DeleteModel options for proxy models.""" deleted = self.old_proxy_keys - self.new_proxy_keys for app_label, model_name in sorted(deleted): model_state = self.from_state.models[app_label, model_name] assert model_state.options.get("proxy") self.add_operation( app_label, operations.DeleteModel( name=model_state.name, ), ) def generate_renamed_fields(self): """Work out renamed fields.""" self.renamed_fields = {} for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, old_model_name] field = new_model_state.get_field(field_name) # Scan to see if this is actually a rename! field_dec = self.deep_deconstruct(field) for rem_app_label, rem_model_name, rem_field_name in sorted(self.old_field_keys - self.new_field_keys): if rem_app_label == app_label and rem_model_name == model_name: old_field = old_model_state.get_field(rem_field_name) old_field_dec = self.deep_deconstruct(old_field) if field.remote_field and field.remote_field.model and 'to' in old_field_dec[2]: old_rel_to = old_field_dec[2]['to'] if old_rel_to in self.renamed_models_rel: old_field_dec[2]['to'] = self.renamed_models_rel[old_rel_to] old_field.set_attributes_from_name(rem_field_name) old_db_column = old_field.get_attname_column()[1] if (old_field_dec == field_dec or ( # Was the field renamed and db_column equal to the # old field's column added? 
old_field_dec[0:2] == field_dec[0:2] and dict(old_field_dec[2], db_column=old_db_column) == field_dec[2])): if self.questioner.ask_rename(model_name, rem_field_name, field_name, field): self.add_operation( app_label, operations.RenameField( model_name=model_name, old_name=rem_field_name, new_name=field_name, ) ) self.old_field_keys.remove((rem_app_label, rem_model_name, rem_field_name)) self.old_field_keys.add((app_label, model_name, field_name)) self.renamed_fields[app_label, model_name, field_name] = rem_field_name break def generate_added_fields(self): """Make AddField operations.""" for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys): self._generate_added_field(app_label, model_name, field_name) def _generate_added_field(self, app_label, model_name, field_name): field = self.to_state.models[app_label, model_name].get_field(field_name) # Fields that are foreignkeys/m2ms depend on stuff dependencies = [] if field.remote_field and field.remote_field.model: dependencies.extend(self._get_dependencies_for_foreign_key( app_label, model_name, field, self.to_state, )) # You can't just add NOT NULL fields with no default or fields # which don't allow empty strings as default. time_fields = (models.DateField, models.DateTimeField, models.TimeField) preserve_default = ( field.null or field.has_default() or field.many_to_many or (field.blank and field.empty_strings_allowed) or (isinstance(field, time_fields) and field.auto_now) ) if not preserve_default: field = field.clone() if isinstance(field, time_fields) and field.auto_now_add: field.default = self.questioner.ask_auto_now_add_addition(field_name, model_name) else: field.default = self.questioner.ask_not_null_addition(field_name, model_name) self.add_operation( app_label, operations.AddField( model_name=model_name, name=field_name, field=field, preserve_default=preserve_default, ), dependencies=dependencies, ) def generate_removed_fields(self): """Make RemoveField operations.""" for app_label, model_name, field_name in sorted(self.old_field_keys - self.new_field_keys): self._generate_removed_field(app_label, model_name, field_name) def _generate_removed_field(self, app_label, model_name, field_name): self.add_operation( app_label, operations.RemoveField( model_name=model_name, name=field_name, ), # We might need to depend on the removal of an # order_with_respect_to or index/unique_together operation; # this is safely ignored if there isn't one dependencies=[ (app_label, model_name, field_name, "order_wrt_unset"), (app_label, model_name, field_name, "foo_together_change"), ], ) def generate_altered_fields(self): """ Make AlterField operations, or possibly RemoveField/AddField if alter isn't possible. """ for app_label, model_name, field_name in sorted(self.old_field_keys & self.new_field_keys): # Did the field change?
old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_field_name = self.renamed_fields.get((app_label, model_name, field_name), field_name) old_field = self.from_state.models[app_label, old_model_name].get_field(old_field_name) new_field = self.to_state.models[app_label, model_name].get_field(field_name) dependencies = [] # Implement any model renames on relations; these are handled by RenameModel # so we need to exclude them from the comparison if hasattr(new_field, "remote_field") and getattr(new_field.remote_field, "model", None): rename_key = resolve_relation(new_field.remote_field.model, app_label, model_name) if rename_key in self.renamed_models: new_field.remote_field.model = old_field.remote_field.model # Handle ForeignKey which can only have a single to_field. remote_field_name = getattr(new_field.remote_field, 'field_name', None) if remote_field_name: to_field_rename_key = rename_key + (remote_field_name,) if to_field_rename_key in self.renamed_fields: # Repoint both model and field name because to_field # inclusion in ForeignKey.deconstruct() is based on # both. new_field.remote_field.model = old_field.remote_field.model new_field.remote_field.field_name = old_field.remote_field.field_name # Handle ForeignObjects which can have multiple from_fields/to_fields. from_fields = getattr(new_field, 'from_fields', None) if from_fields: from_rename_key = (app_label, model_name) new_field.from_fields = tuple([ self.renamed_fields.get(from_rename_key + (from_field,), from_field) for from_field in from_fields ]) new_field.to_fields = tuple([ self.renamed_fields.get(rename_key + (to_field,), to_field) for to_field in new_field.to_fields ]) dependencies.extend(self._get_dependencies_for_foreign_key( app_label, model_name, new_field, self.to_state, )) if ( hasattr(new_field, 'remote_field') and getattr(new_field.remote_field, 'through', None) ): rename_key = resolve_relation(new_field.remote_field.through, app_label, model_name) if rename_key in self.renamed_models: new_field.remote_field.through = old_field.remote_field.through old_field_dec = self.deep_deconstruct(old_field) new_field_dec = self.deep_deconstruct(new_field) if old_field_dec != new_field_dec: both_m2m = old_field.many_to_many and new_field.many_to_many neither_m2m = not old_field.many_to_many and not new_field.many_to_many if both_m2m or neither_m2m: # Either both fields are m2m or neither is preserve_default = True if (old_field.null and not new_field.null and not new_field.has_default() and not new_field.many_to_many): field = new_field.clone() new_default = self.questioner.ask_not_null_alteration(field_name, model_name) if new_default is not models.NOT_PROVIDED: field.default = new_default preserve_default = False else: field = new_field self.add_operation( app_label, operations.AlterField( model_name=model_name, name=field_name, field=field, preserve_default=preserve_default, ), dependencies=dependencies, ) else: # We cannot alter between m2m and concrete fields self._generate_removed_field(app_label, model_name, field_name) self._generate_added_field(app_label, model_name, field_name) def create_altered_indexes(self): option_name = operations.AddIndex.option_name for app_label, model_name in sorted(self.kept_model_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, model_name] old_indexes = old_model_state.options[option_name] new_indexes = 
new_model_state.options[option_name] add_idx = [idx for idx in new_indexes if idx not in old_indexes] rem_idx = [idx for idx in old_indexes if idx not in new_indexes] self.altered_indexes.update({ (app_label, model_name): { 'added_indexes': add_idx, 'removed_indexes': rem_idx, } }) def generate_added_indexes(self): for (app_label, model_name), alt_indexes in self.altered_indexes.items(): for index in alt_indexes['added_indexes']: self.add_operation( app_label, operations.AddIndex( model_name=model_name, index=index, ) ) def generate_removed_indexes(self): for (app_label, model_name), alt_indexes in self.altered_indexes.items(): for index in alt_indexes['removed_indexes']: self.add_operation( app_label, operations.RemoveIndex( model_name=model_name, name=index.name, ) ) def create_altered_constraints(self): option_name = operations.AddConstraint.option_name for app_label, model_name in sorted(self.kept_model_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, model_name] old_constraints = old_model_state.options[option_name] new_constraints = new_model_state.options[option_name] add_constraints = [c for c in new_constraints if c not in old_constraints] rem_constraints = [c for c in old_constraints if c not in new_constraints] self.altered_constraints.update({ (app_label, model_name): { 'added_constraints': add_constraints, 'removed_constraints': rem_constraints, } }) def generate_added_constraints(self): for (app_label, model_name), alt_constraints in self.altered_constraints.items(): for constraint in alt_constraints['added_constraints']: self.add_operation( app_label, operations.AddConstraint( model_name=model_name, constraint=constraint, ) ) def generate_removed_constraints(self): for (app_label, model_name), alt_constraints in self.altered_constraints.items(): for constraint in alt_constraints['removed_constraints']: self.add_operation( app_label, operations.RemoveConstraint( model_name=model_name, name=constraint.name, ) ) @staticmethod def _get_dependencies_for_foreign_key(app_label, model_name, field, project_state): remote_field_model = None if hasattr(field.remote_field, 'model'): remote_field_model = field.remote_field.model else: relations = project_state.relations[app_label, model_name] for (remote_app_label, remote_model_name), fields in relations.items(): if any(field == related_field.remote_field for _, related_field in fields): remote_field_model = f'{remote_app_label}.{remote_model_name}' break # Account for FKs to swappable models swappable_setting = getattr(field, 'swappable_setting', None) if swappable_setting is not None: dep_app_label = "__setting__" dep_object_name = swappable_setting else: dep_app_label, dep_object_name = resolve_relation( remote_field_model, app_label, model_name, ) dependencies = [(dep_app_label, dep_object_name, None, True)] if getattr(field.remote_field, 'through', None): through_app_label, through_object_name = resolve_relation( remote_field_model, app_label, model_name, ) dependencies.append((through_app_label, through_object_name, None, True)) return dependencies def _generate_altered_foo_together(self, operation): option_name = operation.option_name for app_label, model_name in sorted(self.kept_model_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = 
self.to_state.models[app_label, model_name] # We run the old version through the field renames to account for those old_value = old_model_state.options.get(option_name) old_value = { tuple( self.renamed_fields.get((app_label, model_name, n), n) for n in unique ) for unique in old_value } if old_value else set() new_value = new_model_state.options.get(option_name) new_value = set(new_value) if new_value else set() if old_value != new_value: dependencies = [] for foo_togethers in new_value: for field_name in foo_togethers: field = new_model_state.get_field(field_name) if field.remote_field and field.remote_field.model: dependencies.extend(self._get_dependencies_for_foreign_key( app_label, model_name, field, self.to_state, )) self.add_operation( app_label, operation( name=model_name, **{option_name: new_value} ), dependencies=dependencies, ) def generate_altered_unique_together(self): self._generate_altered_foo_together(operations.AlterUniqueTogether) def generate_altered_index_together(self): self._generate_altered_foo_together(operations.AlterIndexTogether) def generate_altered_db_table(self): models_to_check = self.kept_model_keys.union(self.kept_proxy_keys, self.kept_unmanaged_keys) for app_label, model_name in sorted(models_to_check): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, model_name] old_db_table_name = old_model_state.options.get('db_table') new_db_table_name = new_model_state.options.get('db_table') if old_db_table_name != new_db_table_name: self.add_operation( app_label, operations.AlterModelTable( name=model_name, table=new_db_table_name, ) ) def generate_altered_options(self): """ Work out if any non-schema-affecting options have changed and make an operation to represent them in state changes (in case Python code in migrations needs them). 
""" models_to_check = self.kept_model_keys.union( self.kept_proxy_keys, self.kept_unmanaged_keys, # unmanaged converted to managed self.old_unmanaged_keys & self.new_model_keys, # managed converted to unmanaged self.old_model_keys & self.new_unmanaged_keys, ) for app_label, model_name in sorted(models_to_check): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, model_name] old_options = { key: value for key, value in old_model_state.options.items() if key in AlterModelOptions.ALTER_OPTION_KEYS } new_options = { key: value for key, value in new_model_state.options.items() if key in AlterModelOptions.ALTER_OPTION_KEYS } if old_options != new_options: self.add_operation( app_label, operations.AlterModelOptions( name=model_name, options=new_options, ) ) def generate_altered_order_with_respect_to(self): for app_label, model_name in sorted(self.kept_model_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, model_name] if (old_model_state.options.get("order_with_respect_to") != new_model_state.options.get("order_with_respect_to")): # Make sure it comes second if we're adding # (removal dependency is part of RemoveField) dependencies = [] if new_model_state.options.get("order_with_respect_to"): dependencies.append(( app_label, model_name, new_model_state.options["order_with_respect_to"], True, )) # Actually generate the operation self.add_operation( app_label, operations.AlterOrderWithRespectTo( name=model_name, order_with_respect_to=new_model_state.options.get('order_with_respect_to'), ), dependencies=dependencies, ) def generate_altered_managers(self): for app_label, model_name in sorted(self.kept_model_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, model_name] if old_model_state.managers != new_model_state.managers: self.add_operation( app_label, operations.AlterModelManagers( name=model_name, managers=new_model_state.managers, ) ) def arrange_for_graph(self, changes, graph, migration_name=None): """ Take a result from changes() and a MigrationGraph, and fix the names and dependencies of the changes so they extend the graph from the leaf nodes for each app. """ leaves = graph.leaf_nodes() name_map = {} for app_label, migrations in list(changes.items()): if not migrations: continue # Find the app label's current leaf node app_leaf = None for leaf in leaves: if leaf[0] == app_label: app_leaf = leaf break # Do they want an initial migration for this app? if app_leaf is None and not self.questioner.ask_initial(app_label): # They don't. 
for migration in migrations: name_map[(app_label, migration.name)] = (app_label, "__first__") del changes[app_label] continue # Work out the next number in the sequence if app_leaf is None: next_number = 1 else: next_number = (self.parse_number(app_leaf[1]) or 0) + 1 # Name each migration for i, migration in enumerate(migrations): if i == 0 and app_leaf: migration.dependencies.append(app_leaf) new_name_parts = ['%04i' % next_number] if migration_name: new_name_parts.append(migration_name) elif i == 0 and not app_leaf: new_name_parts.append('initial') else: new_name_parts.append(migration.suggest_name()[:100]) new_name = '_'.join(new_name_parts) name_map[(app_label, migration.name)] = (app_label, new_name) next_number += 1 migration.name = new_name # Now fix dependencies for migrations in changes.values(): for migration in migrations: migration.dependencies = [name_map.get(d, d) for d in migration.dependencies] return changes def _trim_to_apps(self, changes, app_labels): """ Take changes from arrange_for_graph() and set of app labels, and return a modified set of changes which trims out as many migrations that are not in app_labels as possible. Note that some other migrations may still be present as they may be required dependencies. """ # Gather other app dependencies in a first pass app_dependencies = {} for app_label, migrations in changes.items(): for migration in migrations: for dep_app_label, name in migration.dependencies: app_dependencies.setdefault(app_label, set()).add(dep_app_label) required_apps = set(app_labels) # Keep resolving till there's no change old_required_apps = None while old_required_apps != required_apps: old_required_apps = set(required_apps) required_apps.update(*[app_dependencies.get(app_label, ()) for app_label in required_apps]) # Remove all migrations that aren't needed for app_label in list(changes): if app_label not in required_apps: del changes[app_label] return changes @classmethod def parse_number(cls, name): """ Given a migration name, try to extract a number from the beginning of it. If no number is found, return None. """ match = re.match(r'^\d+', name) if match: return int(match[0]) return None
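# --- Illustrative sketch; not part of the original module -------------------
# A small, hedged demo of the naming helpers defined above. It assumes the
# enclosing class is Django's MigrationAutodetector (its class definition
# appears earlier in this file); the migration names below are hypothetical.
if __name__ == '__main__':
    # parse_number() extracts the leading sequence number, or None if absent.
    assert MigrationAutodetector.parse_number('0003_auto_20210101_1200') == 3
    assert MigrationAutodetector.parse_number('initial') is None
    # arrange_for_graph() uses that number to continue an app's numbering:
    # with a leaf node ('myapp', '0003_...') the next migration is named
    # '0004_<suggested name>', and with no existing leaf, '0001_initial'.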
571b25b9b823bd72d67c76e9b934e0f12fcc5053a286c9c63fc271fb49441a81
import datetime
import re

from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT

COMPILED_REGEX_TYPE = type(re.compile(''))


class RegexObject:
    def __init__(self, obj):
        self.pattern = obj.pattern
        self.flags = obj.flags

    def __eq__(self, other):
        return self.pattern == other.pattern and self.flags == other.flags


def get_migration_name_timestamp():
    return datetime.datetime.now().strftime("%Y%m%d_%H%M")


def resolve_relation(model, app_label=None, model_name=None):
    """
    Turn a model class or model reference string into a model tuple.

    app_label and model_name are used to resolve the scope of recursive and
    unscoped model relationships.
    """
    if isinstance(model, str):
        if model == RECURSIVE_RELATIONSHIP_CONSTANT:
            if app_label is None or model_name is None:
                raise TypeError(
                    'app_label and model_name must be provided to resolve '
                    'recursive relationships.'
                )
            return app_label, model_name
        if '.' in model:
            app_label, model_name = model.split('.', 1)
            return app_label, model_name.lower()
        if app_label is None:
            raise TypeError(
                'app_label must be provided to resolve unscoped model '
                'relationships.'
            )
        return app_label, model.lower()
    return model._meta.app_label, model._meta.model_name
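# --- Illustrative sketch; not part of the original module -------------------
# How resolve_relation() normalizes the ways a relation target can be spelled:
# the recursive constant needs the owning model's scope, a dotted reference
# carries its own app label, and a bare model name falls back to the given
# app_label. The app and model names below are hypothetical.
if __name__ == '__main__':
    assert resolve_relation(RECURSIVE_RELATIONSHIP_CONSTANT, 'polls', 'question') == ('polls', 'question')
    assert resolve_relation('auth.Permission') == ('auth', 'permission')
    assert resolve_relation('Choice', app_label='polls') == ('polls', 'choice')
    # Passing a model class instead returns
    # (model._meta.app_label, model._meta.model_name).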
eb74d9d243be69405249301843e43f5bf94d2c458e5ce27e88e19a234504907e
import copy import inspect import warnings from functools import partialmethod from itertools import chain import django from django.apps import apps from django.conf import settings from django.core import checks from django.core.exceptions import ( NON_FIELD_ERRORS, FieldDoesNotExist, FieldError, MultipleObjectsReturned, ObjectDoesNotExist, ValidationError, ) from django.db import ( DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY, DatabaseError, connection, connections, router, transaction, ) from django.db.models import ( NOT_PROVIDED, ExpressionWrapper, IntegerField, Max, Value, ) from django.db.models.constants import LOOKUP_SEP from django.db.models.constraints import CheckConstraint, UniqueConstraint from django.db.models.deletion import CASCADE, Collector from django.db.models.fields.related import ( ForeignObjectRel, OneToOneField, lazy_related_operation, resolve_relation, ) from django.db.models.functions import Coalesce from django.db.models.manager import Manager from django.db.models.options import Options from django.db.models.query import F, Q from django.db.models.signals import ( class_prepared, post_init, post_save, pre_init, pre_save, ) from django.db.models.utils import make_model_tuple from django.utils.encoding import force_str from django.utils.hashable import make_hashable from django.utils.text import capfirst, get_text_list from django.utils.translation import gettext_lazy as _ class Deferred: def __repr__(self): return '<Deferred field>' def __str__(self): return '<Deferred field>' DEFERRED = Deferred() def subclass_exception(name, bases, module, attached_to): """ Create exception subclass. Used by ModelBase below. The exception is created in a way that allows it to be pickled, assuming that the returned exception class will be added as an attribute to the 'attached_to' class. """ return type(name, bases, { '__module__': module, '__qualname__': '%s.%s' % (attached_to.__qualname__, name), }) def _has_contribute_to_class(value): # Only call contribute_to_class() if it's bound. return not inspect.isclass(value) and hasattr(value, 'contribute_to_class') class ModelBase(type): """Metaclass for all models.""" def __new__(cls, name, bases, attrs, **kwargs): super_new = super().__new__ # Also ensure initialization is only performed for subclasses of Model # (excluding Model class itself). parents = [b for b in bases if isinstance(b, ModelBase)] if not parents: return super_new(cls, name, bases, attrs) # Create the class. module = attrs.pop('__module__') new_attrs = {'__module__': module} classcell = attrs.pop('__classcell__', None) if classcell is not None: new_attrs['__classcell__'] = classcell attr_meta = attrs.pop('Meta', None) # Pass all attrs without a (Django-specific) contribute_to_class() # method to type.__new__() so that they're properly initialized # (i.e. __set_name__()). contributable_attrs = {} for obj_name, obj in attrs.items(): if _has_contribute_to_class(obj): contributable_attrs[obj_name] = obj else: new_attrs[obj_name] = obj new_class = super_new(cls, name, bases, new_attrs, **kwargs) abstract = getattr(attr_meta, 'abstract', False) meta = attr_meta or getattr(new_class, 'Meta', None) base_meta = getattr(new_class, '_meta', None) app_label = None # Look for an application configuration to attach the model to. 
app_config = apps.get_containing_app_config(module) if getattr(meta, 'app_label', None) is None: if app_config is None: if not abstract: raise RuntimeError( "Model class %s.%s doesn't declare an explicit " "app_label and isn't in an application in " "INSTALLED_APPS." % (module, name) ) else: app_label = app_config.label new_class.add_to_class('_meta', Options(meta, app_label)) if not abstract: new_class.add_to_class( 'DoesNotExist', subclass_exception( 'DoesNotExist', tuple( x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (ObjectDoesNotExist,), module, attached_to=new_class)) new_class.add_to_class( 'MultipleObjectsReturned', subclass_exception( 'MultipleObjectsReturned', tuple( x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (MultipleObjectsReturned,), module, attached_to=new_class)) if base_meta and not base_meta.abstract: # Non-abstract child classes inherit some attributes from their # non-abstract parent (unless an ABC comes before it in the # method resolution order). if not hasattr(meta, 'ordering'): new_class._meta.ordering = base_meta.ordering if not hasattr(meta, 'get_latest_by'): new_class._meta.get_latest_by = base_meta.get_latest_by is_proxy = new_class._meta.proxy # If the model is a proxy, ensure that the base class # hasn't been swapped out. if is_proxy and base_meta and base_meta.swapped: raise TypeError("%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped)) # Add remaining attributes (those with a contribute_to_class() method) # to the class. for obj_name, obj in contributable_attrs.items(): new_class.add_to_class(obj_name, obj) # All the fields of any type declared on this model new_fields = chain( new_class._meta.local_fields, new_class._meta.local_many_to_many, new_class._meta.private_fields ) field_names = {f.name for f in new_fields} # Basic setup for proxy models. if is_proxy: base = None for parent in [kls for kls in parents if hasattr(kls, '_meta')]: if parent._meta.abstract: if parent._meta.fields: raise TypeError( "Abstract base class containing model fields not " "permitted for proxy model '%s'." % name ) else: continue if base is None: base = parent elif parent._meta.concrete_model is not base._meta.concrete_model: raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name) if base is None: raise TypeError("Proxy model '%s' has no non-abstract model base class." % name) new_class._meta.setup_proxy(base) new_class._meta.concrete_model = base._meta.concrete_model else: new_class._meta.concrete_model = new_class # Collect the parent links for multi-table inheritance. parent_links = {} for base in reversed([new_class] + parents): # Conceptually equivalent to `if base is Model`. if not hasattr(base, '_meta'): continue # Skip concrete parent classes. if base != new_class and not base._meta.abstract: continue # Locate OneToOneField instances. for field in base._meta.local_fields: if isinstance(field, OneToOneField) and field.remote_field.parent_link: related = resolve_relation(new_class, field.remote_field.model) parent_links[make_model_tuple(related)] = field # Track fields inherited from base models. inherited_attributes = set() # Do the appropriate setup for any model parents. for base in new_class.mro(): if base not in parents or not hasattr(base, '_meta'): # Things without _meta aren't functional models, so they're # uninteresting parents. 
inherited_attributes.update(base.__dict__) continue parent_fields = base._meta.local_fields + base._meta.local_many_to_many if not base._meta.abstract: # Check for clashes between locally declared fields and those # on the base classes. for field in parent_fields: if field.name in field_names: raise FieldError( 'Local field %r in class %r clashes with field of ' 'the same name from base class %r.' % ( field.name, name, base.__name__, ) ) else: inherited_attributes.add(field.name) # Concrete classes... base = base._meta.concrete_model base_key = make_model_tuple(base) if base_key in parent_links: field = parent_links[base_key] elif not is_proxy: attr_name = '%s_ptr' % base._meta.model_name field = OneToOneField( base, on_delete=CASCADE, name=attr_name, auto_created=True, parent_link=True, ) if attr_name in field_names: raise FieldError( "Auto-generated field '%s' in class %r for " "parent_link to base class %r clashes with " "declared field of the same name." % ( attr_name, name, base.__name__, ) ) # Only add the ptr field if it's not already present; # e.g. migrations will already have it specified if not hasattr(new_class, attr_name): new_class.add_to_class(attr_name, field) else: field = None new_class._meta.parents[base] = field else: base_parents = base._meta.parents.copy() # Add fields from abstract base class if it wasn't overridden. for field in parent_fields: if (field.name not in field_names and field.name not in new_class.__dict__ and field.name not in inherited_attributes): new_field = copy.deepcopy(field) new_class.add_to_class(field.name, new_field) # Replace parent links defined on this base by the new # field. It will be appropriately resolved if required. if field.one_to_one: for parent, parent_link in base_parents.items(): if field == parent_link: base_parents[parent] = new_field # Pass any non-abstract parent classes onto child. new_class._meta.parents.update(base_parents) # Inherit private fields (like GenericForeignKey) from the parent # class for field in base._meta.private_fields: if field.name in field_names: if not base._meta.abstract: raise FieldError( 'Local field %r in class %r clashes with field of ' 'the same name from base class %r.' % ( field.name, name, base.__name__, ) ) else: field = copy.deepcopy(field) if not base._meta.abstract: field.mti_inherited = True new_class.add_to_class(field.name, field) # Copy indexes so that index names are unique when models extend an # abstract model. new_class._meta.indexes = [copy.deepcopy(idx) for idx in new_class._meta.indexes] if abstract: # Abstract base models can't be instantiated and don't appear in # the list of models for an app. We do the final setup for them a # little differently from normal models. attr_meta.abstract = False new_class.Meta = attr_meta return new_class new_class._prepare() new_class._meta.apps.register_model(new_class._meta.app_label, new_class) return new_class def add_to_class(cls, name, value): if _has_contribute_to_class(value): value.contribute_to_class(cls, name) else: setattr(cls, name, value) def _prepare(cls): """Create some methods once self._meta has been populated.""" opts = cls._meta opts._prepare(cls) if opts.order_with_respect_to: cls.get_next_in_order = partialmethod(cls._get_next_or_previous_in_order, is_next=True) cls.get_previous_in_order = partialmethod(cls._get_next_or_previous_in_order, is_next=False) # Defer creating accessors on the foreign class until it has been # created and registered. 
If remote_field is None, we're ordering # with respect to a GenericForeignKey and don't know what the # foreign class is - we'll add those accessors later in # contribute_to_class(). if opts.order_with_respect_to.remote_field: wrt = opts.order_with_respect_to remote = wrt.remote_field.model lazy_related_operation(make_foreign_order_accessors, cls, remote) # Give the class a docstring -- its definition. if cls.__doc__ is None: cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join(f.name for f in opts.fields)) get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(opts.label_lower) if get_absolute_url_override: setattr(cls, 'get_absolute_url', get_absolute_url_override) if not opts.managers: if any(f.name == 'objects' for f in opts.fields): raise ValueError( "Model %s must specify a custom Manager, because it has a " "field named 'objects'." % cls.__name__ ) manager = Manager() manager.auto_created = True cls.add_to_class('objects', manager) # Set the name of _meta.indexes. This can't be done in # Options.contribute_to_class() because fields haven't been added to # the model at that point. for index in cls._meta.indexes: if not index.name: index.set_name_with_model(cls) class_prepared.send(sender=cls) @property def _base_manager(cls): return cls._meta.base_manager @property def _default_manager(cls): return cls._meta.default_manager class ModelStateFieldsCacheDescriptor: def __get__(self, instance, cls=None): if instance is None: return self res = instance.fields_cache = {} return res class ModelState: """Store model instance state.""" db = None # If true, uniqueness validation checks will consider this a new, unsaved # object. Necessary for correct validation of new instances of objects with # explicit (non-auto) PKs. This impacts validation only; it has no effect # on the actual save. adding = True fields_cache = ModelStateFieldsCacheDescriptor() class Model(metaclass=ModelBase): def __init__(self, *args, **kwargs): # Alias some things as locals to avoid repeat global lookups cls = self.__class__ opts = self._meta _setattr = setattr _DEFERRED = DEFERRED if opts.abstract: raise TypeError('Abstract models cannot be instantiated.') pre_init.send(sender=cls, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. if len(args) > len(opts.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(opts.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): if val is _DEFERRED: continue _setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(opts.fields) for val, field in zip(args, fields_iter): if val is _DEFERRED: continue _setattr(self, field.attname, val) if kwargs.pop(field.name, NOT_PROVIDED) is not NOT_PROVIDED: raise TypeError( f"{cls.__qualname__}() got both positional and " f"keyword arguments for field '{field.name}'." ) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. 
for field in fields_iter: is_related_object = False # Virtual field if field.attname not in kwargs and field.column is None: continue if kwargs: if isinstance(field.remote_field, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. if rel_obj is not _DEFERRED: _setattr(self, field.name, rel_obj) else: if val is not _DEFERRED: _setattr(self, field.attname, val) if kwargs: property_names = opts._property_names for prop in tuple(kwargs): try: # Any remaining kwargs must correspond to properties or # virtual fields. if prop in property_names or opts.get_field(prop): if kwargs[prop] is not _DEFERRED: _setattr(self, prop, kwargs[prop]) del kwargs[prop] except (AttributeError, FieldDoesNotExist): pass for kwarg in kwargs: raise TypeError("%s() got an unexpected keyword argument '%s'" % (cls.__name__, kwarg)) super().__init__() post_init.send(sender=cls, instance=self) @classmethod def from_db(cls, db, field_names, values): if len(values) != len(cls._meta.concrete_fields): values_iter = iter(values) values = [ next(values_iter) if f.attname in field_names else DEFERRED for f in cls._meta.concrete_fields ] new = cls(*values) new._state.adding = False new._state.db = db return new def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def __str__(self): return '%s object (%s)' % (self.__class__.__name__, self.pk) def __eq__(self, other): if not isinstance(other, Model): return NotImplemented if self._meta.concrete_model != other._meta.concrete_model: return False my_pk = self.pk if my_pk is None: return self is other return my_pk == other.pk def __hash__(self): if self.pk is None: raise TypeError("Model instances without primary key value are unhashable") return hash(self.pk) def __reduce__(self): data = self.__getstate__() data[DJANGO_VERSION_PICKLE_KEY] = django.__version__ class_id = self._meta.app_label, self._meta.object_name return model_unpickle, (class_id,), data def __getstate__(self): """Hook to allow choosing the attributes to pickle.""" state = self.__dict__.copy() state['_state'] = copy.copy(state['_state']) state['_state'].fields_cache = state['_state'].fields_cache.copy() return state def __setstate__(self, state): pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY) if pickled_version: if pickled_version != django.__version__: warnings.warn( "Pickled model instance's Django version %s does not " "match the current version %s." 
% (pickled_version, django.__version__), RuntimeWarning, stacklevel=2, ) else: warnings.warn( "Pickled model instance's Django version is not specified.", RuntimeWarning, stacklevel=2, ) self.__dict__.update(state) def _get_pk_val(self, meta=None): meta = meta or self._meta return getattr(self, meta.pk.attname) def _set_pk_val(self, value): for parent_link in self._meta.parents.values(): if parent_link and parent_link != self._meta.pk: setattr(self, parent_link.target_field.attname, value) return setattr(self, self._meta.pk.attname, value) pk = property(_get_pk_val, _set_pk_val) def get_deferred_fields(self): """ Return a set containing names of deferred fields for this instance. """ return { f.attname for f in self._meta.concrete_fields if f.attname not in self.__dict__ } def refresh_from_db(self, using=None, fields=None): """ Reload field values from the database. By default, the reloading happens from the database this instance was loaded from, or by the read router if this instance wasn't loaded from any database. The using parameter will override the default. Fields can be used to specify which fields to reload. The fields should be an iterable of field attnames. If fields is None, then all non-deferred fields are reloaded. When accessing deferred fields of an instance, the deferred loading of the field will call this method. """ if fields is None: self._prefetched_objects_cache = {} else: prefetched_objects_cache = getattr(self, '_prefetched_objects_cache', ()) for field in fields: if field in prefetched_objects_cache: del prefetched_objects_cache[field] fields.remove(field) if not fields: return if any(LOOKUP_SEP in f for f in fields): raise ValueError( 'Found "%s" in fields argument. Relations and transforms ' 'are not allowed in fields.' % LOOKUP_SEP) hints = {'instance': self} db_instance_qs = self.__class__._base_manager.db_manager(using, hints=hints).filter(pk=self.pk) # Use provided fields, if not set then reload all non-deferred fields. deferred_fields = self.get_deferred_fields() if fields is not None: fields = list(fields) db_instance_qs = db_instance_qs.only(*fields) elif deferred_fields: fields = [f.attname for f in self._meta.concrete_fields if f.attname not in deferred_fields] db_instance_qs = db_instance_qs.only(*fields) db_instance = db_instance_qs.get() non_loaded_fields = db_instance.get_deferred_fields() for field in self._meta.concrete_fields: if field.attname in non_loaded_fields: # This field wasn't refreshed - skip ahead. continue setattr(self, field.attname, getattr(db_instance, field.attname)) # Clear cached foreign keys. if field.is_relation and field.is_cached(self): field.delete_cached_value(self) # Clear cached relations. for field in self._meta.related_objects: if field.is_cached(self): field.delete_cached_value(self) self._state.db = db_instance._state.db def serializable_value(self, field_name): """ Return the value of the field name for this instance. If the field is a foreign key, return the id value instead of the object. If there's no Field object with this name on the model, return the model attribute's value. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field(field_name) except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname) def save(self, force_insert=False, force_update=False, using=None, update_fields=None): """ Save the current instance. 
Override this in a subclass if you want to control the saving process. The 'force_insert' and 'force_update' parameters can be used to insist that the "save" must be an SQL insert or update (or equivalent for non-SQL backends), respectively. Normally, they should not be set. """ self._prepare_related_fields_for_save(operation_name='save') using = using or router.db_for_write(self.__class__, instance=self) if force_insert and (force_update or update_fields): raise ValueError("Cannot force both insert and updating in model saving.") deferred_fields = self.get_deferred_fields() if update_fields is not None: # If update_fields is empty, skip the save. We do also check for # no-op saves later on for inheritance cases. This bailout is # still needed for skipping signal sending. if not update_fields: return update_fields = frozenset(update_fields) field_names = set() for field in self._meta.concrete_fields: if not field.primary_key: field_names.add(field.name) if field.name != field.attname: field_names.add(field.attname) non_model_fields = update_fields.difference(field_names) if non_model_fields: raise ValueError( 'The following fields do not exist in this model, are m2m ' 'fields, or are non-concrete fields: %s' % ', '.join(non_model_fields) ) # If saving to the same database, and this model is deferred, then # automatically do an "update_fields" save on the loaded fields. elif not force_insert and deferred_fields and using == self._state.db: field_names = set() for field in self._meta.concrete_fields: if not field.primary_key and not hasattr(field, 'through'): field_names.add(field.attname) loaded_fields = field_names.difference(deferred_fields) if loaded_fields: update_fields = frozenset(loaded_fields) self.save_base(using=using, force_insert=force_insert, force_update=force_update, update_fields=update_fields) save.alters_data = True def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handle the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. """ using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or update_fields cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: pre_save.send( sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields, ) # A transaction isn't needed if one query is issued. if meta.parents: context_manager = transaction.atomic(using=using, savepoint=False) else: context_manager = transaction.mark_for_rollback_on_error(using=using) with context_manager: parent_inserted = False if not raw: parent_inserted = self._save_parents(cls, using, update_fields) updated = self._save_table( raw, cls, force_insert or parent_inserted, force_update, using, update_fields, ) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. 
self._state.adding = False # Signal that the save is complete if not meta.auto_created: post_save.send( sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using, ) save_base.alters_data = True def _save_parents(self, cls, using, update_fields): """Save all the parents of cls using values from self.""" meta = cls._meta inserted = False for parent, field in meta.parents.items(): # Make sure the link fields are synced between parent and self. if (field and getattr(self, parent._meta.pk.attname) is None and getattr(self, field.attname) is not None): setattr(self, parent._meta.pk.attname, getattr(self, field.attname)) parent_inserted = self._save_parents(cls=parent, using=using, update_fields=update_fields) updated = self._save_table( cls=parent, using=using, update_fields=update_fields, force_insert=parent_inserted, ) if not updated: inserted = True # Set the parent's PK value to self. if field: setattr(self, field.attname, self._get_pk_val(parent._meta)) # Since we didn't have an instance of the parent handy set # attname directly, bypassing the descriptor. Invalidate # the related object cache, in case it's been accidentally # populated. A fresh instance will be re-built from the # database if necessary. if field.is_cached(self): field.delete_cached_value(self) return inserted def _save_table(self, raw=False, cls=None, force_insert=False, force_update=False, using=None, update_fields=None): """ Do the heavy-lifting involved in saving. Update or insert the data for a single table. """ meta = cls._meta non_pks = [f for f in meta.local_concrete_fields if not f.primary_key] if update_fields: non_pks = [f for f in non_pks if f.name in update_fields or f.attname in update_fields] pk_val = self._get_pk_val(meta) if pk_val is None: pk_val = meta.pk.get_pk_value_on_save(self) setattr(self, meta.pk.attname, pk_val) pk_set = pk_val is not None if not pk_set and (force_update or update_fields): raise ValueError("Cannot force an update in save() with no primary key.") updated = False # Skip an UPDATE when adding an instance and primary key has a default. if ( not raw and not force_insert and self._state.adding and meta.pk.default and meta.pk.default is not NOT_PROVIDED ): force_insert = True # If possible, try an UPDATE. If that doesn't update anything, do an INSERT. 
if pk_set and not force_insert: base_qs = cls._base_manager.using(using) values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False))) for f in non_pks] forced_update = update_fields or force_update updated = self._do_update(base_qs, using, pk_val, values, update_fields, forced_update) if force_update and not updated: raise DatabaseError("Forced update did not affect any rows.") if update_fields and not updated: raise DatabaseError("Save with update_fields did not affect any rows.") if not updated: if meta.order_with_respect_to: # If this is a model with an order_with_respect_to # autopopulate the _order field field = meta.order_with_respect_to filter_args = field.get_filter_kwargs_for_object(self) self._order = cls._base_manager.using(using).filter(**filter_args).aggregate( _order__max=Coalesce( ExpressionWrapper(Max('_order') + Value(1), output_field=IntegerField()), Value(0), ), )['_order__max'] fields = meta.local_concrete_fields if not pk_set: fields = [f for f in fields if f is not meta.auto_field] returning_fields = meta.db_returning_fields results = self._do_insert(cls._base_manager, using, fields, returning_fields, raw) if results: for value, field in zip(results[0], returning_fields): setattr(self, field.attname, value) return updated def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update): """ Try to update the model. Return True if the model was updated (if an update query was done and a matching row was found in the DB). """ filtered = base_qs.filter(pk=pk_val) if not values: # We can end up here when saving a model in inheritance chain where # update_fields doesn't target any field in current model. In that # case we just say the update succeeded. Another case ending up here # is a model with just PK - in that case check that the PK still # exists. return update_fields is not None or filtered.exists() if self._meta.select_on_save and not forced_update: return ( filtered.exists() and # It may happen that the object is deleted from the DB right after # this check, causing the subsequent UPDATE to return zero matching # rows. The same result can occur in some rare cases when the # database returns zero despite the UPDATE being executed # successfully (a row is matched and updated). In order to # distinguish these two cases, the object's existence in the # database is again checked for if the UPDATE query returns 0. (filtered._update(values) > 0 or filtered.exists()) ) return filtered._update(values) > 0 def _do_insert(self, manager, using, fields, returning_fields, raw): """ Do an INSERT. If returning_fields is defined then this method should return the newly created data for the model. """ return manager._insert( [self], fields=fields, returning_fields=returning_fields, using=using, raw=raw, ) def _prepare_related_fields_for_save(self, operation_name): # Ensure that a model instance without a PK hasn't been assigned to # a ForeignKey or OneToOneField on this model. If the field is # nullable, allowing the save would result in silent data loss. for field in self._meta.concrete_fields: # If the related field isn't cached, then an instance hasn't been # assigned and there's no need to worry about this check. 
if field.is_relation and field.is_cached(self): obj = getattr(self, field.name, None) if not obj: continue # A pk may have been assigned manually to a model instance not # saved to the database (or auto-generated in a case like # UUIDField), but we allow the save to proceed and rely on the # database to raise an IntegrityError if applicable. If # constraints aren't supported by the database, there's the # unavoidable risk of data corruption. if obj.pk is None: # Remove the object from a related instance cache. if not field.remote_field.multiple: field.remote_field.delete_cached_value(obj) raise ValueError( "%s() prohibited to prevent data loss due to unsaved " "related object '%s'." % (operation_name, field.name) ) elif getattr(self, field.attname) in field.empty_values: # Use pk from related object if it has been saved after # an assignment. setattr(self, field.attname, obj.pk) # If the relationship's pk/to_field was changed, clear the # cached relationship. if getattr(obj, field.target_field.attname) != getattr(self, field.attname): field.delete_cached_value(self) def delete(self, using=None, keep_parents=False): using = using or router.db_for_write(self.__class__, instance=self) assert self.pk is not None, ( "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname) ) collector = Collector(using=using) collector.collect([self], keep_parents=keep_parents) return collector.delete() delete.alters_data = True def _get_FIELD_display(self, field): value = getattr(self, field.attname) choices_dict = dict(make_hashable(field.flatchoices)) # force_str() to coerce lazy strings. return force_str(choices_dict.get(make_hashable(value), value), strings_only=True) def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs): if not self.pk: raise ValueError("get_next/get_previous cannot be used on unsaved objects.") op = 'gt' if is_next else 'lt' order = '' if is_next else '-' param = getattr(self, field.attname) q = Q(**{'%s__%s' % (field.name, op): param}) q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk}) qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by( '%s%s' % (order, field.name), '%spk' % order ) try: return qs[0] except IndexError: raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name) def _get_next_or_previous_in_order(self, is_next): cachename = "__%s_order_cache" % is_next if not hasattr(self, cachename): op = 'gt' if is_next else 'lt' order = '_order' if is_next else '-_order' order_field = self._meta.order_with_respect_to filter_args = order_field.get_filter_kwargs_for_object(self) obj = self.__class__._default_manager.filter(**filter_args).filter(**{ '_order__%s' % op: self.__class__._default_manager.values('_order').filter(**{ self._meta.pk.name: self.pk }) }).order_by(order)[:1].get() setattr(self, cachename, obj) return getattr(self, cachename) def prepare_database_save(self, field): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return getattr(self, field.remote_field.get_related_field().attname) def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. 
""" pass def validate_unique(self, exclude=None): """ Check unique constraints on the model and raise ValidationError if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors) def _get_unique_checks(self, exclude=None): """ Return a list of checks to perform. Since validate_unique() could be called from a ModelForm, some fields may have been excluded; we can't perform a unique check on a model that is missing fields involved in that check. Fields that did not validate should also be excluded, but they need to be passed in via the exclude argument. """ if exclude is None: exclude = [] unique_checks = [] unique_togethers = [(self.__class__, self._meta.unique_together)] constraints = [(self.__class__, self._meta.total_unique_constraints)] for parent_class in self._meta.get_parent_list(): if parent_class._meta.unique_together: unique_togethers.append((parent_class, parent_class._meta.unique_together)) if parent_class._meta.total_unique_constraints: constraints.append( (parent_class, parent_class._meta.total_unique_constraints) ) for model_class, unique_together in unique_togethers: for check in unique_together: if not any(name in exclude for name in check): # Add the check if the field isn't excluded. unique_checks.append((model_class, tuple(check))) for model_class, model_constraints in constraints: for constraint in model_constraints: if not any(name in exclude for name in constraint.fields): unique_checks.append((model_class, constraint.fields)) # These are checks for the unique_for_<date/year/month>. date_checks = [] # Gather a list of checks for fields declared as unique and add them to # the list of checks. fields_with_class = [(self.__class__, self._meta.local_fields)] for parent_class in self._meta.get_parent_list(): fields_with_class.append((parent_class, parent_class._meta.local_fields)) for model_class, fields in fields_with_class: for f in fields: name = f.name if name in exclude: continue if f.unique: unique_checks.append((model_class, (name,))) if f.unique_for_date and f.unique_for_date not in exclude: date_checks.append((model_class, 'date', name, f.unique_for_date)) if f.unique_for_year and f.unique_for_year not in exclude: date_checks.append((model_class, 'year', name, f.unique_for_year)) if f.unique_for_month and f.unique_for_month not in exclude: date_checks.append((model_class, 'month', name, f.unique_for_month)) return unique_checks, date_checks def _perform_unique_checks(self, unique_checks): errors = {} for model_class, unique_check in unique_checks: # Try to look up an existing object with the same values as this # object's values for all the unique field. lookup_kwargs = {} for field_name in unique_check: f = self._meta.get_field(field_name) lookup_value = getattr(self, f.attname) # TODO: Handle multiple backends with different feature flags. 
if (lookup_value is None or (lookup_value == '' and connection.features.interprets_empty_strings_as_nulls)): # no value, skip the lookup continue if f.primary_key and not self._state.adding: # no need to check for unique primary key when editing continue lookup_kwargs[str(field_name)] = lookup_value # some fields were skipped, no reason to do the check if len(unique_check) != len(lookup_kwargs): continue qs = model_class._default_manager.filter(**lookup_kwargs) # Exclude the current object from the query if we are editing an # instance (as opposed to creating a new one) # Note that we need to use the pk as defined by model_class, not # self.pk. These can be different fields because model inheritance # allows single model to have effectively multiple primary keys. # Refs #17615. model_class_pk = self._get_pk_val(model_class._meta) if not self._state.adding and model_class_pk is not None: qs = qs.exclude(pk=model_class_pk) if qs.exists(): if len(unique_check) == 1: key = unique_check[0] else: key = NON_FIELD_ERRORS errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check)) return errors def _perform_date_checks(self, date_checks): errors = {} for model_class, lookup_type, field, unique_for in date_checks: lookup_kwargs = {} # there's a ticket to add a date lookup, we can remove this special # case if that makes it's way in date = getattr(self, unique_for) if date is None: continue if lookup_type == 'date': lookup_kwargs['%s__day' % unique_for] = date.day lookup_kwargs['%s__month' % unique_for] = date.month lookup_kwargs['%s__year' % unique_for] = date.year else: lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type) lookup_kwargs[field] = getattr(self, field) qs = model_class._default_manager.filter(**lookup_kwargs) # Exclude the current object from the query if we are editing an # instance (as opposed to creating a new one) if not self._state.adding and self.pk is not None: qs = qs.exclude(pk=self.pk) if qs.exists(): errors.setdefault(field, []).append( self.date_error_message(lookup_type, field, unique_for) ) return errors def date_error_message(self, lookup_type, field_name, unique_for): opts = self._meta field = opts.get_field(field_name) return ValidationError( message=field.error_messages['unique_for_date'], code='unique_for_date', params={ 'model': self, 'model_name': capfirst(opts.verbose_name), 'lookup_type': lookup_type, 'field': field_name, 'field_label': capfirst(field.verbose_name), 'date_field': unique_for, 'date_field_label': capfirst(opts.get_field(unique_for).verbose_name), } ) def unique_error_message(self, model_class, unique_check): opts = model_class._meta params = { 'model': self, 'model_class': model_class, 'model_name': capfirst(opts.verbose_name), 'unique_check': unique_check, } # A unique field if len(unique_check) == 1: field = opts.get_field(unique_check[0]) params['field_label'] = capfirst(field.verbose_name) return ValidationError( message=field.error_messages['unique'], code='unique', params=params, ) # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] params['field_labels'] = get_text_list(field_labels, _('and')) return ValidationError( message=_("%(model_name)s with this %(field_labels)s already exists."), code='unique_together', params=params, ) def full_clean(self, exclude=None, validate_unique=True): """ Call clean_fields(), clean(), and validate_unique() on the model. Raise a ValidationError for any errors that occur. 
""" errors = {} if exclude is None: exclude = [] else: exclude = list(exclude) try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. if validate_unique: for name in errors: if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors) def clean_fields(self, exclude=None): """ Clean all fields and raise a ValidationError containing a dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors) @classmethod def check(cls, **kwargs): errors = [*cls._check_swappable(), *cls._check_model(), *cls._check_managers(**kwargs)] if not cls._meta.swapped: databases = kwargs.get('databases') or [] errors += [ *cls._check_fields(**kwargs), *cls._check_m2m_through_same_relationship(), *cls._check_long_column_names(databases), ] clash_errors = ( *cls._check_id_field(), *cls._check_field_name_clashes(), *cls._check_model_name_db_lookup_clashes(), *cls._check_property_name_related_field_accessor_clashes(), *cls._check_single_primary_key(), ) errors.extend(clash_errors) # If there are field name clashes, hide consequent column name # clashes. if not clash_errors: errors.extend(cls._check_column_name_clashes()) errors += [ *cls._check_index_together(), *cls._check_unique_together(), *cls._check_indexes(databases), *cls._check_ordering(), *cls._check_constraints(databases), *cls._check_default_pk(), ] return errors @classmethod def _check_default_pk(cls): if ( not cls._meta.abstract and cls._meta.pk.auto_created and # Inherited PKs are checked in parents models. not ( isinstance(cls._meta.pk, OneToOneField) and cls._meta.pk.remote_field.parent_link ) and not settings.is_overridden('DEFAULT_AUTO_FIELD') and not cls._meta.app_config._is_default_auto_field_overridden ): return [ checks.Warning( f"Auto-created primary key used when not defining a " f"primary key type, by default " f"'{settings.DEFAULT_AUTO_FIELD}'.", hint=( f"Configure the DEFAULT_AUTO_FIELD setting or the " f"{cls._meta.app_config.__class__.__qualname__}." f"default_auto_field attribute to point to a subclass " f"of AutoField, e.g. 'django.db.models.BigAutoField'." ), obj=cls, id='models.W042', ), ] return [] @classmethod def _check_swappable(cls): """Check if the swapped model exists.""" errors = [] if cls._meta.swapped: try: apps.get_model(cls._meta.swapped) except ValueError: errors.append( checks.Error( "'%s' is not of the form 'app_label.app_name'." % cls._meta.swappable, id='models.E001', ) ) except LookupError: app_label, model_name = cls._meta.swapped.split('.') errors.append( checks.Error( "'%s' references '%s.%s', which has not been " "installed, or is abstract." 
% ( cls._meta.swappable, app_label, model_name ), id='models.E002', ) ) return errors @classmethod def _check_model(cls): errors = [] if cls._meta.proxy: if cls._meta.local_fields or cls._meta.local_many_to_many: errors.append( checks.Error( "Proxy model '%s' contains model fields." % cls.__name__, id='models.E017', ) ) return errors @classmethod def _check_managers(cls, **kwargs): """Perform all manager checks.""" errors = [] for manager in cls._meta.managers: errors.extend(manager.check(**kwargs)) return errors @classmethod def _check_fields(cls, **kwargs): """Perform all field checks.""" errors = [] for field in cls._meta.local_fields: errors.extend(field.check(**kwargs)) for field in cls._meta.local_many_to_many: errors.extend(field.check(from_model=cls, **kwargs)) return errors @classmethod def _check_m2m_through_same_relationship(cls): """ Check if no relationship model is used by more than one m2m field. """ errors = [] seen_intermediary_signatures = [] fields = cls._meta.local_many_to_many # Skip when the target model wasn't found. fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase)) # Skip when the relationship model wasn't found. fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase)) for f in fields: signature = (f.remote_field.model, cls, f.remote_field.through, f.remote_field.through_fields) if signature in seen_intermediary_signatures: errors.append( checks.Error( "The model has two identical many-to-many relations " "through the intermediate model '%s'." % f.remote_field.through._meta.label, obj=cls, id='models.E003', ) ) else: seen_intermediary_signatures.append(signature) return errors @classmethod def _check_id_field(cls): """Check if `id` field is a primary key.""" fields = [f for f in cls._meta.local_fields if f.name == 'id' and f != cls._meta.pk] # fields is empty or consists of the invalid "id" field if fields and not fields[0].primary_key and cls._meta.pk.name == 'id': return [ checks.Error( "'id' can only be used as a field name if the field also " "sets 'primary_key=True'.", obj=cls, id='models.E004', ) ] else: return [] @classmethod def _check_field_name_clashes(cls): """Forbid field shadowing in multi-table inheritance.""" errors = [] used_fields = {} # name or attname -> field # Check that multi-inheritance doesn't cause field name shadowing. for parent in cls._meta.get_parent_list(): for f in parent._meta.local_fields: clash = used_fields.get(f.name) or used_fields.get(f.attname) or None if clash: errors.append( checks.Error( "The field '%s' from parent model " "'%s' clashes with the field '%s' " "from parent model '%s'." % ( clash.name, clash.model._meta, f.name, f.model._meta ), obj=cls, id='models.E005', ) ) used_fields[f.name] = f used_fields[f.attname] = f # Check that fields defined in the model don't clash with fields from # parents, including auto-generated fields like multi-table inheritance # child accessors. for parent in cls._meta.get_parent_list(): for f in parent._meta.get_fields(): if f not in used_fields: used_fields[f.name] = f for f in cls._meta.local_fields: clash = used_fields.get(f.name) or used_fields.get(f.attname) or None # Note that we may detect clash between user-defined non-unique # field "id" and automatically added unique field "id", both # defined at the same model. This special case is considered in # _check_id_field and here we ignore it. 
id_conflict = f.name == "id" and clash and clash.name == "id" and clash.model == cls if clash and not id_conflict: errors.append( checks.Error( "The field '%s' clashes with the field '%s' " "from model '%s'." % ( f.name, clash.name, clash.model._meta ), obj=f, id='models.E006', ) ) used_fields[f.name] = f used_fields[f.attname] = f return errors @classmethod def _check_column_name_clashes(cls): # Store a list of column names which have already been used by other fields. used_column_names = [] errors = [] for f in cls._meta.local_fields: _, column_name = f.get_attname_column() # Ensure the column name is not already in use. if column_name and column_name in used_column_names: errors.append( checks.Error( "Field '%s' has column name '%s' that is used by " "another field." % (f.name, column_name), hint="Specify a 'db_column' for the field.", obj=cls, id='models.E007' ) ) else: used_column_names.append(column_name) return errors @classmethod def _check_model_name_db_lookup_clashes(cls): errors = [] model_name = cls.__name__ if model_name.startswith('_') or model_name.endswith('_'): errors.append( checks.Error( "The model name '%s' cannot start or end with an underscore " "as it collides with the query lookup syntax." % model_name, obj=cls, id='models.E023' ) ) elif LOOKUP_SEP in model_name: errors.append( checks.Error( "The model name '%s' cannot contain double underscores as " "it collides with the query lookup syntax." % model_name, obj=cls, id='models.E024' ) ) return errors @classmethod def _check_property_name_related_field_accessor_clashes(cls): errors = [] property_names = cls._meta._property_names related_field_accessors = ( f.get_attname() for f in cls._meta._get_fields(reverse=False) if f.is_relation and f.related_model is not None ) for accessor in related_field_accessors: if accessor in property_names: errors.append( checks.Error( "The property '%s' clashes with a related field " "accessor." 
% accessor, obj=cls, id='models.E025', ) ) return errors @classmethod def _check_single_primary_key(cls): errors = [] if sum(1 for f in cls._meta.local_fields if f.primary_key) > 1: errors.append( checks.Error( "The model cannot have more than one field with " "'primary_key=True'.", obj=cls, id='models.E026', ) ) return errors @classmethod def _check_index_together(cls): """Check the value of "index_together" option.""" if not isinstance(cls._meta.index_together, (tuple, list)): return [ checks.Error( "'index_together' must be a list or tuple.", obj=cls, id='models.E008', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together): return [ checks.Error( "All 'index_together' elements must be lists or tuples.", obj=cls, id='models.E009', ) ] else: errors = [] for fields in cls._meta.index_together: errors.extend(cls._check_local_fields(fields, "index_together")) return errors @classmethod def _check_unique_together(cls): """Check the value of "unique_together" option.""" if not isinstance(cls._meta.unique_together, (tuple, list)): return [ checks.Error( "'unique_together' must be a list or tuple.", obj=cls, id='models.E010', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.unique_together): return [ checks.Error( "All 'unique_together' elements must be lists or tuples.", obj=cls, id='models.E011', ) ] else: errors = [] for fields in cls._meta.unique_together: errors.extend(cls._check_local_fields(fields, "unique_together")) return errors @classmethod def _check_indexes(cls, databases): """Check fields, names, and conditions of indexes.""" errors = [] references = set() for index in cls._meta.indexes: # Index name can't start with an underscore or a number, restricted # for cross-database compatibility with Oracle. if index.name[0] == '_' or index.name[0].isdigit(): errors.append( checks.Error( "The index name '%s' cannot start with an underscore " "or a number." % index.name, obj=cls, id='models.E033', ), ) if len(index.name) > index.max_name_length: errors.append( checks.Error( "The index name '%s' cannot be longer than %d " "characters." % (index.name, index.max_name_length), obj=cls, id='models.E034', ), ) if index.contains_expressions: for expression in index.expressions: references.update( ref[0] for ref in cls._get_expr_references(expression) ) for db in databases: if not router.allow_migrate_model(db, cls): continue connection = connections[db] if not ( connection.features.supports_partial_indexes or 'supports_partial_indexes' in cls._meta.required_db_features ) and any(index.condition is not None for index in cls._meta.indexes): errors.append( checks.Warning( '%s does not support indexes with conditions.' % connection.display_name, hint=( "Conditions will be ignored. Silence this warning " "if you don't care about it." ), obj=cls, id='models.W037', ) ) if not ( connection.features.supports_covering_indexes or 'supports_covering_indexes' in cls._meta.required_db_features ) and any(index.include for index in cls._meta.indexes): errors.append( checks.Warning( '%s does not support indexes with non-key columns.' % connection.display_name, hint=( "Non-key columns will be ignored. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W040', ) ) if not ( connection.features.supports_expression_indexes or 'supports_expression_indexes' in cls._meta.required_db_features ) and any(index.contains_expressions for index in cls._meta.indexes): errors.append( checks.Warning( '%s does not support indexes on expressions.' 
% connection.display_name, hint=( "An index won't be created. Silence this warning " "if you don't care about it." ), obj=cls, id='models.W043', ) ) fields = [field for index in cls._meta.indexes for field, _ in index.fields_orders] fields += [include for index in cls._meta.indexes for include in index.include] fields += references errors.extend(cls._check_local_fields(fields, 'indexes')) return errors @classmethod def _check_local_fields(cls, fields, option): from django.db import models # In order to avoid hitting the relation tree prematurely, we use our # own fields_map instead of using get_field() forward_fields_map = {} for field in cls._meta._get_fields(reverse=False): forward_fields_map[field.name] = field if hasattr(field, 'attname'): forward_fields_map[field.attname] = field errors = [] for field_name in fields: try: field = forward_fields_map[field_name] except KeyError: errors.append( checks.Error( "'%s' refers to the nonexistent field '%s'." % ( option, field_name, ), obj=cls, id='models.E012', ) ) else: if isinstance(field.remote_field, models.ManyToManyRel): errors.append( checks.Error( "'%s' refers to a ManyToManyField '%s', but " "ManyToManyFields are not permitted in '%s'." % ( option, field_name, option, ), obj=cls, id='models.E013', ) ) elif field not in cls._meta.local_fields: errors.append( checks.Error( "'%s' refers to field '%s' which is not local to model '%s'." % (option, field_name, cls._meta.object_name), hint="This issue may be caused by multi-table inheritance.", obj=cls, id='models.E016', ) ) return errors @classmethod def _check_ordering(cls): """ Check "ordering" option -- is it a list of strings and do all fields exist? """ if cls._meta._ordering_clash: return [ checks.Error( "'ordering' and 'order_with_respect_to' cannot be used together.", obj=cls, id='models.E021', ), ] if cls._meta.order_with_respect_to or not cls._meta.ordering: return [] if not isinstance(cls._meta.ordering, (list, tuple)): return [ checks.Error( "'ordering' must be a tuple or list (even if you want to order by only one field).", obj=cls, id='models.E014', ) ] errors = [] fields = cls._meta.ordering # Skip expressions and '?' fields. fields = (f for f in fields if isinstance(f, str) and f != '?') # Convert "-field" to "field". fields = ((f[1:] if f.startswith('-') else f) for f in fields) # Separate related fields and non-related fields. _fields = [] related_fields = [] for f in fields: if LOOKUP_SEP in f: related_fields.append(f) else: _fields.append(f) fields = _fields # Check related fields. for field in related_fields: _cls = cls fld = None for part in field.split(LOOKUP_SEP): try: # pk is an alias that won't be found by opts.get_field. if part == 'pk': fld = _cls._meta.pk else: fld = _cls._meta.get_field(part) if fld.is_relation: _cls = fld.get_path_info()[-1].to_opts.model else: _cls = None except (FieldDoesNotExist, AttributeError): if fld is None or ( fld.get_transform(part) is None and fld.get_lookup(part) is None ): errors.append( checks.Error( "'ordering' refers to the nonexistent field, " "related field, or lookup '%s'." % field, obj=cls, id='models.E015', ) ) # Skip ordering on pk. This is always a valid order_by field # but is an alias and therefore won't be found by opts.get_field. fields = {f for f in fields if f != 'pk'} # Check for invalid or nonexistent fields in ordering. invalid_fields = [] # Any field name that is not present in field_names does not exist. # Also, ordering by m2m fields is not allowed. 
opts = cls._meta valid_fields = set(chain.from_iterable( (f.name, f.attname) if not (f.auto_created and not f.concrete) else (f.field.related_query_name(),) for f in chain(opts.fields, opts.related_objects) )) invalid_fields.extend(fields - valid_fields) for invalid_field in invalid_fields: errors.append( checks.Error( "'ordering' refers to the nonexistent field, related " "field, or lookup '%s'." % invalid_field, obj=cls, id='models.E015', ) ) return errors @classmethod def _check_long_column_names(cls, databases): """ Check that any auto-generated column names are shorter than the limits for each database in which the model will be created. """ if not databases: return [] errors = [] allowed_len = None db_alias = None # Find the minimum max allowed length among all specified db_aliases. for db in databases: # skip databases where the model won't be created if not router.allow_migrate_model(db, cls): continue connection = connections[db] max_name_length = connection.ops.max_name_length() if max_name_length is None or connection.features.truncates_names: continue else: if allowed_len is None: allowed_len = max_name_length db_alias = db elif max_name_length < allowed_len: allowed_len = max_name_length db_alias = db if allowed_len is None: return errors for f in cls._meta.local_fields: _, column_name = f.get_attname_column() # Check if auto-generated name for the field is too long # for the database. if f.db_column is None and column_name is not None and len(column_name) > allowed_len: errors.append( checks.Error( 'Autogenerated column name too long for field "%s". ' 'Maximum length is "%s" for database "%s".' % (column_name, allowed_len, db_alias), hint="Set the column name manually using 'db_column'.", obj=cls, id='models.E018', ) ) for f in cls._meta.local_many_to_many: # Skip nonexistent models. if isinstance(f.remote_field.through, str): continue # Check if auto-generated name for the M2M field is too long # for the database. for m2m in f.remote_field.through._meta.local_fields: _, rel_name = m2m.get_attname_column() if m2m.db_column is None and rel_name is not None and len(rel_name) > allowed_len: errors.append( checks.Error( 'Autogenerated column name too long for M2M field ' '"%s". Maximum length is "%s" for database "%s".' % (rel_name, allowed_len, db_alias), hint=( "Use 'through' to create a separate model for " "M2M and then set column_name using 'db_column'." ), obj=cls, id='models.E019', ) ) return errors @classmethod def _get_expr_references(cls, expr): if isinstance(expr, Q): for child in expr.children: if isinstance(child, tuple): lookup, value = child yield tuple(lookup.split(LOOKUP_SEP)) yield from cls._get_expr_references(value) else: yield from cls._get_expr_references(child) elif isinstance(expr, F): yield tuple(expr.name.split(LOOKUP_SEP)) elif hasattr(expr, 'get_source_expressions'): for src_expr in expr.get_source_expressions(): yield from cls._get_expr_references(src_expr) @classmethod def _check_constraints(cls, databases): errors = [] for db in databases: if not router.allow_migrate_model(db, cls): continue connection = connections[db] if not ( connection.features.supports_table_check_constraints or 'supports_table_check_constraints' in cls._meta.required_db_features ) and any( isinstance(constraint, CheckConstraint) for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support check constraints.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." 
), obj=cls, id='models.W027', ) ) if not ( connection.features.supports_partial_indexes or 'supports_partial_indexes' in cls._meta.required_db_features ) and any( isinstance(constraint, UniqueConstraint) and constraint.condition is not None for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support unique constraints with ' 'conditions.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W036', ) ) if not ( connection.features.supports_deferrable_unique_constraints or 'supports_deferrable_unique_constraints' in cls._meta.required_db_features ) and any( isinstance(constraint, UniqueConstraint) and constraint.deferrable is not None for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support deferrable unique constraints.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W038', ) ) if not ( connection.features.supports_covering_indexes or 'supports_covering_indexes' in cls._meta.required_db_features ) and any( isinstance(constraint, UniqueConstraint) and constraint.include for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support unique constraints with non-key ' 'columns.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W039', ) ) if not ( connection.features.supports_expression_indexes or 'supports_expression_indexes' in cls._meta.required_db_features ) and any( isinstance(constraint, UniqueConstraint) and constraint.contains_expressions for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support unique constraints on ' 'expressions.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W044', ) ) fields = set(chain.from_iterable( (*constraint.fields, *constraint.include) for constraint in cls._meta.constraints if isinstance(constraint, UniqueConstraint) )) references = set() for constraint in cls._meta.constraints: if isinstance(constraint, UniqueConstraint): if ( connection.features.supports_partial_indexes or 'supports_partial_indexes' not in cls._meta.required_db_features ) and isinstance(constraint.condition, Q): references.update(cls._get_expr_references(constraint.condition)) if ( connection.features.supports_expression_indexes or 'supports_expression_indexes' not in cls._meta.required_db_features ) and constraint.contains_expressions: for expression in constraint.expressions: references.update(cls._get_expr_references(expression)) elif isinstance(constraint, CheckConstraint): if ( connection.features.supports_table_check_constraints or 'supports_table_check_constraints' not in cls._meta.required_db_features ) and isinstance(constraint.check, Q): references.update(cls._get_expr_references(constraint.check)) for field_name, *lookups in references: # pk is an alias that won't be found by opts.get_field. if field_name != 'pk': fields.add(field_name) if not lookups: # If it has no lookups it cannot result in a JOIN. continue try: if field_name == 'pk': field = cls._meta.pk else: field = cls._meta.get_field(field_name) if not field.is_relation or field.many_to_many or field.one_to_many: continue except FieldDoesNotExist: continue # JOIN must happen at the first lookup. 
first_lookup = lookups[0] if ( hasattr(field, 'get_transform') and hasattr(field, 'get_lookup') and field.get_transform(first_lookup) is None and field.get_lookup(first_lookup) is None ): errors.append( checks.Error( "'constraints' refers to the joined field '%s'." % LOOKUP_SEP.join([field_name] + lookups), obj=cls, id='models.E041', ) ) errors.extend(cls._check_local_fields(fields, 'constraints')) return errors ############################################ # HELPER FUNCTIONS (CURRIED MODEL METHODS) # ############################################ # ORDERING METHODS ######################### def method_set_order(self, ordered_obj, id_list, using=None): if using is None: using = DEFAULT_DB_ALIAS order_wrt = ordered_obj._meta.order_with_respect_to filter_args = order_wrt.get_forward_related_filter(self) ordered_obj.objects.db_manager(using).filter(**filter_args).bulk_update([ ordered_obj(pk=pk, _order=order) for order, pk in enumerate(id_list) ], ['_order']) def method_get_order(self, ordered_obj): order_wrt = ordered_obj._meta.order_with_respect_to filter_args = order_wrt.get_forward_related_filter(self) pk_name = ordered_obj._meta.pk.name return ordered_obj.objects.filter(**filter_args).values_list(pk_name, flat=True) def make_foreign_order_accessors(model, related_model): setattr( related_model, 'get_%s_order' % model.__name__.lower(), partialmethod(method_get_order, model) ) setattr( related_model, 'set_%s_order' % model.__name__.lower(), partialmethod(method_set_order, model) ) ######## # MISC # ######## def model_unpickle(model_id): """Used to unpickle Model subclasses with deferred fields.""" if isinstance(model_id, tuple): model = apps.get_model(*model_id) else: # Backwards compat - the model was cached directly in earlier versions. model = model_id return model.__new__(model) model_unpickle.__safe_for_unpickle__ = True
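# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the module above): a minimal, hedged
# example of the instance APIs defined on Model -- save() with update_fields,
# refresh_from_db(), full_clean(), get_FOO_display() (backed by
# _get_FIELD_display()), and the ordering accessors installed by
# make_foreign_order_accessors(). It assumes a configured Django project; the
# 'demo' app label and the Article/Comment models are hypothetical.
from django.db import models


class Article(models.Model):
    STATUS_CHOICES = [('d', 'Draft'), ('p', 'Published')]
    title = models.CharField(max_length=100, unique=True)
    status = models.CharField(max_length=1, choices=STATUS_CHOICES, default='d')

    class Meta:
        app_label = 'demo'


class Comment(models.Model):
    article = models.ForeignKey(Article, on_delete=models.CASCADE)
    text = models.TextField()

    class Meta:
        app_label = 'demo'
        order_with_respect_to = 'article'


def demo_instance_api():
    article = Article.objects.create(title='Hello')
    article.status = 'p'
    # Restrict the UPDATE to the listed columns; pre_save/post_save still fire.
    article.save(update_fields=['status'])
    # Reload selected fields from the database this instance was saved to.
    article.refresh_from_db(fields=['status'])
    assert article.get_status_display() == 'Published'
    # clean_fields() + clean() + validate_unique(); raises ValidationError.
    article.full_clean()
    # order_with_respect_to on Comment adds get_comment_order() and
    # set_comment_order() to Article (see method_get_order()/method_set_order()).
    article.set_comment_order([c.pk for c in article.comment_set.all()])
# ---------------------------------------------------------------------------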
import copy import datetime import functools import inspect from decimal import Decimal from uuid import UUID from django.core.exceptions import EmptyResultSet, FieldError from django.db import DatabaseError, NotSupportedError, connection from django.db.models import fields from django.db.models.constants import LOOKUP_SEP from django.db.models.query_utils import Q from django.utils.deconstruct import deconstructible from django.utils.functional import cached_property from django.utils.hashable import make_hashable class SQLiteNumericMixin: """ Some expressions with output_field=DecimalField() must be cast to numeric to be properly filtered. """ def as_sqlite(self, compiler, connection, **extra_context): sql, params = self.as_sql(compiler, connection, **extra_context) try: if self.output_field.get_internal_type() == 'DecimalField': sql = 'CAST(%s AS NUMERIC)' % sql except FieldError: pass return sql, params class Combinable: """ Provide the ability to combine one or two objects with some connector. For example F('foo') + F('bar'). """ # Arithmetic connectors ADD = '+' SUB = '-' MUL = '*' DIV = '/' POW = '^' # The following is a quoted % operator - it is quoted because it can be # used in strings that also have parameter substitution. MOD = '%%' # Bitwise operators - note that these are generated by .bitand() # and .bitor(), the '&' and '|' are reserved for boolean operator # usage. BITAND = '&' BITOR = '|' BITLEFTSHIFT = '<<' BITRIGHTSHIFT = '>>' BITXOR = '#' def _combine(self, other, connector, reversed): if not hasattr(other, 'resolve_expression'): # everything must be resolvable to an expression other = Value(other) if reversed: return CombinedExpression(other, connector, self) return CombinedExpression(self, connector, other) ############# # OPERATORS # ############# def __neg__(self): return self._combine(-1, self.MUL, False) def __add__(self, other): return self._combine(other, self.ADD, False) def __sub__(self, other): return self._combine(other, self.SUB, False) def __mul__(self, other): return self._combine(other, self.MUL, False) def __truediv__(self, other): return self._combine(other, self.DIV, False) def __mod__(self, other): return self._combine(other, self.MOD, False) def __pow__(self, other): return self._combine(other, self.POW, False) def __and__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) & Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) def bitand(self, other): return self._combine(other, self.BITAND, False) def bitleftshift(self, other): return self._combine(other, self.BITLEFTSHIFT, False) def bitrightshift(self, other): return self._combine(other, self.BITRIGHTSHIFT, False) def bitxor(self, other): return self._combine(other, self.BITXOR, False) def __or__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) | Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." 
) def bitor(self, other): return self._combine(other, self.BITOR, False) def __radd__(self, other): return self._combine(other, self.ADD, True) def __rsub__(self, other): return self._combine(other, self.SUB, True) def __rmul__(self, other): return self._combine(other, self.MUL, True) def __rtruediv__(self, other): return self._combine(other, self.DIV, True) def __rmod__(self, other): return self._combine(other, self.MOD, True) def __rpow__(self, other): return self._combine(other, self.POW, True) def __rand__(self, other): raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) def __ror__(self, other): raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) class BaseExpression: """Base class for all query expressions.""" # aggregate specific fields is_summary = False _output_field_resolved_to_none = False # Can the expression be used in a WHERE clause? filterable = True # Can the expression can be used as a source expression in Window? window_compatible = False def __init__(self, output_field=None): if output_field is not None: self.output_field = output_field def __getstate__(self): state = self.__dict__.copy() state.pop('convert_value', None) return state def get_db_converters(self, connection): return ( [] if self.convert_value is self._convert_value_noop else [self.convert_value] ) + self.output_field.get_db_converters(connection) def get_source_expressions(self): return [] def set_source_expressions(self, exprs): assert not exprs def _parse_expressions(self, *expressions): return [ arg if hasattr(arg, 'resolve_expression') else ( F(arg) if isinstance(arg, str) else Value(arg) ) for arg in expressions ] def as_sql(self, compiler, connection): """ Responsible for returning a (sql, [params]) tuple to be included in the current query. Different backends can provide their own implementation, by providing an `as_{vendor}` method and patching the Expression: ``` def override_as_sql(self, compiler, connection): # custom logic return super().as_sql(compiler, connection) setattr(Expression, 'as_' + connection.vendor, override_as_sql) ``` Arguments: * compiler: the query compiler responsible for generating the query. Must have a compile method, returning a (sql, [params]) tuple. Calling compiler(value) will return a quoted `value`. * connection: the database connection used for the current query. Return: (sql, params) Where `sql` is a string containing ordered sql parameters to be replaced with the elements of the list `params`. """ raise NotImplementedError("Subclasses must implement as_sql()") @cached_property def contains_aggregate(self): return any(expr and expr.contains_aggregate for expr in self.get_source_expressions()) @cached_property def contains_over_clause(self): return any(expr and expr.contains_over_clause for expr in self.get_source_expressions()) @cached_property def contains_column_references(self): return any(expr and expr.contains_column_references for expr in self.get_source_expressions()) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): """ Provide the chance to do any preprocessing or validation before being added to the query. 
Arguments: * query: the backend query implementation * allow_joins: boolean allowing or denying use of joins in this query * reuse: a set of reusable joins for multijoins * summarize: a terminal aggregate clause * for_save: whether this expression about to be used in a save or update Return: an Expression to be added to the query. """ c = self.copy() c.is_summary = summarize c.set_source_expressions([ expr.resolve_expression(query, allow_joins, reuse, summarize) if expr else None for expr in c.get_source_expressions() ]) return c @property def conditional(self): return isinstance(self.output_field, fields.BooleanField) @property def field(self): return self.output_field @cached_property def output_field(self): """Return the output type of this expressions.""" output_field = self._resolve_output_field() if output_field is None: self._output_field_resolved_to_none = True raise FieldError('Cannot resolve expression type, unknown output_field') return output_field @cached_property def _output_field_or_none(self): """ Return the output field of this expression, or None if _resolve_output_field() didn't return an output type. """ try: return self.output_field except FieldError: if not self._output_field_resolved_to_none: raise def _resolve_output_field(self): """ Attempt to infer the output type of the expression. If the output fields of all source fields match then, simply infer the same type here. This isn't always correct, but it makes sense most of the time. Consider the difference between `2 + 2` and `2 / 3`. Inferring the type here is a convenience for the common case. The user should supply their own output_field with more complex computations. If a source's output field resolves to None, exclude it from this check. If all sources are None, then an error is raised higher up the stack in the output_field property. """ sources_iter = (source for source in self.get_source_fields() if source is not None) for output_field in sources_iter: for source in sources_iter: if not isinstance(output_field, source.__class__): raise FieldError( 'Expression contains mixed types: %s, %s. You must ' 'set output_field.' % ( output_field.__class__.__name__, source.__class__.__name__, ) ) return output_field @staticmethod def _convert_value_noop(value, expression, connection): return value @cached_property def convert_value(self): """ Expressions provide their own converters because users have the option of manually specifying the output_field which may be a different type from the one the database returns. 
""" field = self.output_field internal_type = field.get_internal_type() if internal_type == 'FloatField': return lambda value, expression, connection: None if value is None else float(value) elif internal_type.endswith('IntegerField'): return lambda value, expression, connection: None if value is None else int(value) elif internal_type == 'DecimalField': return lambda value, expression, connection: None if value is None else Decimal(value) return self._convert_value_noop def get_lookup(self, lookup): return self.output_field.get_lookup(lookup) def get_transform(self, name): return self.output_field.get_transform(name) def relabeled_clone(self, change_map): clone = self.copy() clone.set_source_expressions([ e.relabeled_clone(change_map) if e is not None else None for e in self.get_source_expressions() ]) return clone def copy(self): return copy.copy(self) def get_group_by_cols(self, alias=None): if not self.contains_aggregate: return [self] cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def get_source_fields(self): """Return the underlying field types used by this aggregate.""" return [e._output_field_or_none for e in self.get_source_expressions()] def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def reverse_ordering(self): return self def flatten(self): """ Recursively yield this expression and all subexpressions, in depth-first order. """ yield self for expr in self.get_source_expressions(): if expr: if hasattr(expr, 'flatten'): yield from expr.flatten() else: yield expr def select_format(self, compiler, sql, params): """ Custom format for select clauses. For example, EXISTS expressions need to be wrapped in CASE WHEN on Oracle. 
""" if hasattr(self.output_field, 'select_format'): return self.output_field.select_format(compiler, sql, params) return sql, params @deconstructible class Expression(BaseExpression, Combinable): """An expression that can be combined with other expressions.""" @cached_property def identity(self): constructor_signature = inspect.signature(self.__init__) args, kwargs = self._constructor_args signature = constructor_signature.bind_partial(*args, **kwargs) signature.apply_defaults() arguments = signature.arguments.items() identity = [self.__class__] for arg, value in arguments: if isinstance(value, fields.Field): if value.name and value.model: value = (value.model._meta.label, value.name) else: value = type(value) else: value = make_hashable(value) identity.append((arg, value)) return tuple(identity) def __eq__(self, other): if not isinstance(other, Expression): return NotImplemented return other.identity == self.identity def __hash__(self): return hash(self.identity) _connector_combinators = { connector: [ (fields.IntegerField, fields.IntegerField, fields.IntegerField), (fields.IntegerField, fields.DecimalField, fields.DecimalField), (fields.DecimalField, fields.IntegerField, fields.DecimalField), (fields.IntegerField, fields.FloatField, fields.FloatField), (fields.FloatField, fields.IntegerField, fields.FloatField), ] for connector in (Combinable.ADD, Combinable.SUB, Combinable.MUL, Combinable.DIV) } @functools.lru_cache(maxsize=128) def _resolve_combined_type(connector, lhs_type, rhs_type): combinators = _connector_combinators.get(connector, ()) for combinator_lhs_type, combinator_rhs_type, combined_type in combinators: if issubclass(lhs_type, combinator_lhs_type) and issubclass(rhs_type, combinator_rhs_type): return combined_type class CombinedExpression(SQLiteNumericMixin, Expression): def __init__(self, lhs, connector, rhs, output_field=None): super().__init__(output_field=output_field) self.connector = connector self.lhs = lhs self.rhs = rhs def __repr__(self): return "<{}: {}>".format(self.__class__.__name__, self) def __str__(self): return "{} {} {}".format(self.lhs, self.connector, self.rhs) def get_source_expressions(self): return [self.lhs, self.rhs] def set_source_expressions(self, exprs): self.lhs, self.rhs = exprs def _resolve_output_field(self): try: return super()._resolve_output_field() except FieldError: combined_type = _resolve_combined_type( self.connector, type(self.lhs.output_field), type(self.rhs.output_field), ) if combined_type is None: raise return combined_type() def as_sql(self, compiler, connection): expressions = [] expression_params = [] sql, params = compiler.compile(self.lhs) expressions.append(sql) expression_params.extend(params) sql, params = compiler.compile(self.rhs) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_expression(self.connector, expressions) return expression_wrapper % sql, expression_params def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): lhs = self.lhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) rhs = self.rhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) if not isinstance(self, (DurationExpression, TemporalSubtraction)): try: lhs_type = lhs.output_field.get_internal_type() except (AttributeError, FieldError): lhs_type = None try: rhs_type = rhs.output_field.get_internal_type() except (AttributeError, FieldError): rhs_type = None if 'DurationField' in 
{lhs_type, rhs_type} and lhs_type != rhs_type: return DurationExpression(self.lhs, self.connector, self.rhs).resolve_expression( query, allow_joins, reuse, summarize, for_save, ) datetime_fields = {'DateField', 'DateTimeField', 'TimeField'} if self.connector == self.SUB and lhs_type in datetime_fields and lhs_type == rhs_type: return TemporalSubtraction(self.lhs, self.rhs).resolve_expression( query, allow_joins, reuse, summarize, for_save, ) c = self.copy() c.is_summary = summarize c.lhs = lhs c.rhs = rhs return c class DurationExpression(CombinedExpression): def compile(self, side, compiler, connection): try: output = side.output_field except FieldError: pass else: if output.get_internal_type() == 'DurationField': sql, params = compiler.compile(side) return connection.ops.format_for_duration_arithmetic(sql), params return compiler.compile(side) def as_sql(self, compiler, connection): if connection.features.has_native_duration_field: return super().as_sql(compiler, connection) connection.ops.check_expression_support(self) expressions = [] expression_params = [] sql, params = self.compile(self.lhs, compiler, connection) expressions.append(sql) expression_params.extend(params) sql, params = self.compile(self.rhs, compiler, connection) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_duration_expression(self.connector, expressions) return expression_wrapper % sql, expression_params def as_sqlite(self, compiler, connection, **extra_context): sql, params = self.as_sql(compiler, connection, **extra_context) if self.connector in {Combinable.MUL, Combinable.DIV}: try: lhs_type = self.lhs.output_field.get_internal_type() rhs_type = self.rhs.output_field.get_internal_type() except (AttributeError, FieldError): pass else: allowed_fields = { 'DecimalField', 'DurationField', 'FloatField', 'IntegerField', } if lhs_type not in allowed_fields or rhs_type not in allowed_fields: raise DatabaseError( f'Invalid arguments for operator {self.connector}.' ) return sql, params class TemporalSubtraction(CombinedExpression): output_field = fields.DurationField() def __init__(self, lhs, rhs): super().__init__(lhs, self.SUB, rhs) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) lhs = compiler.compile(self.lhs) rhs = compiler.compile(self.rhs) return connection.ops.subtract_temporals(self.lhs.output_field.get_internal_type(), lhs, rhs) @deconstructible class F(Combinable): """An object capable of resolving references to existing query objects.""" def __init__(self, name): """ Arguments: * name: the name of the field this expression references """ self.name = name def __repr__(self): return "{}({})".format(self.__class__.__name__, self.name) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): return query.resolve_ref(self.name, allow_joins, reuse, summarize) def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def __eq__(self, other): return self.__class__ == other.__class__ and self.name == other.name def __hash__(self): return hash(self.name) class ResolvedOuterRef(F): """ An object that contains a reference to an outer query. In this case, the reference to the outer query has been resolved because the inner query has been used as a subquery. 
""" contains_aggregate = False def as_sql(self, *args, **kwargs): raise ValueError( 'This queryset contains a reference to an outer query and may ' 'only be used in a subquery.' ) def resolve_expression(self, *args, **kwargs): col = super().resolve_expression(*args, **kwargs) # FIXME: Rename possibly_multivalued to multivalued and fix detection # for non-multivalued JOINs (e.g. foreign key fields). This should take # into account only many-to-many and one-to-many relationships. col.possibly_multivalued = LOOKUP_SEP in self.name return col def relabeled_clone(self, relabels): return self def get_group_by_cols(self, alias=None): return [] class OuterRef(F): contains_aggregate = False def resolve_expression(self, *args, **kwargs): if isinstance(self.name, self.__class__): return self.name return ResolvedOuterRef(self.name) def relabeled_clone(self, relabels): return self class Func(SQLiteNumericMixin, Expression): """An SQL function call.""" function = None template = '%(function)s(%(expressions)s)' arg_joiner = ', ' arity = None # The number of arguments the function accepts. def __init__(self, *expressions, output_field=None, **extra): if self.arity is not None and len(expressions) != self.arity: raise TypeError( "'%s' takes exactly %s %s (%s given)" % ( self.__class__.__name__, self.arity, "argument" if self.arity == 1 else "arguments", len(expressions), ) ) super().__init__(output_field=output_field) self.source_expressions = self._parse_expressions(*expressions) self.extra = extra def __repr__(self): args = self.arg_joiner.join(str(arg) for arg in self.source_expressions) extra = {**self.extra, **self._get_repr_options()} if extra: extra = ', '.join(str(key) + '=' + str(val) for key, val in sorted(extra.items())) return "{}({}, {})".format(self.__class__.__name__, args, extra) return "{}({})".format(self.__class__.__name__, args) def _get_repr_options(self): """Return a dict of extra __init__() options to include in the repr.""" return {} def get_source_expressions(self): return self.source_expressions def set_source_expressions(self, exprs): self.source_expressions = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, arg in enumerate(c.source_expressions): c.source_expressions[pos] = arg.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, function=None, template=None, arg_joiner=None, **extra_context): connection.ops.check_expression_support(self) sql_parts = [] params = [] for arg in self.source_expressions: arg_sql, arg_params = compiler.compile(arg) sql_parts.append(arg_sql) params.extend(arg_params) data = {**self.extra, **extra_context} # Use the first supplied value in this order: the parameter to this # method, a value supplied in __init__()'s **extra (the value in # `data`), or the value defined on the class. 
if function is not None: data['function'] = function else: data.setdefault('function', self.function) template = template or data.get('template', self.template) arg_joiner = arg_joiner or data.get('arg_joiner', self.arg_joiner) data['expressions'] = data['field'] = arg_joiner.join(sql_parts) return template % data, params def copy(self): copy = super().copy() copy.source_expressions = self.source_expressions[:] copy.extra = self.extra.copy() return copy class Value(SQLiteNumericMixin, Expression): """Represent a wrapped value as a node within an expression.""" # Provide a default value for `for_save` in order to allow unresolved # instances to be compiled until a decision is taken in #25425. for_save = False def __init__(self, value, output_field=None): """ Arguments: * value: the value this expression represents. The value will be added into the sql parameter list and properly quoted. * output_field: an instance of the model field type that this expression will return, such as IntegerField() or CharField(). """ super().__init__(output_field=output_field) self.value = value def __repr__(self): return f'{self.__class__.__name__}({self.value!r})' def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) val = self.value output_field = self._output_field_or_none if output_field is not None: if self.for_save: val = output_field.get_db_prep_save(val, connection=connection) else: val = output_field.get_db_prep_value(val, connection=connection) if hasattr(output_field, 'get_placeholder'): return output_field.get_placeholder(val, compiler, connection), [val] if val is None: # cx_Oracle does not always convert None to the appropriate # NULL type (like in case expressions using numbers), so we # use a literal SQL NULL return 'NULL', [] return '%s', [val] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = super().resolve_expression(query, allow_joins, reuse, summarize, for_save) c.for_save = for_save return c def get_group_by_cols(self, alias=None): return [] def _resolve_output_field(self): if isinstance(self.value, str): return fields.CharField() if isinstance(self.value, bool): return fields.BooleanField() if isinstance(self.value, int): return fields.IntegerField() if isinstance(self.value, float): return fields.FloatField() if isinstance(self.value, datetime.datetime): return fields.DateTimeField() if isinstance(self.value, datetime.date): return fields.DateField() if isinstance(self.value, datetime.time): return fields.TimeField() if isinstance(self.value, datetime.timedelta): return fields.DurationField() if isinstance(self.value, Decimal): return fields.DecimalField() if isinstance(self.value, bytes): return fields.BinaryField() if isinstance(self.value, UUID): return fields.UUIDField() class RawSQL(Expression): def __init__(self, sql, params, output_field=None): if output_field is None: output_field = fields.Field() self.sql, self.params = sql, params super().__init__(output_field=output_field) def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.sql, self.params) def as_sql(self, compiler, connection): return '(%s)' % self.sql, self.params def get_group_by_cols(self, alias=None): return [self] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # Resolve parents fields used in raw SQL. 
for parent in query.model._meta.get_parent_list(): for parent_field in parent._meta.local_fields: _, column_name = parent_field.get_attname_column() if column_name.lower() in self.sql.lower(): query.resolve_ref(parent_field.name, allow_joins, reuse, summarize) break return super().resolve_expression(query, allow_joins, reuse, summarize, for_save) class Star(Expression): def __repr__(self): return "'*'" def as_sql(self, compiler, connection): return '*', [] class Col(Expression): contains_column_references = True possibly_multivalued = False def __init__(self, alias, target, output_field=None): if output_field is None: output_field = target super().__init__(output_field=output_field) self.alias, self.target = alias, target def __repr__(self): alias, target = self.alias, self.target identifiers = (alias, str(target)) if alias else (str(target),) return '{}({})'.format(self.__class__.__name__, ', '.join(identifiers)) def as_sql(self, compiler, connection): alias, column = self.alias, self.target.column identifiers = (alias, column) if alias else (column,) sql = '.'.join(map(compiler.quote_name_unless_alias, identifiers)) return sql, [] def relabeled_clone(self, relabels): if self.alias is None: return self return self.__class__(relabels.get(self.alias, self.alias), self.target, self.output_field) def get_group_by_cols(self, alias=None): return [self] def get_db_converters(self, connection): if self.target == self.output_field: return self.output_field.get_db_converters(connection) return (self.output_field.get_db_converters(connection) + self.target.get_db_converters(connection)) class Ref(Expression): """ Reference to column alias of the query. For example, Ref('sum_cost') in qs.annotate(sum_cost=Sum('cost')) query. """ def __init__(self, refs, source): super().__init__() self.refs, self.source = refs, source def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.refs, self.source) def get_source_expressions(self): return [self.source] def set_source_expressions(self, exprs): self.source, = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # The sub-expression `source` has already been resolved, as this is # just a reference to the name of `source`. return self def relabeled_clone(self, relabels): return self def as_sql(self, compiler, connection): return connection.ops.quote_name(self.refs), [] def get_group_by_cols(self, alias=None): return [self] class ExpressionList(Func): """ An expression containing multiple expressions. Can be used to provide a list of expressions as an argument to another expression, like an ordering clause. """ template = '%(expressions)s' def __init__(self, *expressions, **extra): if not expressions: raise ValueError('%s requires at least one expression.' % self.__class__.__name__) super().__init__(*expressions, **extra) def __str__(self): return self.arg_joiner.join(str(arg) for arg in self.source_expressions) def as_sqlite(self, compiler, connection, **extra_context): # Casting to numeric is unnecessary. return self.as_sql(compiler, connection, **extra_context) class ExpressionWrapper(Expression): """ An expression that can wrap another expression so that it can provide extra context to the inner expression, such as the output_field. 
""" def __init__(self, expression, output_field): super().__init__(output_field=output_field) self.expression = expression def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def get_group_by_cols(self, alias=None): if isinstance(self.expression, Expression): expression = self.expression.copy() expression.output_field = self.output_field return expression.get_group_by_cols(alias=alias) # For non-expressions e.g. an SQL WHERE clause, the entire # `expression` must be included in the GROUP BY clause. return super().get_group_by_cols() def as_sql(self, compiler, connection): return compiler.compile(self.expression) def __repr__(self): return "{}({})".format(self.__class__.__name__, self.expression) class When(Expression): template = 'WHEN %(condition)s THEN %(result)s' # This isn't a complete conditional expression, must be used in Case(). conditional = False def __init__(self, condition=None, then=None, **lookups): if lookups: if condition is None: condition, lookups = Q(**lookups), None elif getattr(condition, 'conditional', False): condition, lookups = Q(condition, **lookups), None if condition is None or not getattr(condition, 'conditional', False) or lookups: raise TypeError( 'When() supports a Q object, a boolean expression, or lookups ' 'as a condition.' ) if isinstance(condition, Q) and not condition: raise ValueError("An empty Q() can't be used as a When() condition.") super().__init__(output_field=None) self.condition = condition self.result = self._parse_expressions(then)[0] def __str__(self): return "WHEN %r THEN %r" % (self.condition, self.result) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return [self.condition, self.result] def set_source_expressions(self, exprs): self.condition, self.result = exprs def get_source_fields(self): # We're only interested in the fields of the result expressions. return [self.result._output_field_or_none] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize if hasattr(c.condition, 'resolve_expression'): c.condition = c.condition.resolve_expression(query, allow_joins, reuse, summarize, False) c.result = c.result.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, template=None, **extra_context): connection.ops.check_expression_support(self) template_params = extra_context sql_params = [] condition_sql, condition_params = compiler.compile(self.condition) template_params['condition'] = condition_sql sql_params.extend(condition_params) result_sql, result_params = compiler.compile(self.result) template_params['result'] = result_sql sql_params.extend(result_params) template = template or self.template return template % template_params, sql_params def get_group_by_cols(self, alias=None): # This is not a complete expression and cannot be used in GROUP BY. 
cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols class Case(Expression): """ An SQL searched CASE expression: CASE WHEN n > 0 THEN 'positive' WHEN n < 0 THEN 'negative' ELSE 'zero' END """ template = 'CASE %(cases)s ELSE %(default)s END' case_joiner = ' ' def __init__(self, *cases, default=None, output_field=None, **extra): if not all(isinstance(case, When) for case in cases): raise TypeError("Positional arguments must all be When objects.") super().__init__(output_field) self.cases = list(cases) self.default = self._parse_expressions(default)[0] self.extra = extra def __str__(self): return "CASE %s, ELSE %r" % (', '.join(str(c) for c in self.cases), self.default) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return self.cases + [self.default] def set_source_expressions(self, exprs): *self.cases, self.default = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, case in enumerate(c.cases): c.cases[pos] = case.resolve_expression(query, allow_joins, reuse, summarize, for_save) c.default = c.default.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def copy(self): c = super().copy() c.cases = c.cases[:] return c def as_sql(self, compiler, connection, template=None, case_joiner=None, **extra_context): connection.ops.check_expression_support(self) if not self.cases: return compiler.compile(self.default) template_params = {**self.extra, **extra_context} case_parts = [] sql_params = [] for case in self.cases: try: case_sql, case_params = compiler.compile(case) except EmptyResultSet: continue case_parts.append(case_sql) sql_params.extend(case_params) default_sql, default_params = compiler.compile(self.default) if not case_parts: return default_sql, default_params case_joiner = case_joiner or self.case_joiner template_params['cases'] = case_joiner.join(case_parts) template_params['default'] = default_sql sql_params.extend(default_params) template = template or template_params.get('template', self.template) sql = template % template_params if self._output_field_or_none is not None: sql = connection.ops.unification_cast_sql(self.output_field) % sql return sql, sql_params def get_group_by_cols(self, alias=None): if not self.cases: return self.default.get_group_by_cols(alias) return super().get_group_by_cols(alias) class Subquery(BaseExpression, Combinable): """ An explicit subquery. It may contain OuterRef() references to the outer query which will be resolved when it is applied to that query. """ template = '(%(subquery)s)' contains_aggregate = False def __init__(self, queryset, output_field=None, **extra): # Allow the usage of both QuerySet and sql.Query objects. 
self.query = getattr(queryset, 'query', queryset) self.extra = extra super().__init__(output_field) def get_source_expressions(self): return [self.query] def set_source_expressions(self, exprs): self.query = exprs[0] def _resolve_output_field(self): return self.query.output_field def copy(self): clone = super().copy() clone.query = clone.query.clone() return clone @property def external_aliases(self): return self.query.external_aliases def get_external_cols(self): return self.query.get_external_cols() def as_sql(self, compiler, connection, template=None, query=None, **extra_context): connection.ops.check_expression_support(self) template_params = {**self.extra, **extra_context} query = query or self.query subquery_sql, sql_params = query.as_sql(compiler, connection) template_params['subquery'] = subquery_sql[1:-1] template = template or template_params.get('template', self.template) sql = template % template_params return sql, sql_params def get_group_by_cols(self, alias=None): if alias: return [Ref(alias, self)] external_cols = self.get_external_cols() if any(col.possibly_multivalued for col in external_cols): return [self] return external_cols class Exists(Subquery): template = 'EXISTS(%(subquery)s)' output_field = fields.BooleanField() def __init__(self, queryset, negated=False, **kwargs): self.negated = negated super().__init__(queryset, **kwargs) def __invert__(self): clone = self.copy() clone.negated = not self.negated return clone def as_sql(self, compiler, connection, template=None, **extra_context): query = self.query.exists(using=connection.alias) sql, params = super().as_sql( compiler, connection, template=template, query=query, **extra_context, ) if self.negated: sql = 'NOT {}'.format(sql) return sql, params def select_format(self, compiler, sql, params): # Wrap EXISTS() with a CASE WHEN expression if a database backend # (e.g. Oracle) doesn't support boolean expression in SELECT or GROUP # BY list. 
if not compiler.connection.features.supports_boolean_expr_in_select_clause: sql = 'CASE WHEN {} THEN 1 ELSE 0 END'.format(sql) return sql, params class OrderBy(Expression): template = '%(expression)s %(ordering)s' conditional = False def __init__(self, expression, descending=False, nulls_first=False, nulls_last=False): if nulls_first and nulls_last: raise ValueError('nulls_first and nulls_last are mutually exclusive') self.nulls_first = nulls_first self.nulls_last = nulls_last self.descending = descending if not hasattr(expression, 'resolve_expression'): raise ValueError('expression must be an expression type') self.expression = expression def __repr__(self): return "{}({}, descending={})".format( self.__class__.__name__, self.expression, self.descending) def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def as_sql(self, compiler, connection, template=None, **extra_context): template = template or self.template if connection.features.supports_order_by_nulls_modifier: if self.nulls_last: template = '%s NULLS LAST' % template elif self.nulls_first: template = '%s NULLS FIRST' % template else: if self.nulls_last and not ( self.descending and connection.features.order_by_nulls_first ): template = '%%(expression)s IS NULL, %s' % template elif self.nulls_first and not ( not self.descending and connection.features.order_by_nulls_first ): template = '%%(expression)s IS NOT NULL, %s' % template connection.ops.check_expression_support(self) expression_sql, params = compiler.compile(self.expression) placeholders = { 'expression': expression_sql, 'ordering': 'DESC' if self.descending else 'ASC', **extra_context, } params *= template.count('%(expression)s') return (template % placeholders).rstrip(), params def as_oracle(self, compiler, connection): # Oracle doesn't allow ORDER BY EXISTS() unless it's wrapped in # a CASE WHEN. if isinstance(self.expression, Exists): copy = self.copy() copy.expression = Case( When(self.expression, then=True), default=False, ) return copy.as_sql(compiler, connection) return self.as_sql(compiler, connection) def get_group_by_cols(self, alias=None): cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def reverse_ordering(self): self.descending = not self.descending if self.nulls_first or self.nulls_last: self.nulls_first = not self.nulls_first self.nulls_last = not self.nulls_last return self def asc(self): self.descending = False def desc(self): self.descending = True class Window(SQLiteNumericMixin, Expression): template = '%(expression)s OVER (%(window)s)' # Although the main expression may either be an aggregate or an # expression with an aggregate function, the GROUP BY that will # be introduced in the query as a result is not desired. contains_aggregate = False contains_over_clause = True filterable = False def __init__(self, expression, partition_by=None, order_by=None, frame=None, output_field=None): self.partition_by = partition_by self.order_by = order_by self.frame = frame if not getattr(expression, 'window_compatible', False): raise ValueError( "Expression '%s' isn't compatible with OVER clauses." 
% expression.__class__.__name__ ) if self.partition_by is not None: if not isinstance(self.partition_by, (tuple, list)): self.partition_by = (self.partition_by,) self.partition_by = ExpressionList(*self.partition_by) if self.order_by is not None: if isinstance(self.order_by, (list, tuple)): self.order_by = ExpressionList(*self.order_by) elif not isinstance(self.order_by, BaseExpression): raise ValueError( 'order_by must be either an Expression or a sequence of ' 'expressions.' ) super().__init__(output_field=output_field) self.source_expression = self._parse_expressions(expression)[0] def _resolve_output_field(self): return self.source_expression.output_field def get_source_expressions(self): return [self.source_expression, self.partition_by, self.order_by, self.frame] def set_source_expressions(self, exprs): self.source_expression, self.partition_by, self.order_by, self.frame = exprs def as_sql(self, compiler, connection, template=None): connection.ops.check_expression_support(self) if not connection.features.supports_over_clause: raise NotSupportedError('This backend does not support window expressions.') expr_sql, params = compiler.compile(self.source_expression) window_sql, window_params = [], [] if self.partition_by is not None: sql_expr, sql_params = self.partition_by.as_sql( compiler=compiler, connection=connection, template='PARTITION BY %(expressions)s', ) window_sql.extend(sql_expr) window_params.extend(sql_params) if self.order_by is not None: window_sql.append(' ORDER BY ') order_sql, order_params = compiler.compile(self.order_by) window_sql.extend(order_sql) window_params.extend(order_params) if self.frame: frame_sql, frame_params = compiler.compile(self.frame) window_sql.append(' ' + frame_sql) window_params.extend(frame_params) params.extend(window_params) template = template or self.template return template % { 'expression': expr_sql, 'window': ''.join(window_sql).strip() }, params def as_sqlite(self, compiler, connection): if isinstance(self.output_field, fields.DecimalField): # Casting to numeric must be outside of the window expression. copy = self.copy() source_expressions = copy.get_source_expressions() source_expressions[0].output_field = fields.FloatField() copy.set_source_expressions(source_expressions) return super(Window, copy).as_sqlite(compiler, connection) return self.as_sql(compiler, connection) def __str__(self): return '{} OVER ({}{}{})'.format( str(self.source_expression), 'PARTITION BY ' + str(self.partition_by) if self.partition_by else '', 'ORDER BY ' + str(self.order_by) if self.order_by else '', str(self.frame or ''), ) def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] class WindowFrame(Expression): """ Model the frame clause in window expressions. There are two types of frame clauses which are subclasses, however, all processing and validation (by no means intended to be complete) is done here. Thus, providing an end for a frame is optional (the default is UNBOUNDED FOLLOWING, which is the last row in the frame). 
""" template = '%(frame_type)s BETWEEN %(start)s AND %(end)s' def __init__(self, start=None, end=None): self.start = Value(start) self.end = Value(end) def set_source_expressions(self, exprs): self.start, self.end = exprs def get_source_expressions(self): return [self.start, self.end] def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) start, end = self.window_frame_start_end(connection, self.start.value, self.end.value) return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, }, [] def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] def __str__(self): if self.start.value is not None and self.start.value < 0: start = '%d %s' % (abs(self.start.value), connection.ops.PRECEDING) elif self.start.value is not None and self.start.value == 0: start = connection.ops.CURRENT_ROW else: start = connection.ops.UNBOUNDED_PRECEDING if self.end.value is not None and self.end.value > 0: end = '%d %s' % (self.end.value, connection.ops.FOLLOWING) elif self.end.value is not None and self.end.value == 0: end = connection.ops.CURRENT_ROW else: end = connection.ops.UNBOUNDED_FOLLOWING return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, } def window_frame_start_end(self, connection, start, end): raise NotImplementedError('Subclasses must implement window_frame_start_end().') class RowRange(WindowFrame): frame_type = 'ROWS' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_rows_start_end(start, end) class ValueRange(WindowFrame): frame_type = 'RANGE' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_range_start_end(start, end)
98656ca21b5ed821a5222cc4be0fa1f3d7ea4f6cc65f55ff65237f4c42aa3d6b
import itertools import math from copy import copy from django.core.exceptions import EmptyResultSet from django.db.models.expressions import Case, Func, Value, When from django.db.models.fields import ( CharField, DateTimeField, Field, IntegerField, UUIDField, ) from django.db.models.query_utils import RegisterLookupMixin from django.utils.datastructures import OrderedSet from django.utils.functional import cached_property from django.utils.hashable import make_hashable class Lookup: lookup_name = None prepare_rhs = True can_use_none_as_rhs = False def __init__(self, lhs, rhs): self.lhs, self.rhs = lhs, rhs self.rhs = self.get_prep_lookup() if hasattr(self.lhs, 'get_bilateral_transforms'): bilateral_transforms = self.lhs.get_bilateral_transforms() else: bilateral_transforms = [] if bilateral_transforms: # Warn the user as soon as possible if they are trying to apply # a bilateral transformation on a nested QuerySet: that won't work. from django.db.models.sql.query import ( # avoid circular import Query, ) if isinstance(rhs, Query): raise NotImplementedError("Bilateral transformations on nested querysets are not implemented.") self.bilateral_transforms = bilateral_transforms def apply_bilateral_transforms(self, value): for transform in self.bilateral_transforms: value = transform(value) return value def __repr__(self): return f'{self.__class__.__name__}({self.lhs!r}, {self.rhs!r})' def batch_process_rhs(self, compiler, connection, rhs=None): if rhs is None: rhs = self.rhs if self.bilateral_transforms: sqls, sqls_params = [], [] for p in rhs: value = Value(p, output_field=self.lhs.output_field) value = self.apply_bilateral_transforms(value) value = value.resolve_expression(compiler.query) sql, sql_params = compiler.compile(value) sqls.append(sql) sqls_params.extend(sql_params) else: _, params = self.get_db_prep_lookup(rhs, connection) sqls, sqls_params = ['%s'] * len(params), params return sqls, sqls_params def get_source_expressions(self): if self.rhs_is_direct_value(): return [self.lhs] return [self.lhs, self.rhs] def set_source_expressions(self, new_exprs): if len(new_exprs) == 1: self.lhs = new_exprs[0] else: self.lhs, self.rhs = new_exprs def get_prep_lookup(self): if hasattr(self.rhs, 'resolve_expression'): return self.rhs if self.prepare_rhs and hasattr(self.lhs.output_field, 'get_prep_value'): return self.lhs.output_field.get_prep_value(self.rhs) return self.rhs def get_db_prep_lookup(self, value, connection): return ('%s', [value]) def process_lhs(self, compiler, connection, lhs=None): lhs = lhs or self.lhs if hasattr(lhs, 'resolve_expression'): lhs = lhs.resolve_expression(compiler.query) return compiler.compile(lhs) def process_rhs(self, compiler, connection): value = self.rhs if self.bilateral_transforms: if self.rhs_is_direct_value(): # Do not call get_db_prep_lookup here as the value will be # transformed before being used for lookup value = Value(value, output_field=self.lhs.output_field) value = self.apply_bilateral_transforms(value) value = value.resolve_expression(compiler.query) if hasattr(value, 'as_sql'): sql, params = compiler.compile(value) # Ensure expression is wrapped in parentheses to respect operator # precedence but avoid double wrapping as it can be misinterpreted # on some backends (e.g. subqueries on SQLite). 
if sql and sql[0] != '(': sql = '(%s)' % sql return sql, params else: return self.get_db_prep_lookup(value, connection) def rhs_is_direct_value(self): return not hasattr(self.rhs, 'as_sql') def relabeled_clone(self, relabels): new = copy(self) new.lhs = new.lhs.relabeled_clone(relabels) if hasattr(new.rhs, 'relabeled_clone'): new.rhs = new.rhs.relabeled_clone(relabels) return new def get_group_by_cols(self, alias=None): cols = self.lhs.get_group_by_cols() if hasattr(self.rhs, 'get_group_by_cols'): cols.extend(self.rhs.get_group_by_cols()) return cols def as_sql(self, compiler, connection): raise NotImplementedError def as_oracle(self, compiler, connection): # Oracle doesn't allow EXISTS() and filters to be compared to another # expression unless they're wrapped in a CASE WHEN. wrapped = False exprs = [] for expr in (self.lhs, self.rhs): if connection.ops.conditional_expression_supported_in_where_clause(expr): expr = Case(When(expr, then=True), default=False) wrapped = True exprs.append(expr) lookup = type(self)(*exprs) if wrapped else self return lookup.as_sql(compiler, connection) @cached_property def contains_aggregate(self): return self.lhs.contains_aggregate or getattr(self.rhs, 'contains_aggregate', False) @cached_property def contains_over_clause(self): return self.lhs.contains_over_clause or getattr(self.rhs, 'contains_over_clause', False) @property def is_summary(self): return self.lhs.is_summary or getattr(self.rhs, 'is_summary', False) @property def identity(self): return self.__class__, self.lhs, self.rhs def __eq__(self, other): if not isinstance(other, Lookup): return NotImplemented return self.identity == other.identity def __hash__(self): return hash(make_hashable(self.identity)) class Transform(RegisterLookupMixin, Func): """ RegisterLookupMixin() is first so that get_lookup() and get_transform() first examine self and then check output_field. """ bilateral = False arity = 1 @property def lhs(self): return self.get_source_expressions()[0] def get_bilateral_transforms(self): if hasattr(self.lhs, 'get_bilateral_transforms'): bilateral_transforms = self.lhs.get_bilateral_transforms() else: bilateral_transforms = [] if self.bilateral: bilateral_transforms.append(self.__class__) return bilateral_transforms class BuiltinLookup(Lookup): def process_lhs(self, compiler, connection, lhs=None): lhs_sql, params = super().process_lhs(compiler, connection, lhs) field_internal_type = self.lhs.output_field.get_internal_type() db_type = self.lhs.output_field.db_type(connection=connection) lhs_sql = connection.ops.field_cast_sql( db_type, field_internal_type) % lhs_sql lhs_sql = connection.ops.lookup_cast(self.lookup_name, field_internal_type) % lhs_sql return lhs_sql, list(params) def as_sql(self, compiler, connection): lhs_sql, params = self.process_lhs(compiler, connection) rhs_sql, rhs_params = self.process_rhs(compiler, connection) params.extend(rhs_params) rhs_sql = self.get_rhs_op(connection, rhs_sql) return '%s %s' % (lhs_sql, rhs_sql), params def get_rhs_op(self, connection, rhs): return connection.operators[self.lookup_name] % rhs class FieldGetDbPrepValueMixin: """ Some lookups require Field.get_db_prep_value() to be called on their inputs. """ get_db_prep_lookup_value_is_iterable = False def get_db_prep_lookup(self, value, connection): # For relational fields, use the 'target_field' attribute of the # output_field. 
field = getattr(self.lhs.output_field, 'target_field', None) get_db_prep_value = getattr(field, 'get_db_prep_value', None) or self.lhs.output_field.get_db_prep_value return ( '%s', [get_db_prep_value(v, connection, prepared=True) for v in value] if self.get_db_prep_lookup_value_is_iterable else [get_db_prep_value(value, connection, prepared=True)] ) class FieldGetDbPrepValueIterableMixin(FieldGetDbPrepValueMixin): """ Some lookups require Field.get_db_prep_value() to be called on each value in an iterable. """ get_db_prep_lookup_value_is_iterable = True def get_prep_lookup(self): if hasattr(self.rhs, 'resolve_expression'): return self.rhs prepared_values = [] for rhs_value in self.rhs: if hasattr(rhs_value, 'resolve_expression'): # An expression will be handled by the database but can coexist # alongside real values. pass elif self.prepare_rhs and hasattr(self.lhs.output_field, 'get_prep_value'): rhs_value = self.lhs.output_field.get_prep_value(rhs_value) prepared_values.append(rhs_value) return prepared_values def process_rhs(self, compiler, connection): if self.rhs_is_direct_value(): # rhs should be an iterable of values. Use batch_process_rhs() # to prepare/transform those values. return self.batch_process_rhs(compiler, connection) else: return super().process_rhs(compiler, connection) def resolve_expression_parameter(self, compiler, connection, sql, param): params = [param] if hasattr(param, 'resolve_expression'): param = param.resolve_expression(compiler.query) if hasattr(param, 'as_sql'): sql, params = compiler.compile(param) return sql, params def batch_process_rhs(self, compiler, connection, rhs=None): pre_processed = super().batch_process_rhs(compiler, connection, rhs) # The params list may contain expressions which compile to a # sql/param pair. Zip them to get sql and param pairs that refer to the # same argument and attempt to replace them with the result of # compiling the param step. sql, params = zip(*( self.resolve_expression_parameter(compiler, connection, sql, param) for sql, param in zip(*pre_processed) )) params = itertools.chain.from_iterable(params) return sql, tuple(params) class PostgresOperatorLookup(FieldGetDbPrepValueMixin, Lookup): """Lookup defined by operators on PostgreSQL.""" postgres_operator = None def as_postgresql(self, compiler, connection): lhs, lhs_params = self.process_lhs(compiler, connection) rhs, rhs_params = self.process_rhs(compiler, connection) params = tuple(lhs_params) + tuple(rhs_params) return '%s %s %s' % (lhs, self.postgres_operator, rhs), params @Field.register_lookup class Exact(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'exact' def process_rhs(self, compiler, connection): from django.db.models.sql.query import Query if isinstance(self.rhs, Query): if self.rhs.has_limit_one(): if not self.rhs.has_select_fields: self.rhs.clear_select_clause() self.rhs.add_fields(['pk']) else: raise ValueError( 'The QuerySet value for an exact lookup must be limited to ' 'one result using slicing.' ) return super().process_rhs(compiler, connection) def as_sql(self, compiler, connection): # Avoid comparison against direct rhs if lhs is a boolean value. That # turns "boolfield__exact=True" into "WHERE boolean_field" instead of # "WHERE boolean_field = True" when allowed. 
if ( isinstance(self.rhs, bool) and getattr(self.lhs, 'conditional', False) and connection.ops.conditional_expression_supported_in_where_clause(self.lhs) ): lhs_sql, params = self.process_lhs(compiler, connection) template = '%s' if self.rhs else 'NOT %s' return template % lhs_sql, params return super().as_sql(compiler, connection) @Field.register_lookup class IExact(BuiltinLookup): lookup_name = 'iexact' prepare_rhs = False def process_rhs(self, qn, connection): rhs, params = super().process_rhs(qn, connection) if params: params[0] = connection.ops.prep_for_iexact_query(params[0]) return rhs, params @Field.register_lookup class GreaterThan(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'gt' @Field.register_lookup class GreaterThanOrEqual(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'gte' @Field.register_lookup class LessThan(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'lt' @Field.register_lookup class LessThanOrEqual(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'lte' class IntegerFieldFloatRounding: """ Allow floats to work as query values for IntegerField. Without this, the decimal portion of the float would always be discarded. """ def get_prep_lookup(self): if isinstance(self.rhs, float): self.rhs = math.ceil(self.rhs) return super().get_prep_lookup() @IntegerField.register_lookup class IntegerGreaterThanOrEqual(IntegerFieldFloatRounding, GreaterThanOrEqual): pass @IntegerField.register_lookup class IntegerLessThan(IntegerFieldFloatRounding, LessThan): pass @Field.register_lookup class In(FieldGetDbPrepValueIterableMixin, BuiltinLookup): lookup_name = 'in' def process_rhs(self, compiler, connection): db_rhs = getattr(self.rhs, '_db', None) if db_rhs is not None and db_rhs != connection.alias: raise ValueError( "Subqueries aren't allowed across different databases. Force " "the inner query to be evaluated using `list(inner_query)`." ) if self.rhs_is_direct_value(): # Remove None from the list as NULL is never equal to anything. try: rhs = OrderedSet(self.rhs) rhs.discard(None) except TypeError: # Unhashable items in self.rhs rhs = [r for r in self.rhs if r is not None] if not rhs: raise EmptyResultSet # rhs should be an iterable; use batch_process_rhs() to # prepare/transform those values. sqls, sqls_params = self.batch_process_rhs(compiler, connection, rhs) placeholder = '(' + ', '.join(sqls) + ')' return (placeholder, sqls_params) else: if not getattr(self.rhs, 'has_select_fields', True): self.rhs.clear_select_clause() self.rhs.add_fields(['pk']) return super().process_rhs(compiler, connection) def get_group_by_cols(self, alias=None): cols = self.lhs.get_group_by_cols() if hasattr(self.rhs, 'get_group_by_cols'): if not getattr(self.rhs, 'has_select_fields', True): self.rhs.clear_select_clause() self.rhs.add_fields(['pk']) cols.extend(self.rhs.get_group_by_cols()) return cols def get_rhs_op(self, connection, rhs): return 'IN %s' % rhs def as_sql(self, compiler, connection): max_in_list_size = connection.ops.max_in_list_size() if self.rhs_is_direct_value() and max_in_list_size and len(self.rhs) > max_in_list_size: return self.split_parameter_list_as_sql(compiler, connection) return super().as_sql(compiler, connection) def split_parameter_list_as_sql(self, compiler, connection): # This is a special case for databases which limit the number of # elements which can appear in an 'IN' clause. 
max_in_list_size = connection.ops.max_in_list_size() lhs, lhs_params = self.process_lhs(compiler, connection) rhs, rhs_params = self.batch_process_rhs(compiler, connection) in_clause_elements = ['('] params = [] for offset in range(0, len(rhs_params), max_in_list_size): if offset > 0: in_clause_elements.append(' OR ') in_clause_elements.append('%s IN (' % lhs) params.extend(lhs_params) sqls = rhs[offset: offset + max_in_list_size] sqls_params = rhs_params[offset: offset + max_in_list_size] param_group = ', '.join(sqls) in_clause_elements.append(param_group) in_clause_elements.append(')') params.extend(sqls_params) in_clause_elements.append(')') return ''.join(in_clause_elements), params class PatternLookup(BuiltinLookup): param_pattern = '%%%s%%' prepare_rhs = False def get_rhs_op(self, connection, rhs): # Assume we are in startswith. We need to produce SQL like: # col LIKE %s, ['thevalue%'] # For python values we can (and should) do that directly in Python, # but if the value is for example reference to other column, then # we need to add the % pattern match to the lookup by something like # col LIKE othercol || '%%' # So, for Python values we don't need any special pattern, but for # SQL reference values or SQL transformations we need the correct # pattern added. if hasattr(self.rhs, 'as_sql') or self.bilateral_transforms: pattern = connection.pattern_ops[self.lookup_name].format(connection.pattern_esc) return pattern.format(rhs) else: return super().get_rhs_op(connection, rhs) def process_rhs(self, qn, connection): rhs, params = super().process_rhs(qn, connection) if self.rhs_is_direct_value() and params and not self.bilateral_transforms: params[0] = self.param_pattern % connection.ops.prep_for_like_query(params[0]) return rhs, params @Field.register_lookup class Contains(PatternLookup): lookup_name = 'contains' @Field.register_lookup class IContains(Contains): lookup_name = 'icontains' @Field.register_lookup class StartsWith(PatternLookup): lookup_name = 'startswith' param_pattern = '%s%%' @Field.register_lookup class IStartsWith(StartsWith): lookup_name = 'istartswith' @Field.register_lookup class EndsWith(PatternLookup): lookup_name = 'endswith' param_pattern = '%%%s' @Field.register_lookup class IEndsWith(EndsWith): lookup_name = 'iendswith' @Field.register_lookup class Range(FieldGetDbPrepValueIterableMixin, BuiltinLookup): lookup_name = 'range' def get_rhs_op(self, connection, rhs): return "BETWEEN %s AND %s" % (rhs[0], rhs[1]) @Field.register_lookup class IsNull(BuiltinLookup): lookup_name = 'isnull' prepare_rhs = False def as_sql(self, compiler, connection): if not isinstance(self.rhs, bool): raise ValueError( 'The QuerySet value for an isnull lookup must be True or ' 'False.' 
) sql, params = compiler.compile(self.lhs) if self.rhs: return "%s IS NULL" % sql, params else: return "%s IS NOT NULL" % sql, params @Field.register_lookup class Regex(BuiltinLookup): lookup_name = 'regex' prepare_rhs = False def as_sql(self, compiler, connection): if self.lookup_name in connection.operators: return super().as_sql(compiler, connection) else: lhs, lhs_params = self.process_lhs(compiler, connection) rhs, rhs_params = self.process_rhs(compiler, connection) sql_template = connection.ops.regex_lookup(self.lookup_name) return sql_template % (lhs, rhs), lhs_params + rhs_params @Field.register_lookup class IRegex(Regex): lookup_name = 'iregex' class YearLookup(Lookup): def year_lookup_bounds(self, connection, year): from django.db.models.functions import ExtractIsoYear iso_year = isinstance(self.lhs, ExtractIsoYear) output_field = self.lhs.lhs.output_field if isinstance(output_field, DateTimeField): bounds = connection.ops.year_lookup_bounds_for_datetime_field( year, iso_year=iso_year, ) else: bounds = connection.ops.year_lookup_bounds_for_date_field( year, iso_year=iso_year, ) return bounds def as_sql(self, compiler, connection): # Avoid the extract operation if the rhs is a direct value to allow # indexes to be used. if self.rhs_is_direct_value(): # Skip the extract part by directly using the originating field, # that is self.lhs.lhs. lhs_sql, params = self.process_lhs(compiler, connection, self.lhs.lhs) rhs_sql, _ = self.process_rhs(compiler, connection) rhs_sql = self.get_direct_rhs_sql(connection, rhs_sql) start, finish = self.year_lookup_bounds(connection, self.rhs) params.extend(self.get_bound_params(start, finish)) return '%s %s' % (lhs_sql, rhs_sql), params return super().as_sql(compiler, connection) def get_direct_rhs_sql(self, connection, rhs): return connection.operators[self.lookup_name] % rhs def get_bound_params(self, start, finish): raise NotImplementedError( 'subclasses of YearLookup must provide a get_bound_params() method' ) class YearExact(YearLookup, Exact): def get_direct_rhs_sql(self, connection, rhs): return 'BETWEEN %s AND %s' def get_bound_params(self, start, finish): return (start, finish) class YearGt(YearLookup, GreaterThan): def get_bound_params(self, start, finish): return (finish,) class YearGte(YearLookup, GreaterThanOrEqual): def get_bound_params(self, start, finish): return (start,) class YearLt(YearLookup, LessThan): def get_bound_params(self, start, finish): return (start,) class YearLte(YearLookup, LessThanOrEqual): def get_bound_params(self, start, finish): return (finish,) class UUIDTextMixin: """ Strip hyphens from a value when filtering a UUIDField on backends without a native datatype for UUID. 
""" def process_rhs(self, qn, connection): if not connection.features.has_native_uuid_field: from django.db.models.functions import Replace if self.rhs_is_direct_value(): self.rhs = Value(self.rhs) self.rhs = Replace(self.rhs, Value('-'), Value(''), output_field=CharField()) rhs, params = super().process_rhs(qn, connection) return rhs, params @UUIDField.register_lookup class UUIDIExact(UUIDTextMixin, IExact): pass @UUIDField.register_lookup class UUIDContains(UUIDTextMixin, Contains): pass @UUIDField.register_lookup class UUIDIContains(UUIDTextMixin, IContains): pass @UUIDField.register_lookup class UUIDStartsWith(UUIDTextMixin, StartsWith): pass @UUIDField.register_lookup class UUIDIStartsWith(UUIDTextMixin, IStartsWith): pass @UUIDField.register_lookup class UUIDEndsWith(UUIDTextMixin, EndsWith): pass @UUIDField.register_lookup class UUIDIEndsWith(UUIDTextMixin, IEndsWith): pass
116e874ca8cdb29d3728ee2b2ebe6482cb7269b7e4a60c0b0d820c0c571cbaa9
from django.db import models from django.db.migrations.operations.base import Operation from django.db.migrations.state import ModelState from django.db.migrations.utils import resolve_relation from django.db.models.options import normalize_together from django.utils.functional import cached_property from .fields import ( AddField, AlterField, FieldOperation, RemoveField, RenameField, ) from .utils import field_references, get_references def _check_for_duplicates(arg_name, objs): used_vals = set() for val in objs: if val in used_vals: raise ValueError( "Found duplicate value %s in CreateModel %s argument." % (val, arg_name) ) used_vals.add(val) class ModelOperation(Operation): def __init__(self, name): self.name = name @cached_property def name_lower(self): return self.name.lower() def references_model(self, name, app_label): return name.lower() == self.name_lower def reduce(self, operation, app_label): return ( super().reduce(operation, app_label) or not operation.references_model(self.name, app_label) ) class CreateModel(ModelOperation): """Create a model's table.""" serialization_expand_args = ['fields', 'options', 'managers'] def __init__(self, name, fields, options=None, bases=None, managers=None): self.fields = fields self.options = options or {} self.bases = bases or (models.Model,) self.managers = managers or [] super().__init__(name) # Sanity-check that there are no duplicated field names, bases, or # manager names _check_for_duplicates('fields', (name for name, _ in self.fields)) _check_for_duplicates('bases', ( base._meta.label_lower if hasattr(base, '_meta') else base.lower() if isinstance(base, str) else base for base in self.bases )) _check_for_duplicates('managers', (name for name, _ in self.managers)) def deconstruct(self): kwargs = { 'name': self.name, 'fields': self.fields, } if self.options: kwargs['options'] = self.options if self.bases and self.bases != (models.Model,): kwargs['bases'] = self.bases if self.managers and self.managers != [('objects', models.Manager())]: kwargs['managers'] = self.managers return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): state.add_model(ModelState( app_label, self.name, list(self.fields), dict(self.options), tuple(self.bases), list(self.managers), )) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.create_model(model) def database_backwards(self, app_label, schema_editor, from_state, to_state): model = from_state.apps.get_model(app_label, self.name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.delete_model(model) def describe(self): return "Create %smodel %s" % ("proxy " if self.options.get("proxy", False) else "", self.name) @property def migration_name_fragment(self): return self.name_lower def references_model(self, name, app_label): name_lower = name.lower() if name_lower == self.name_lower: return True # Check we didn't inherit from the model reference_model_tuple = (app_label, name_lower) for base in self.bases: if (base is not models.Model and isinstance(base, (models.base.ModelBase, str)) and resolve_relation(base, app_label) == reference_model_tuple): return True # Check we have no FKs/M2Ms with it for _name, field in self.fields: if field_references((app_label, self.name_lower), field, reference_model_tuple): return True return False def reduce(self, operation, app_label): if 
(isinstance(operation, DeleteModel) and self.name_lower == operation.name_lower and not self.options.get("proxy", False)): return [] elif isinstance(operation, RenameModel) and self.name_lower == operation.old_name_lower: return [ CreateModel( operation.new_name, fields=self.fields, options=self.options, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, AlterModelOptions) and self.name_lower == operation.name_lower: options = {**self.options, **operation.options} for key in operation.ALTER_OPTION_KEYS: if key not in operation.options: options.pop(key, None) return [ CreateModel( self.name, fields=self.fields, options=options, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, AlterTogetherOptionOperation) and self.name_lower == operation.name_lower: return [ CreateModel( self.name, fields=self.fields, options={**self.options, **{operation.option_name: operation.option_value}}, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, AlterOrderWithRespectTo) and self.name_lower == operation.name_lower: return [ CreateModel( self.name, fields=self.fields, options={**self.options, 'order_with_respect_to': operation.order_with_respect_to}, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, FieldOperation) and self.name_lower == operation.model_name_lower: if isinstance(operation, AddField): return [ CreateModel( self.name, fields=self.fields + [(operation.name, operation.field)], options=self.options, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, AlterField): return [ CreateModel( self.name, fields=[ (n, operation.field if n == operation.name else v) for n, v in self.fields ], options=self.options, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, RemoveField): options = self.options.copy() for option_name in ('unique_together', 'index_together'): option = options.pop(option_name, None) if option: option = set(filter(bool, ( tuple(f for f in fields if f != operation.name_lower) for fields in option ))) if option: options[option_name] = option order_with_respect_to = options.get('order_with_respect_to') if order_with_respect_to == operation.name_lower: del options['order_with_respect_to'] return [ CreateModel( self.name, fields=[ (n, v) for n, v in self.fields if n.lower() != operation.name_lower ], options=options, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, RenameField): options = self.options.copy() for option_name in ('unique_together', 'index_together'): option = options.get(option_name) if option: options[option_name] = { tuple(operation.new_name if f == operation.old_name else f for f in fields) for fields in option } order_with_respect_to = options.get('order_with_respect_to') if order_with_respect_to == operation.old_name: options['order_with_respect_to'] = operation.new_name return [ CreateModel( self.name, fields=[ (operation.new_name if n == operation.old_name else n, v) for n, v in self.fields ], options=options, bases=self.bases, managers=self.managers, ), ] return super().reduce(operation, app_label) class DeleteModel(ModelOperation): """Drop a model's table.""" def deconstruct(self): kwargs = { 'name': self.name, } return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): state.remove_model(app_label, self.name_lower) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = from_state.apps.get_model(app_label, self.name) if 
self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.delete_model(model) def database_backwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.create_model(model) def references_model(self, name, app_label): # The deleted model could be referencing the specified model through # related fields. return True def describe(self): return "Delete model %s" % self.name @property def migration_name_fragment(self): return 'delete_%s' % self.name_lower class RenameModel(ModelOperation): """Rename a model.""" def __init__(self, old_name, new_name): self.old_name = old_name self.new_name = new_name super().__init__(old_name) @cached_property def old_name_lower(self): return self.old_name.lower() @cached_property def new_name_lower(self): return self.new_name.lower() def deconstruct(self): kwargs = { 'old_name': self.old_name, 'new_name': self.new_name, } return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): # Add a new model. renamed_model = state.models[app_label, self.old_name_lower].clone() renamed_model.name = self.new_name state.models[app_label, self.new_name_lower] = renamed_model # Repoint all fields pointing to the old model to the new one. old_model_tuple = (app_label, self.old_name_lower) new_remote_model = '%s.%s' % (app_label, self.new_name) to_reload = set() for model_state, name, field, reference in get_references(state, old_model_tuple): changed_field = None if reference.to: changed_field = field.clone() changed_field.remote_field.model = new_remote_model if reference.through: if changed_field is None: changed_field = field.clone() changed_field.remote_field.through = new_remote_model if changed_field: model_state.fields[name] = changed_field to_reload.add((model_state.app_label, model_state.name_lower)) # Reload models related to old model before removing the old model. state.reload_models(to_reload, delay=True) # Remove the old model. state.remove_model(app_label, self.old_name_lower) state.reload_model(app_label, self.new_name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): new_model = to_state.apps.get_model(app_label, self.new_name) if self.allow_migrate_model(schema_editor.connection.alias, new_model): old_model = from_state.apps.get_model(app_label, self.old_name) # Move the main table schema_editor.alter_db_table( new_model, old_model._meta.db_table, new_model._meta.db_table, ) # Alter the fields pointing to us for related_object in old_model._meta.related_objects: if related_object.related_model == old_model: model = new_model related_key = (app_label, self.new_name_lower) else: model = related_object.related_model related_key = ( related_object.related_model._meta.app_label, related_object.related_model._meta.model_name, ) to_field = to_state.apps.get_model( *related_key )._meta.get_field(related_object.field.name) schema_editor.alter_field( model, related_object.field, to_field, ) # Rename M2M fields whose name is based on this model's name. fields = zip(old_model._meta.local_many_to_many, new_model._meta.local_many_to_many) for (old_field, new_field) in fields: # Skip self-referential fields as these are renamed above. if new_field.model == new_field.related_model or not new_field.remote_field.through._meta.auto_created: continue # Rename the M2M table that's based on this model's name. 
old_m2m_model = old_field.remote_field.through new_m2m_model = new_field.remote_field.through schema_editor.alter_db_table( new_m2m_model, old_m2m_model._meta.db_table, new_m2m_model._meta.db_table, ) # Rename the column in the M2M table that's based on this # model's name. schema_editor.alter_field( new_m2m_model, old_m2m_model._meta.get_field(old_model._meta.model_name), new_m2m_model._meta.get_field(new_model._meta.model_name), ) def database_backwards(self, app_label, schema_editor, from_state, to_state): self.new_name_lower, self.old_name_lower = self.old_name_lower, self.new_name_lower self.new_name, self.old_name = self.old_name, self.new_name self.database_forwards(app_label, schema_editor, from_state, to_state) self.new_name_lower, self.old_name_lower = self.old_name_lower, self.new_name_lower self.new_name, self.old_name = self.old_name, self.new_name def references_model(self, name, app_label): return ( name.lower() == self.old_name_lower or name.lower() == self.new_name_lower ) def describe(self): return "Rename model %s to %s" % (self.old_name, self.new_name) @property def migration_name_fragment(self): return 'rename_%s_%s' % (self.old_name_lower, self.new_name_lower) def reduce(self, operation, app_label): if (isinstance(operation, RenameModel) and self.new_name_lower == operation.old_name_lower): return [ RenameModel( self.old_name, operation.new_name, ), ] # Skip `ModelOperation.reduce` as we want to run `references_model` # against self.new_name. return ( super(ModelOperation, self).reduce(operation, app_label) or not operation.references_model(self.new_name, app_label) ) class ModelOptionOperation(ModelOperation): def reduce(self, operation, app_label): if isinstance(operation, (self.__class__, DeleteModel)) and self.name_lower == operation.name_lower: return [operation] return super().reduce(operation, app_label) class AlterModelTable(ModelOptionOperation): """Rename a model's table.""" def __init__(self, name, table): self.table = table super().__init__(name) def deconstruct(self): kwargs = { 'name': self.name, 'table': self.table, } return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): state.models[app_label, self.name_lower].options["db_table"] = self.table state.reload_model(app_label, self.name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): new_model = to_state.apps.get_model(app_label, self.name) if self.allow_migrate_model(schema_editor.connection.alias, new_model): old_model = from_state.apps.get_model(app_label, self.name) schema_editor.alter_db_table( new_model, old_model._meta.db_table, new_model._meta.db_table, ) # Rename M2M fields whose name is based on this model's db_table for (old_field, new_field) in zip(old_model._meta.local_many_to_many, new_model._meta.local_many_to_many): if new_field.remote_field.through._meta.auto_created: schema_editor.alter_db_table( new_field.remote_field.through, old_field.remote_field.through._meta.db_table, new_field.remote_field.through._meta.db_table, ) def database_backwards(self, app_label, schema_editor, from_state, to_state): return self.database_forwards(app_label, schema_editor, from_state, to_state) def describe(self): return "Rename table for %s to %s" % ( self.name, self.table if self.table is not None else "(default)" ) @property def migration_name_fragment(self): return 'alter_%s_table' % self.name_lower class AlterTogetherOptionOperation(ModelOptionOperation): option_name = None def __init__(self, name, option_value): if 
option_value: option_value = set(normalize_together(option_value)) setattr(self, self.option_name, option_value) super().__init__(name) @cached_property def option_value(self): return getattr(self, self.option_name) def deconstruct(self): kwargs = { 'name': self.name, self.option_name: self.option_value, } return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): model_state = state.models[app_label, self.name_lower] model_state.options[self.option_name] = self.option_value state.reload_model(app_label, self.name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): new_model = to_state.apps.get_model(app_label, self.name) if self.allow_migrate_model(schema_editor.connection.alias, new_model): old_model = from_state.apps.get_model(app_label, self.name) alter_together = getattr(schema_editor, 'alter_%s' % self.option_name) alter_together( new_model, getattr(old_model._meta, self.option_name, set()), getattr(new_model._meta, self.option_name, set()), ) def database_backwards(self, app_label, schema_editor, from_state, to_state): return self.database_forwards(app_label, schema_editor, from_state, to_state) def references_field(self, model_name, name, app_label): return ( self.references_model(model_name, app_label) and ( not self.option_value or any((name in fields) for fields in self.option_value) ) ) def describe(self): return "Alter %s for %s (%s constraint(s))" % (self.option_name, self.name, len(self.option_value or '')) @property def migration_name_fragment(self): return 'alter_%s_%s' % (self.name_lower, self.option_name) class AlterUniqueTogether(AlterTogetherOptionOperation): """ Change the value of unique_together to the target one. Input value of unique_together must be a set of tuples. """ option_name = 'unique_together' def __init__(self, name, unique_together): super().__init__(name, unique_together) class AlterIndexTogether(AlterTogetherOptionOperation): """ Change the value of index_together to the target one. Input value of index_together must be a set of tuples. 
""" option_name = "index_together" def __init__(self, name, index_together): super().__init__(name, index_together) class AlterOrderWithRespectTo(ModelOptionOperation): """Represent a change with the order_with_respect_to option.""" option_name = 'order_with_respect_to' def __init__(self, name, order_with_respect_to): self.order_with_respect_to = order_with_respect_to super().__init__(name) def deconstruct(self): kwargs = { 'name': self.name, 'order_with_respect_to': self.order_with_respect_to, } return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): model_state = state.models[app_label, self.name_lower] model_state.options['order_with_respect_to'] = self.order_with_respect_to state.reload_model(app_label, self.name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): to_model = to_state.apps.get_model(app_label, self.name) if self.allow_migrate_model(schema_editor.connection.alias, to_model): from_model = from_state.apps.get_model(app_label, self.name) # Remove a field if we need to if from_model._meta.order_with_respect_to and not to_model._meta.order_with_respect_to: schema_editor.remove_field(from_model, from_model._meta.get_field("_order")) # Add a field if we need to (altering the column is untouched as # it's likely a rename) elif to_model._meta.order_with_respect_to and not from_model._meta.order_with_respect_to: field = to_model._meta.get_field("_order") if not field.has_default(): field.default = 0 schema_editor.add_field( from_model, field, ) def database_backwards(self, app_label, schema_editor, from_state, to_state): self.database_forwards(app_label, schema_editor, from_state, to_state) def references_field(self, model_name, name, app_label): return ( self.references_model(model_name, app_label) and ( self.order_with_respect_to is None or name == self.order_with_respect_to ) ) def describe(self): return "Set order_with_respect_to on %s to %s" % (self.name, self.order_with_respect_to) @property def migration_name_fragment(self): return 'alter_%s_order_with_respect_to' % self.name_lower class AlterModelOptions(ModelOptionOperation): """ Set new model options that don't directly affect the database schema (like verbose_name, permissions, ordering). Python code in migrations may still need them. 
""" # Model options we want to compare and preserve in an AlterModelOptions op ALTER_OPTION_KEYS = [ "base_manager_name", "default_manager_name", "default_related_name", "get_latest_by", "managed", "ordering", "permissions", "default_permissions", "select_on_save", "verbose_name", "verbose_name_plural", ] def __init__(self, name, options): self.options = options super().__init__(name) def deconstruct(self): kwargs = { 'name': self.name, 'options': self.options, } return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): model_state = state.models[app_label, self.name_lower] model_state.options = {**model_state.options, **self.options} for key in self.ALTER_OPTION_KEYS: if key not in self.options: model_state.options.pop(key, False) state.reload_model(app_label, self.name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): pass def database_backwards(self, app_label, schema_editor, from_state, to_state): pass def describe(self): return "Change Meta options on %s" % self.name @property def migration_name_fragment(self): return 'alter_%s_options' % self.name_lower class AlterModelManagers(ModelOptionOperation): """Alter the model's managers.""" serialization_expand_args = ['managers'] def __init__(self, name, managers): self.managers = managers super().__init__(name) def deconstruct(self): return ( self.__class__.__qualname__, [self.name, self.managers], {} ) def state_forwards(self, app_label, state): model_state = state.models[app_label, self.name_lower] model_state.managers = list(self.managers) state.reload_model(app_label, self.name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): pass def database_backwards(self, app_label, schema_editor, from_state, to_state): pass def describe(self): return "Change managers on %s" % self.name @property def migration_name_fragment(self): return 'alter_%s_managers' % self.name_lower class IndexOperation(Operation): option_name = 'indexes' @cached_property def model_name_lower(self): return self.model_name.lower() class AddIndex(IndexOperation): """Add an index on a model.""" def __init__(self, model_name, index): self.model_name = model_name if not index.name: raise ValueError( "Indexes passed to AddIndex operations require a name " "argument. %r doesn't have one." 
% index ) self.index = index def state_forwards(self, app_label, state): model_state = state.models[app_label, self.model_name_lower] model_state.options[self.option_name] = [*model_state.options[self.option_name], self.index.clone()] state.reload_model(app_label, self.model_name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.add_index(model, self.index) def database_backwards(self, app_label, schema_editor, from_state, to_state): model = from_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.remove_index(model, self.index) def deconstruct(self): kwargs = { 'model_name': self.model_name, 'index': self.index, } return ( self.__class__.__qualname__, [], kwargs, ) def describe(self): if self.index.expressions: return 'Create index %s on %s on model %s' % ( self.index.name, ', '.join([str(expression) for expression in self.index.expressions]), self.model_name, ) return 'Create index %s on field(s) %s of model %s' % ( self.index.name, ', '.join(self.index.fields), self.model_name, ) @property def migration_name_fragment(self): return '%s_%s' % (self.model_name_lower, self.index.name.lower()) class RemoveIndex(IndexOperation): """Remove an index from a model.""" def __init__(self, model_name, name): self.model_name = model_name self.name = name def state_forwards(self, app_label, state): model_state = state.models[app_label, self.model_name_lower] indexes = model_state.options[self.option_name] model_state.options[self.option_name] = [idx for idx in indexes if idx.name != self.name] state.reload_model(app_label, self.model_name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = from_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): from_model_state = from_state.models[app_label, self.model_name_lower] index = from_model_state.get_index_by_name(self.name) schema_editor.remove_index(model, index) def database_backwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): to_model_state = to_state.models[app_label, self.model_name_lower] index = to_model_state.get_index_by_name(self.name) schema_editor.add_index(model, index) def deconstruct(self): kwargs = { 'model_name': self.model_name, 'name': self.name, } return ( self.__class__.__qualname__, [], kwargs, ) def describe(self): return 'Remove index %s from %s' % (self.name, self.model_name) @property def migration_name_fragment(self): return 'remove_%s_%s' % (self.model_name_lower, self.name.lower()) class AddConstraint(IndexOperation): option_name = 'constraints' def __init__(self, model_name, constraint): self.model_name = model_name self.constraint = constraint def state_forwards(self, app_label, state): model_state = state.models[app_label, self.model_name_lower] model_state.options[self.option_name] = [*model_state.options[self.option_name], self.constraint] state.reload_model(app_label, self.model_name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): 
schema_editor.add_constraint(model, self.constraint) def database_backwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.remove_constraint(model, self.constraint) def deconstruct(self): return self.__class__.__name__, [], { 'model_name': self.model_name, 'constraint': self.constraint, } def describe(self): return 'Create constraint %s on model %s' % (self.constraint.name, self.model_name) @property def migration_name_fragment(self): return '%s_%s' % (self.model_name_lower, self.constraint.name.lower()) class RemoveConstraint(IndexOperation): option_name = 'constraints' def __init__(self, model_name, name): self.model_name = model_name self.name = name def state_forwards(self, app_label, state): model_state = state.models[app_label, self.model_name_lower] constraints = model_state.options[self.option_name] model_state.options[self.option_name] = [c for c in constraints if c.name != self.name] state.reload_model(app_label, self.model_name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): from_model_state = from_state.models[app_label, self.model_name_lower] constraint = from_model_state.get_constraint_by_name(self.name) schema_editor.remove_constraint(model, constraint) def database_backwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): to_model_state = to_state.models[app_label, self.model_name_lower] constraint = to_model_state.get_constraint_by_name(self.name) schema_editor.add_constraint(model, constraint) def deconstruct(self): return self.__class__.__name__, [], { 'model_name': self.model_name, 'name': self.name, } def describe(self): return 'Remove constraint %s from model %s' % (self.name, self.model_name) @property def migration_name_fragment(self): return 'remove_%s_%s' % (self.model_name_lower, self.name.lower())
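# Illustrative sketch (not part of this module): roughly how the index and
# constraint operations defined above are typically listed in a generated
# migration. The app label, model, and field names below ("library", "book",
# "title", "price") are hypothetical placeholders, not anything defined by
# Django itself. Kept commented out so it has no effect on this module.
#
#     from django.db import migrations, models
#
#     class Migration(migrations.Migration):
#         dependencies = [('library', '0001_initial')]
#         operations = [
#             migrations.AddIndex(
#                 model_name='book',
#                 index=models.Index(fields=['title'], name='library_book_title_idx'),
#             ),
#             migrations.AddConstraint(
#                 model_name='book',
#                 constraint=models.CheckConstraint(
#                     check=models.Q(price__gte=0), name='book_price_gte_0',
#                 ),
#             ),
#             # Reversing AddConstraint is equivalent to this explicit removal:
#             migrations.RemoveConstraint(model_name='book', name='book_price_gte_0'),
#         ]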

from collections import namedtuple from django.db.migrations.utils import resolve_relation FieldReference = namedtuple('FieldReference', 'to through') def field_references( model_tuple, field, reference_model_tuple, reference_field_name=None, reference_field=None, ): """ Return either False or a FieldReference if `field` references provided context. False positives can be returned if `reference_field_name` is provided without `reference_field` because of the introspection limitation it incurs. This should not be an issue when this function is used to determine whether or not an optimization can take place. """ remote_field = field.remote_field if not remote_field: return False references_to = None references_through = None if resolve_relation(remote_field.model, *model_tuple) == reference_model_tuple: to_fields = getattr(field, 'to_fields', None) if ( reference_field_name is None or # Unspecified to_field(s). to_fields is None or # Reference to primary key. (None in to_fields and (reference_field is None or reference_field.primary_key)) or # Reference to field. reference_field_name in to_fields ): references_to = (remote_field, to_fields) through = getattr(remote_field, 'through', None) if through and resolve_relation(through, *model_tuple) == reference_model_tuple: through_fields = remote_field.through_fields if ( reference_field_name is None or # Unspecified through_fields. through_fields is None or # Reference to field. reference_field_name in through_fields ): references_through = (remote_field, through_fields) if not (references_to or references_through): return False return FieldReference(references_to, references_through) def get_references(state, model_tuple, field_tuple=()): """ Generator of (model_state, name, field, reference) referencing provided context. If field_tuple is provided only references to this particular field of model_tuple will be generated. """ for state_model_tuple, model_state in state.models.items(): for name, field in model_state.fields.items(): reference = field_references(state_model_tuple, field, model_tuple, *field_tuple) if reference: yield model_state, name, field, reference def field_is_referenced(state, model_tuple, field_tuple): """Return whether `field_tuple` is referenced by any state models.""" return next(get_references(state, model_tuple, field_tuple), None) is not None
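# Illustrative sketch (not part of this module): how the helpers above might
# be queried by migration-optimization code. The names below ("app", "book",
# "author", `book_author_field`, `author_pk_field`, `project_state`) are
# hypothetical placeholders standing in for values taken from model state.
# Kept commented out so importing this module is unaffected.
#
#     ref = field_references(
#         ('app', 'book'),        # (app_label, model_name) that owns the field
#         book_author_field,      # e.g. a ForeignKey instance Book.author -> Author
#         ('app', 'author'),      # model whose references are being checked
#     )
#     if ref:
#         to, through = ref       # FieldReference(to=..., through=...) or False
#
#     # True if any model in `project_state` still references Author's pk field:
#     referenced = field_is_referenced(
#         project_state, ('app', 'author'), ('id', author_pk_field),
#     )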
import collections.abc import copy import datetime import decimal import operator import uuid import warnings from base64 import b64decode, b64encode from functools import partialmethod, total_ordering from django import forms from django.apps import apps from django.conf import settings from django.core import checks, exceptions, validators from django.db import connection, connections, router from django.db.models.constants import LOOKUP_SEP from django.db.models.query_utils import DeferredAttribute, RegisterLookupMixin from django.utils import timezone from django.utils.datastructures import DictWrapper from django.utils.dateparse import ( parse_date, parse_datetime, parse_duration, parse_time, ) from django.utils.duration import duration_microseconds, duration_string from django.utils.functional import Promise, cached_property from django.utils.ipv6 import clean_ipv6_address from django.utils.itercompat import is_iterable from django.utils.text import capfirst from django.utils.translation import gettext_lazy as _ __all__ = [ 'AutoField', 'BLANK_CHOICE_DASH', 'BigAutoField', 'BigIntegerField', 'BinaryField', 'BooleanField', 'CharField', 'CommaSeparatedIntegerField', 'DateField', 'DateTimeField', 'DecimalField', 'DurationField', 'EmailField', 'Empty', 'Field', 'FilePathField', 'FloatField', 'GenericIPAddressField', 'IPAddressField', 'IntegerField', 'NOT_PROVIDED', 'NullBooleanField', 'PositiveBigIntegerField', 'PositiveIntegerField', 'PositiveSmallIntegerField', 'SlugField', 'SmallAutoField', 'SmallIntegerField', 'TextField', 'TimeField', 'URLField', 'UUIDField', ] class Empty: pass class NOT_PROVIDED: pass # The values to use for "blank" in SelectFields. Will be appended to the start # of most "choices" lists. BLANK_CHOICE_DASH = [("", "---------")] def _load_field(app_label, model_name, field_name): return apps.get_model(app_label, model_name)._meta.get_field(field_name) # A guide to Field parameters: # # * name: The name of the field specified in the model. # * attname: The attribute to use on the model object. This is the same as # "name", except in the case of ForeignKeys, where "_id" is # appended. # * db_column: The db_column specified in the model (or None). # * column: The database column for this field. This is the same as # "attname", except if db_column is specified. # # Code that introspects values, or does other dynamic things, should use # attname. For example, this gets the primary key value of object "obj": # # getattr(obj, opts.pk.attname) def _empty(of_cls): new = Empty() new.__class__ = of_cls return new def return_None(): return None @total_ordering class Field(RegisterLookupMixin): """Base class for all field types""" # Designates whether empty strings fundamentally are allowed at the # database level. empty_strings_allowed = True empty_values = list(validators.EMPTY_VALUES) # These track each time a Field instance is created. Used to retain order. # The auto_creation_counter is used for fields that Django implicitly # creates, creation_counter is used for all user-specified fields. creation_counter = 0 auto_creation_counter = -1 default_validators = [] # Default set of validators default_error_messages = { 'invalid_choice': _('Value %(value)r is not a valid choice.'), 'null': _('This field cannot be null.'), 'blank': _('This field cannot be blank.'), 'unique': _('%(model_name)s with this %(field_label)s ' 'already exists.'), # Translators: The 'lookup_type' is one of 'date', 'year' or 'month'. 
# Eg: "Title must be unique for pub_date year" 'unique_for_date': _("%(field_label)s must be unique for " "%(date_field_label)s %(lookup_type)s."), } system_check_deprecated_details = None system_check_removed_details = None # Field flags hidden = False many_to_many = None many_to_one = None one_to_many = None one_to_one = None related_model = None descriptor_class = DeferredAttribute # Generic field type description, usually overridden by subclasses def _description(self): return _('Field of type: %(field_type)s') % { 'field_type': self.__class__.__name__ } description = property(_description) def __init__(self, verbose_name=None, name=None, primary_key=False, max_length=None, unique=False, blank=False, null=False, db_index=False, rel=None, default=NOT_PROVIDED, editable=True, serialize=True, unique_for_date=None, unique_for_month=None, unique_for_year=None, choices=None, help_text='', db_column=None, db_tablespace=None, auto_created=False, validators=(), error_messages=None): self.name = name self.verbose_name = verbose_name # May be set by set_attributes_from_name self._verbose_name = verbose_name # Store original for deconstruction self.primary_key = primary_key self.max_length, self._unique = max_length, unique self.blank, self.null = blank, null self.remote_field = rel self.is_relation = self.remote_field is not None self.default = default self.editable = editable self.serialize = serialize self.unique_for_date = unique_for_date self.unique_for_month = unique_for_month self.unique_for_year = unique_for_year if isinstance(choices, collections.abc.Iterator): choices = list(choices) self.choices = choices self.help_text = help_text self.db_index = db_index self.db_column = db_column self._db_tablespace = db_tablespace self.auto_created = auto_created # Adjust the appropriate creation counter, and save our local copy. if auto_created: self.creation_counter = Field.auto_creation_counter Field.auto_creation_counter -= 1 else: self.creation_counter = Field.creation_counter Field.creation_counter += 1 self._validators = list(validators) # Store for deconstruction later messages = {} for c in reversed(self.__class__.__mro__): messages.update(getattr(c, 'default_error_messages', {})) messages.update(error_messages or {}) self._error_messages = error_messages # Store for deconstruction later self.error_messages = messages def __str__(self): """ Return "app_label.model_label.field_name" for fields attached to models. """ if not hasattr(self, 'model'): return super().__str__() model = self.model return '%s.%s' % (model._meta.label, self.name) def __repr__(self): """Display the module, class, and name of the field.""" path = '%s.%s' % (self.__class__.__module__, self.__class__.__qualname__) name = getattr(self, 'name', None) if name is not None: return '<%s: %s>' % (path, name) return '<%s>' % path def check(self, **kwargs): return [ *self._check_field_name(), *self._check_choices(), *self._check_db_index(), *self._check_null_allowed_for_primary_keys(), *self._check_backend_specific_checks(**kwargs), *self._check_validators(), *self._check_deprecation_details(), ] def _check_field_name(self): """ Check if field name is valid, i.e. 1) does not end with an underscore, 2) does not contain "__" and 3) is not "pk". """ if self.name.endswith('_'): return [ checks.Error( 'Field names must not end with an underscore.', obj=self, id='fields.E001', ) ] elif LOOKUP_SEP in self.name: return [ checks.Error( 'Field names must not contain "%s".' 
% LOOKUP_SEP, obj=self, id='fields.E002', ) ] elif self.name == 'pk': return [ checks.Error( "'pk' is a reserved word that cannot be used as a field name.", obj=self, id='fields.E003', ) ] else: return [] @classmethod def _choices_is_value(cls, value): return isinstance(value, (str, Promise)) or not is_iterable(value) def _check_choices(self): if not self.choices: return [] if not is_iterable(self.choices) or isinstance(self.choices, str): return [ checks.Error( "'choices' must be an iterable (e.g., a list or tuple).", obj=self, id='fields.E004', ) ] choice_max_length = 0 # Expect [group_name, [value, display]] for choices_group in self.choices: try: group_name, group_choices = choices_group except (TypeError, ValueError): # Containing non-pairs break try: if not all( self._choices_is_value(value) and self._choices_is_value(human_name) for value, human_name in group_choices ): break if self.max_length is not None and group_choices: choice_max_length = max([ choice_max_length, *(len(value) for value, _ in group_choices if isinstance(value, str)), ]) except (TypeError, ValueError): # No groups, choices in the form [value, display] value, human_name = group_name, group_choices if not self._choices_is_value(value) or not self._choices_is_value(human_name): break if self.max_length is not None and isinstance(value, str): choice_max_length = max(choice_max_length, len(value)) # Special case: choices=['ab'] if isinstance(choices_group, str): break else: if self.max_length is not None and choice_max_length > self.max_length: return [ checks.Error( "'max_length' is too small to fit the longest value " "in 'choices' (%d characters)." % choice_max_length, obj=self, id='fields.E009', ), ] return [] return [ checks.Error( "'choices' must be an iterable containing " "(actual value, human readable name) tuples.", obj=self, id='fields.E005', ) ] def _check_db_index(self): if self.db_index not in (None, True, False): return [ checks.Error( "'db_index' must be None, True or False.", obj=self, id='fields.E006', ) ] else: return [] def _check_null_allowed_for_primary_keys(self): if (self.primary_key and self.null and not connection.features.interprets_empty_strings_as_nulls): # We cannot reliably check this for backends like Oracle which # consider NULL and '' to be equal (and thus set up # character-based fields a little differently). return [ checks.Error( 'Primary keys must not have null=True.', hint=('Set null=False on the field, or ' 'remove primary_key=True argument.'), obj=self, id='fields.E007', ) ] else: return [] def _check_backend_specific_checks(self, databases=None, **kwargs): if databases is None: return [] app_label = self.model._meta.app_label errors = [] for alias in databases: if router.allow_migrate(alias, app_label, model_name=self.model._meta.model_name): errors.extend(connections[alias].validation.check_field(self, **kwargs)) return errors def _check_validators(self): errors = [] for i, validator in enumerate(self.validators): if not callable(validator): errors.append( checks.Error( "All 'validators' must be callable.", hint=( "validators[{i}] ({repr}) isn't a function or " "instance of a validator class.".format( i=i, repr=repr(validator), ) ), obj=self, id='fields.E008', ) ) return errors def _check_deprecation_details(self): if self.system_check_removed_details is not None: return [ checks.Error( self.system_check_removed_details.get( 'msg', '%s has been removed except for support in historical ' 'migrations.' 
% self.__class__.__name__ ), hint=self.system_check_removed_details.get('hint'), obj=self, id=self.system_check_removed_details.get('id', 'fields.EXXX'), ) ] elif self.system_check_deprecated_details is not None: return [ checks.Warning( self.system_check_deprecated_details.get( 'msg', '%s has been deprecated.' % self.__class__.__name__ ), hint=self.system_check_deprecated_details.get('hint'), obj=self, id=self.system_check_deprecated_details.get('id', 'fields.WXXX'), ) ] return [] def get_col(self, alias, output_field=None): if output_field is None: output_field = self if alias != self.model._meta.db_table or output_field != self: from django.db.models.expressions import Col return Col(alias, self, output_field) else: return self.cached_col @cached_property def cached_col(self): from django.db.models.expressions import Col return Col(self.model._meta.db_table, self) def select_format(self, compiler, sql, params): """ Custom format for select clauses. For example, GIS columns need to be selected as AsText(table.col) on MySQL as the table.col data can't be used by Django. """ return sql, params def deconstruct(self): """ Return enough information to recreate the field as a 4-tuple: * The name of the field on the model, if contribute_to_class() has been run. * The import path of the field, including the class, e.g. django.db.models.IntegerField. This should be the most portable version, so less specific may be better. * A list of positional arguments. * A dict of keyword arguments. Note that the positional or keyword arguments must contain values of the following types (including inner values of collection types): * None, bool, str, int, float, complex, set, frozenset, list, tuple, dict * UUID * datetime.datetime (naive), datetime.date * top-level classes, top-level functions - will be referenced by their full import path * Storage instances - these have their own deconstruct() method This is because the values here must be serialized into a text format (possibly new Python code, possibly JSON) and these are the only types with encoding handlers defined. There's no need to return the exact way the field was instantiated this time, just ensure that the resulting field is the same - prefer keyword arguments over positional ones, and omit parameters with their default values. 
""" # Short-form way of fetching all the default parameters keywords = {} possibles = { "verbose_name": None, "primary_key": False, "max_length": None, "unique": False, "blank": False, "null": False, "db_index": False, "default": NOT_PROVIDED, "editable": True, "serialize": True, "unique_for_date": None, "unique_for_month": None, "unique_for_year": None, "choices": None, "help_text": '', "db_column": None, "db_tablespace": None, "auto_created": False, "validators": [], "error_messages": None, } attr_overrides = { "unique": "_unique", "error_messages": "_error_messages", "validators": "_validators", "verbose_name": "_verbose_name", "db_tablespace": "_db_tablespace", } equals_comparison = {"choices", "validators"} for name, default in possibles.items(): value = getattr(self, attr_overrides.get(name, name)) # Unroll anything iterable for choices into a concrete list if name == "choices" and isinstance(value, collections.abc.Iterable): value = list(value) # Do correct kind of comparison if name in equals_comparison: if value != default: keywords[name] = value else: if value is not default: keywords[name] = value # Work out path - we shorten it for known Django core fields path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__) if path.startswith("django.db.models.fields.related"): path = path.replace("django.db.models.fields.related", "django.db.models") elif path.startswith("django.db.models.fields.files"): path = path.replace("django.db.models.fields.files", "django.db.models") elif path.startswith('django.db.models.fields.json'): path = path.replace('django.db.models.fields.json', 'django.db.models') elif path.startswith("django.db.models.fields.proxy"): path = path.replace("django.db.models.fields.proxy", "django.db.models") elif path.startswith("django.db.models.fields"): path = path.replace("django.db.models.fields", "django.db.models") # Return basic info - other fields should override this. return (self.name, path, [], keywords) def clone(self): """ Uses deconstruct() to clone a new copy of this Field. Will not preserve any class attachments/attribute names. """ name, path, args, kwargs = self.deconstruct() return self.__class__(*args, **kwargs) def __eq__(self, other): # Needed for @total_ordering if isinstance(other, Field): return ( self.creation_counter == other.creation_counter and getattr(self, 'model', None) == getattr(other, 'model', None) ) return NotImplemented def __lt__(self, other): # This is needed because bisect does not take a comparison function. # Order by creation_counter first for backward compatibility. if isinstance(other, Field): if ( self.creation_counter != other.creation_counter or not hasattr(self, 'model') and not hasattr(other, 'model') ): return self.creation_counter < other.creation_counter elif hasattr(self, 'model') != hasattr(other, 'model'): return not hasattr(self, 'model') # Order no-model fields first else: # creation_counter's are equal, compare only models. return ( (self.model._meta.app_label, self.model._meta.model_name) < (other.model._meta.app_label, other.model._meta.model_name) ) return NotImplemented def __hash__(self): return hash(( self.creation_counter, self.model._meta.app_label if hasattr(self, 'model') else None, self.model._meta.model_name if hasattr(self, 'model') else None, )) def __deepcopy__(self, memodict): # We don't have to deepcopy very much here, since most things are not # intended to be altered after initial creation. 
obj = copy.copy(self) if self.remote_field: obj.remote_field = copy.copy(self.remote_field) if hasattr(self.remote_field, 'field') and self.remote_field.field is self: obj.remote_field.field = obj memodict[id(self)] = obj return obj def __copy__(self): # We need to avoid hitting __reduce__, so define this # slightly weird copy construct. obj = Empty() obj.__class__ = self.__class__ obj.__dict__ = self.__dict__.copy() return obj def __reduce__(self): """ Pickling should return the model._meta.fields instance of the field, not a new copy of that field. So, use the app registry to load the model and then the field back. """ if not hasattr(self, 'model'): # Fields are sometimes used without attaching them to models (for # example in aggregation). In this case give back a plain field # instance. The code below will create a new empty instance of # class self.__class__, then update its dict with self.__dict__ # values - so, this is very close to normal pickle. state = self.__dict__.copy() # The _get_default cached_property can't be pickled due to lambda # usage. state.pop('_get_default', None) return _empty, (self.__class__,), state return _load_field, (self.model._meta.app_label, self.model._meta.object_name, self.name) def get_pk_value_on_save(self, instance): """ Hook to generate new PK values on save. This method is called when saving instances with no primary key value set. If this method returns something else than None, then the returned value is used when saving the new instance. """ if self.default: return self.get_default() return None def to_python(self, value): """ Convert the input value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Return the converted value. Subclasses should override this. """ return value @cached_property def validators(self): """ Some validators can't be created at field initialization time. This method provides a way to delay their creation until required. """ return [*self.default_validators, *self._validators] def run_validators(self, value): if value in self.empty_values: return errors = [] for v in self.validators: try: v(value) except exceptions.ValidationError as e: if hasattr(e, 'code') and e.code in self.error_messages: e.message = self.error_messages[e.code] errors.extend(e.error_list) if errors: raise exceptions.ValidationError(errors) def validate(self, value, model_instance): """ Validate value and raise ValidationError if necessary. Subclasses should override this to provide validation logic. """ if not self.editable: # Skip validation for non-editable fields. return if self.choices is not None and value not in self.empty_values: for option_key, option_value in self.choices: if isinstance(option_value, (list, tuple)): # This is an optgroup, so look inside the group for # options. for optgroup_key, optgroup_value in option_value: if value == optgroup_key: return elif value == option_key: return raise exceptions.ValidationError( self.error_messages['invalid_choice'], code='invalid_choice', params={'value': value}, ) if value is None and not self.null: raise exceptions.ValidationError(self.error_messages['null'], code='null') if not self.blank and value in self.empty_values: raise exceptions.ValidationError(self.error_messages['blank'], code='blank') def clean(self, value, model_instance): """ Convert the value's type and run validation. Validation errors from to_python() and validate() are propagated. Return the correct value if no error is raised. 
""" value = self.to_python(value) self.validate(value, model_instance) self.run_validators(value) return value def db_type_parameters(self, connection): return DictWrapper(self.__dict__, connection.ops.quote_name, 'qn_') def db_check(self, connection): """ Return the database column check constraint for this field, for the provided connection. Works the same way as db_type() for the case that get_internal_type() does not map to a preexisting model field. """ data = self.db_type_parameters(connection) try: return connection.data_type_check_constraints[self.get_internal_type()] % data except KeyError: return None def db_type(self, connection): """ Return the database column data type for this field, for the provided connection. """ # The default implementation of this method looks at the # backend-specific data_types dictionary, looking up the field by its # "internal type". # # A Field class can implement the get_internal_type() method to specify # which *preexisting* Django Field class it's most similar to -- i.e., # a custom field might be represented by a TEXT column type, which is # the same as the TextField Django field type, which means the custom # field's get_internal_type() returns 'TextField'. # # But the limitation of the get_internal_type() / data_types approach # is that it cannot handle database column types that aren't already # mapped to one of the built-in Django field types. In this case, you # can implement db_type() instead of get_internal_type() to specify # exactly which wacky database column type you want to use. data = self.db_type_parameters(connection) try: return connection.data_types[self.get_internal_type()] % data except KeyError: return None def rel_db_type(self, connection): """ Return the data type that a related field pointing to this field should use. For example, this method is called by ForeignKey and OneToOneField to determine its data type. """ return self.db_type(connection) def cast_db_type(self, connection): """Return the data type to use in the Cast() function.""" db_type = connection.ops.cast_data_types.get(self.get_internal_type()) if db_type: return db_type % self.db_type_parameters(connection) return self.db_type(connection) def db_parameters(self, connection): """ Extension of db_type(), providing a range of different return values (type, checks). This will look at db_type(), allowing custom model fields to override it. """ type_string = self.db_type(connection) check_string = self.db_check(connection) return { "type": type_string, "check": check_string, } def db_type_suffix(self, connection): return connection.data_types_suffix.get(self.get_internal_type()) def get_db_converters(self, connection): if hasattr(self, 'from_db_value'): return [self.from_db_value] return [] @property def unique(self): return self._unique or self.primary_key @property def db_tablespace(self): return self._db_tablespace or settings.DEFAULT_INDEX_TABLESPACE @property def db_returning(self): """ Private API intended only to be used by Django itself. Currently only the PostgreSQL backend supports returning multiple fields on a model. """ return False def set_attributes_from_name(self, name): self.name = self.name or name self.attname, self.column = self.get_attname_column() self.concrete = self.column is not None if self.verbose_name is None and self.name: self.verbose_name = self.name.replace('_', ' ') def contribute_to_class(self, cls, name, private_only=False): """ Register the field with the model class it belongs to. 
If private_only is True, create a separate instance of this field for every subclass of cls, even if cls is not an abstract model. """ self.set_attributes_from_name(name) self.model = cls cls._meta.add_field(self, private=private_only) if self.column: # Don't override classmethods with the descriptor. This means that # if you have a classmethod and a field with the same name, then # such fields can't be deferred (we don't have a check for this). if not getattr(cls, self.attname, None): setattr(cls, self.attname, self.descriptor_class(self)) if self.choices is not None: # Don't override a get_FOO_display() method defined explicitly on # this class, but don't check methods derived from inheritance, to # allow overriding inherited choices. For more complex inheritance # structures users should override contribute_to_class(). if 'get_%s_display' % self.name not in cls.__dict__: setattr( cls, 'get_%s_display' % self.name, partialmethod(cls._get_FIELD_display, field=self), ) def get_filter_kwargs_for_object(self, obj): """ Return a dict that when passed as kwargs to self.model.filter(), would yield all instances having the same value for this field as obj has. """ return {self.name: getattr(obj, self.attname)} def get_attname(self): return self.name def get_attname_column(self): attname = self.get_attname() column = self.db_column or attname return attname, column def get_internal_type(self): return self.__class__.__name__ def pre_save(self, model_instance, add): """Return field's value just before saving.""" return getattr(model_instance, self.attname) def get_prep_value(self, value): """Perform preliminary non-db specific value checks and conversions.""" if isinstance(value, Promise): value = value._proxy____cast() return value def get_db_prep_value(self, value, connection, prepared=False): """ Return field's value prepared for interacting with the database backend. Used by the default implementations of get_db_prep_save(). """ if not prepared: value = self.get_prep_value(value) return value def get_db_prep_save(self, value, connection): """Return field's value prepared for saving into a database.""" return self.get_db_prep_value(value, connection=connection, prepared=False) def has_default(self): """Return a boolean of whether this field has a default value.""" return self.default is not NOT_PROVIDED def get_default(self): """Return the default value for this field.""" return self._get_default() @cached_property def _get_default(self): if self.has_default(): if callable(self.default): return self.default return lambda: self.default if not self.empty_strings_allowed or self.null and not connection.features.interprets_empty_strings_as_nulls: return return_None return str # return empty string def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None, ordering=()): """ Return choices with a default blank choices included, for use as <select> choices for this field. 
""" if self.choices is not None: choices = list(self.choices) if include_blank: blank_defined = any(choice in ('', None) for choice, _ in self.flatchoices) if not blank_defined: choices = blank_choice + choices return choices rel_model = self.remote_field.model limit_choices_to = limit_choices_to or self.get_limit_choices_to() choice_func = operator.attrgetter( self.remote_field.get_related_field().attname if hasattr(self.remote_field, 'get_related_field') else 'pk' ) qs = rel_model._default_manager.complex_filter(limit_choices_to) if ordering: qs = qs.order_by(*ordering) return (blank_choice if include_blank else []) + [ (choice_func(x), str(x)) for x in qs ] def value_to_string(self, obj): """ Return a string value of this field from the passed obj. This is used by the serialization framework. """ return str(self.value_from_object(obj)) def _get_flatchoices(self): """Flattened version of choices tuple.""" if self.choices is None: return [] flat = [] for choice, value in self.choices: if isinstance(value, (list, tuple)): flat.extend(value) else: flat.append((choice, value)) return flat flatchoices = property(_get_flatchoices) def save_form_data(self, instance, data): setattr(instance, self.name, data) def formfield(self, form_class=None, choices_form_class=None, **kwargs): """Return a django.forms.Field instance for this field.""" defaults = { 'required': not self.blank, 'label': capfirst(self.verbose_name), 'help_text': self.help_text, } if self.has_default(): if callable(self.default): defaults['initial'] = self.default defaults['show_hidden_initial'] = True else: defaults['initial'] = self.get_default() if self.choices is not None: # Fields with choices get special treatment. include_blank = (self.blank or not (self.has_default() or 'initial' in kwargs)) defaults['choices'] = self.get_choices(include_blank=include_blank) defaults['coerce'] = self.to_python if self.null: defaults['empty_value'] = None if choices_form_class is not None: form_class = choices_form_class else: form_class = forms.TypedChoiceField # Many of the subclass-specific formfield arguments (min_value, # max_value) don't apply for choice fields, so be sure to only pass # the values that TypedChoiceField will understand. for k in list(kwargs): if k not in ('coerce', 'empty_value', 'choices', 'required', 'widget', 'label', 'initial', 'help_text', 'error_messages', 'show_hidden_initial', 'disabled'): del kwargs[k] defaults.update(kwargs) if form_class is None: form_class = forms.CharField return form_class(**defaults) def value_from_object(self, obj): """Return the value of this field in the given model instance.""" return getattr(obj, self.attname) class BooleanField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be either True or False.'), 'invalid_nullable': _('“%(value)s” value must be either True, False, or None.'), } description = _("Boolean (Either True or False)") def get_internal_type(self): return "BooleanField" def to_python(self, value): if self.null and value in self.empty_values: return None if value in (True, False): # 1/0 are equal to True/False. bool() converts former to latter. 
return bool(value) if value in ('t', 'True', '1'): return True if value in ('f', 'False', '0'): return False raise exceptions.ValidationError( self.error_messages['invalid_nullable' if self.null else 'invalid'], code='invalid', params={'value': value}, ) def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None return self.to_python(value) def formfield(self, **kwargs): if self.choices is not None: include_blank = not (self.has_default() or 'initial' in kwargs) defaults = {'choices': self.get_choices(include_blank=include_blank)} else: form_class = forms.NullBooleanField if self.null else forms.BooleanField # In HTML checkboxes, 'required' means "must be checked" which is # different from the choices case ("must select some value"). # required=False allows unchecked checkboxes. defaults = {'form_class': form_class, 'required': False} return super().formfield(**{**defaults, **kwargs}) class CharField(Field): description = _("String (up to %(max_length)s)") def __init__(self, *args, db_collation=None, **kwargs): super().__init__(*args, **kwargs) self.db_collation = db_collation self.validators.append(validators.MaxLengthValidator(self.max_length)) def check(self, **kwargs): databases = kwargs.get('databases') or [] return [ *super().check(**kwargs), *self._check_db_collation(databases), *self._check_max_length_attribute(**kwargs), ] def _check_max_length_attribute(self, **kwargs): if self.max_length is None: return [ checks.Error( "CharFields must define a 'max_length' attribute.", obj=self, id='fields.E120', ) ] elif (not isinstance(self.max_length, int) or isinstance(self.max_length, bool) or self.max_length <= 0): return [ checks.Error( "'max_length' must be a positive integer.", obj=self, id='fields.E121', ) ] else: return [] def _check_db_collation(self, databases): errors = [] for db in databases: if not router.allow_migrate_model(db, self.model): continue connection = connections[db] if not ( self.db_collation is None or 'supports_collation_on_charfield' in self.model._meta.required_db_features or connection.features.supports_collation_on_charfield ): errors.append( checks.Error( '%s does not support a database collation on ' 'CharFields.' % connection.display_name, obj=self, id='fields.E190', ), ) return errors def cast_db_type(self, connection): if self.max_length is None: return connection.ops.cast_char_field_without_max_length return super().cast_db_type(connection) def get_internal_type(self): return "CharField" def to_python(self, value): if isinstance(value, str) or value is None: return value return str(value) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): # Passing max_length to forms.CharField means that the value's length # will be validated twice. This is considered acceptable since we want # the value in the form field (to pass into widget for example). defaults = {'max_length': self.max_length} # TODO: Handle multiple backends with different feature flags. 
if self.null and not connection.features.interprets_empty_strings_as_nulls: defaults['empty_value'] = None defaults.update(kwargs) return super().formfield(**defaults) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.db_collation: kwargs['db_collation'] = self.db_collation return name, path, args, kwargs class CommaSeparatedIntegerField(CharField): default_validators = [validators.validate_comma_separated_integer_list] description = _("Comma-separated integers") system_check_removed_details = { 'msg': ( 'CommaSeparatedIntegerField is removed except for support in ' 'historical migrations.' ), 'hint': ( 'Use CharField(validators=[validate_comma_separated_integer_list]) ' 'instead.' ), 'id': 'fields.E901', } class DateTimeCheckMixin: def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_mutually_exclusive_options(), *self._check_fix_default_value(), ] def _check_mutually_exclusive_options(self): # auto_now, auto_now_add, and default are mutually exclusive # options. The use of more than one of these options together # will trigger an Error mutually_exclusive_options = [self.auto_now_add, self.auto_now, self.has_default()] enabled_options = [option not in (None, False) for option in mutually_exclusive_options].count(True) if enabled_options > 1: return [ checks.Error( "The options auto_now, auto_now_add, and default " "are mutually exclusive. Only one of these options " "may be present.", obj=self, id='fields.E160', ) ] else: return [] def _check_fix_default_value(self): return [] class DateField(DateTimeCheckMixin, Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid date format. It must be ' 'in YYYY-MM-DD format.'), 'invalid_date': _('“%(value)s” value has the correct format (YYYY-MM-DD) ' 'but it is an invalid date.'), } description = _("Date (without time)") def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs): self.auto_now, self.auto_now_add = auto_now, auto_now_add if auto_now or auto_now_add: kwargs['editable'] = False kwargs['blank'] = True super().__init__(verbose_name, name, **kwargs) def _check_fix_default_value(self): """ Warn that using an actual date or datetime value is probably wrong; it's only evaluated on server startup. """ if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): if not timezone.is_naive(value): value = timezone.make_naive(value, timezone.utc) value = value.date() elif isinstance(value, datetime.date): # Nothing to do, as dates don't have tz information pass else: # No explicit date / datetime value -- no checks necessary return [] offset = datetime.timedelta(days=1) lower = (now - offset).date() upper = (now + offset).date() if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. 
If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.auto_now: kwargs['auto_now'] = True if self.auto_now_add: kwargs['auto_now_add'] = True if self.auto_now or self.auto_now_add: del kwargs['editable'] del kwargs['blank'] return name, path, args, kwargs def get_internal_type(self): return "DateField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.datetime): if settings.USE_TZ and timezone.is_aware(value): # Convert aware datetimes to the default time zone # before casting them to dates (#17742). default_timezone = timezone.get_default_timezone() value = timezone.make_naive(value, default_timezone) return value.date() if isinstance(value, datetime.date): return value try: parsed = parse_date(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_date'], code='invalid_date', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = datetime.date.today() setattr(model_instance, self.attname, value) return value else: return super().pre_save(model_instance, add) def contribute_to_class(self, cls, name, **kwargs): super().contribute_to_class(cls, name, **kwargs) if not self.null: setattr( cls, 'get_next_by_%s' % self.name, partialmethod(cls._get_next_or_previous_by_FIELD, field=self, is_next=True) ) setattr( cls, 'get_previous_by_%s' % self.name, partialmethod(cls._get_next_or_previous_by_FIELD, field=self, is_next=False) ) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): # Casts dates into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_datefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.DateField, **kwargs, }) class DateTimeField(DateField): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid format. It must be in ' 'YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format.'), 'invalid_date': _("“%(value)s” value has the correct format " "(YYYY-MM-DD) but it is an invalid date."), 'invalid_datetime': _('“%(value)s” value has the correct format ' '(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) ' 'but it is an invalid date/time.'), } description = _("Date (with time)") # __init__ is inherited from DateField def _check_fix_default_value(self): """ Warn that using an actual date or datetime value is probably wrong; it's only evaluated on server startup. 
""" if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc) elif isinstance(value, datetime.date): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset lower = datetime.datetime(lower.year, lower.month, lower.day) upper = now + second_offset upper = datetime.datetime(upper.year, upper.month, upper.day) value = datetime.datetime(value.year, value.month, value.day) else: # No explicit date / datetime value -- no checks necessary return [] if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def get_internal_type(self): return "DateTimeField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.datetime): return value if isinstance(value, datetime.date): value = datetime.datetime(value.year, value.month, value.day) if settings.USE_TZ: # For backwards compatibility, interpret naive datetimes in # local time. This won't work during DST change, but we can't # do much about it, so we let the exceptions percolate up the # call stack. warnings.warn("DateTimeField %s.%s received a naive datetime " "(%s) while time zone support is active." % (self.model.__name__, self.name, value), RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) return value try: parsed = parse_datetime(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_datetime'], code='invalid_datetime', params={'value': value}, ) try: parsed = parse_date(value) if parsed is not None: return datetime.datetime(parsed.year, parsed.month, parsed.day) except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_date'], code='invalid_date', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = timezone.now() setattr(model_instance, self.attname, value) return value else: return super().pre_save(model_instance, add) # contribute_to_class is inherited from DateField, it registers # get_next_by_FOO and get_prev_by_FOO def get_prep_value(self, value): value = super().get_prep_value(value) value = self.to_python(value) if value is not None and settings.USE_TZ and timezone.is_naive(value): # For backwards compatibility, interpret naive datetimes in local # time. This won't work during DST change, but we can't do much # about it, so we let the exceptions percolate up the call stack. try: name = '%s.%s' % (self.model.__name__, self.name) except AttributeError: name = '(unbound)' warnings.warn("DateTimeField %s received a naive datetime (%s)" " while time zone support is active." 
% (name, value), RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) return value def get_db_prep_value(self, value, connection, prepared=False): # Casts datetimes into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_datetimefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.DateTimeField, **kwargs, }) class DecimalField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be a decimal number.'), } description = _("Decimal number") def __init__(self, verbose_name=None, name=None, max_digits=None, decimal_places=None, **kwargs): self.max_digits, self.decimal_places = max_digits, decimal_places super().__init__(verbose_name, name, **kwargs) def check(self, **kwargs): errors = super().check(**kwargs) digits_errors = [ *self._check_decimal_places(), *self._check_max_digits(), ] if not digits_errors: errors.extend(self._check_decimal_places_and_max_digits(**kwargs)) else: errors.extend(digits_errors) return errors def _check_decimal_places(self): try: decimal_places = int(self.decimal_places) if decimal_places < 0: raise ValueError() except TypeError: return [ checks.Error( "DecimalFields must define a 'decimal_places' attribute.", obj=self, id='fields.E130', ) ] except ValueError: return [ checks.Error( "'decimal_places' must be a non-negative integer.", obj=self, id='fields.E131', ) ] else: return [] def _check_max_digits(self): try: max_digits = int(self.max_digits) if max_digits <= 0: raise ValueError() except TypeError: return [ checks.Error( "DecimalFields must define a 'max_digits' attribute.", obj=self, id='fields.E132', ) ] except ValueError: return [ checks.Error( "'max_digits' must be a positive integer.", obj=self, id='fields.E133', ) ] else: return [] def _check_decimal_places_and_max_digits(self, **kwargs): if int(self.decimal_places) > int(self.max_digits): return [ checks.Error( "'max_digits' must be greater or equal to 'decimal_places'.", obj=self, id='fields.E134', ) ] return [] @cached_property def validators(self): return super().validators + [ validators.DecimalValidator(self.max_digits, self.decimal_places) ] @cached_property def context(self): return decimal.Context(prec=self.max_digits) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.max_digits is not None: kwargs['max_digits'] = self.max_digits if self.decimal_places is not None: kwargs['decimal_places'] = self.decimal_places return name, path, args, kwargs def get_internal_type(self): return "DecimalField" def to_python(self, value): if value is None: return value if isinstance(value, float): return self.context.create_decimal_from_float(value) try: return decimal.Decimal(value) except (decimal.InvalidOperation, TypeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def get_db_prep_save(self, value, connection): return connection.ops.adapt_decimalfield_value(self.to_python(value), self.max_digits, self.decimal_places) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): return super().formfield(**{ 'max_digits': self.max_digits, 'decimal_places': self.decimal_places, 'form_class': 
forms.DecimalField, **kwargs, }) class DurationField(Field): """ Store timedelta objects. Use interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint of microseconds on other databases. """ empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid format. It must be in ' '[DD] [[HH:]MM:]ss[.uuuuuu] format.') } description = _("Duration") def get_internal_type(self): return "DurationField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.timedelta): return value try: parsed = parse_duration(value) except ValueError: pass else: if parsed is not None: return parsed raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def get_db_prep_value(self, value, connection, prepared=False): if connection.features.has_native_duration_field: return value if value is None: return None return duration_microseconds(value) def get_db_converters(self, connection): converters = [] if not connection.features.has_native_duration_field: converters.append(connection.ops.convert_durationfield_value) return converters + super().get_db_converters(connection) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else duration_string(val) def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.DurationField, **kwargs, }) class EmailField(CharField): default_validators = [validators.validate_email] description = _("Email address") def __init__(self, *args, **kwargs): # max_length=254 to be compliant with RFCs 3696 and 5321 kwargs.setdefault('max_length', 254) super().__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() # We do not exclude max_length if it matches default as we want to change # the default in future. return name, path, args, kwargs def formfield(self, **kwargs): # As with CharField, this will cause email validation to be performed # twice. 
return super().formfield(**{ 'form_class': forms.EmailField, **kwargs, }) class FilePathField(Field): description = _("File path") def __init__(self, verbose_name=None, name=None, path='', match=None, recursive=False, allow_files=True, allow_folders=False, **kwargs): self.path, self.match, self.recursive = path, match, recursive self.allow_files, self.allow_folders = allow_files, allow_folders kwargs.setdefault('max_length', 100) super().__init__(verbose_name, name, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_allowing_files_or_folders(**kwargs), ] def _check_allowing_files_or_folders(self, **kwargs): if not self.allow_files and not self.allow_folders: return [ checks.Error( "FilePathFields must have either 'allow_files' or 'allow_folders' set to True.", obj=self, id='fields.E140', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.path != '': kwargs['path'] = self.path if self.match is not None: kwargs['match'] = self.match if self.recursive is not False: kwargs['recursive'] = self.recursive if self.allow_files is not True: kwargs['allow_files'] = self.allow_files if self.allow_folders is not False: kwargs['allow_folders'] = self.allow_folders if kwargs.get("max_length") == 100: del kwargs["max_length"] return name, path, args, kwargs def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None return str(value) def formfield(self, **kwargs): return super().formfield(**{ 'path': self.path() if callable(self.path) else self.path, 'match': self.match, 'recursive': self.recursive, 'form_class': forms.FilePathField, 'allow_files': self.allow_files, 'allow_folders': self.allow_folders, **kwargs, }) def get_internal_type(self): return "FilePathField" class FloatField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be a float.'), } description = _("Floating point number") def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None try: return float(value) except (TypeError, ValueError) as e: raise e.__class__( "Field '%s' expected a number but got %r." % (self.name, value), ) from e def get_internal_type(self): return "FloatField" def to_python(self, value): if value is None: return value try: return float(value) except (TypeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.FloatField, **kwargs, }) class IntegerField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be an integer.'), } description = _("Integer") def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_max_length_warning(), ] def _check_max_length_warning(self): if self.max_length is not None: return [ checks.Warning( "'max_length' is ignored when used with %s." % self.__class__.__name__, hint="Remove 'max_length' from field", obj=self, id='fields.W122', ) ] return [] @cached_property def validators(self): # These validators can't be added at field initialization time since # they're based on values retrieved from `connection`. 
validators_ = super().validators internal_type = self.get_internal_type() min_value, max_value = connection.ops.integer_field_range(internal_type) if min_value is not None and not any( ( isinstance(validator, validators.MinValueValidator) and ( validator.limit_value() if callable(validator.limit_value) else validator.limit_value ) >= min_value ) for validator in validators_ ): validators_.append(validators.MinValueValidator(min_value)) if max_value is not None and not any( ( isinstance(validator, validators.MaxValueValidator) and ( validator.limit_value() if callable(validator.limit_value) else validator.limit_value ) <= max_value ) for validator in validators_ ): validators_.append(validators.MaxValueValidator(max_value)) return validators_ def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None try: return int(value) except (TypeError, ValueError) as e: raise e.__class__( "Field '%s' expected a number but got %r." % (self.name, value), ) from e def get_internal_type(self): return "IntegerField" def to_python(self, value): if value is None: return value try: return int(value) except (TypeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.IntegerField, **kwargs, }) class BigIntegerField(IntegerField): description = _("Big (8 byte) integer") MAX_BIGINT = 9223372036854775807 def get_internal_type(self): return "BigIntegerField" def formfield(self, **kwargs): return super().formfield(**{ 'min_value': -BigIntegerField.MAX_BIGINT - 1, 'max_value': BigIntegerField.MAX_BIGINT, **kwargs, }) class SmallIntegerField(IntegerField): description = _('Small integer') def get_internal_type(self): return 'SmallIntegerField' class IPAddressField(Field): empty_strings_allowed = False description = _("IPv4 address") system_check_removed_details = { 'msg': ( 'IPAddressField has been removed except for support in ' 'historical migrations.' 
), 'hint': 'Use GenericIPAddressField instead.', 'id': 'fields.E900', } def __init__(self, *args, **kwargs): kwargs['max_length'] = 15 super().__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['max_length'] return name, path, args, kwargs def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None return str(value) def get_internal_type(self): return "IPAddressField" class GenericIPAddressField(Field): empty_strings_allowed = False description = _("IP address") default_error_messages = {} def __init__(self, verbose_name=None, name=None, protocol='both', unpack_ipv4=False, *args, **kwargs): self.unpack_ipv4 = unpack_ipv4 self.protocol = protocol self.default_validators, invalid_error_message = \ validators.ip_address_validators(protocol, unpack_ipv4) self.default_error_messages['invalid'] = invalid_error_message kwargs['max_length'] = 39 super().__init__(verbose_name, name, *args, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_blank_and_null_values(**kwargs), ] def _check_blank_and_null_values(self, **kwargs): if not getattr(self, 'null', False) and getattr(self, 'blank', False): return [ checks.Error( 'GenericIPAddressFields cannot have blank=True if null=False, ' 'as blank values are stored as nulls.', obj=self, id='fields.E150', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.unpack_ipv4 is not False: kwargs['unpack_ipv4'] = self.unpack_ipv4 if self.protocol != "both": kwargs['protocol'] = self.protocol if kwargs.get("max_length") == 39: del kwargs['max_length'] return name, path, args, kwargs def get_internal_type(self): return "GenericIPAddressField" def to_python(self, value): if value is None: return None if not isinstance(value, str): value = str(value) value = value.strip() if ':' in value: return clean_ipv6_address(value, self.unpack_ipv4, self.error_messages['invalid']) return value def get_db_prep_value(self, value, connection, prepared=False): if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_ipaddressfield_value(value) def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None if value and ':' in value: try: return clean_ipv6_address(value, self.unpack_ipv4) except exceptions.ValidationError: pass return str(value) def formfield(self, **kwargs): return super().formfield(**{ 'protocol': self.protocol, 'form_class': forms.GenericIPAddressField, **kwargs, }) class NullBooleanField(BooleanField): default_error_messages = { 'invalid': _('“%(value)s” value must be either None, True or False.'), 'invalid_nullable': _('“%(value)s” value must be either None, True or False.'), } description = _("Boolean (Either True, False or None)") system_check_removed_details = { 'msg': ( 'NullBooleanField is removed except for support in historical ' 'migrations.' 
), 'hint': 'Use BooleanField(null=True) instead.', 'id': 'fields.E903', } def __init__(self, *args, **kwargs): kwargs['null'] = True kwargs['blank'] = True super().__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['null'] del kwargs['blank'] return name, path, args, kwargs def get_internal_type(self): return "NullBooleanField" class PositiveIntegerRelDbTypeMixin: def __init_subclass__(cls, **kwargs): super().__init_subclass__(**kwargs) if not hasattr(cls, 'integer_field_class'): cls.integer_field_class = next( ( parent for parent in cls.__mro__[1:] if issubclass(parent, IntegerField) ), None, ) def rel_db_type(self, connection): """ Return the data type that a related field pointing to this field should use. In most cases, a foreign key pointing to a positive integer primary key will have an integer column data type but some databases (e.g. MySQL) have an unsigned integer type. In that case (related_fields_match_type=True), the primary key should return its db_type. """ if connection.features.related_fields_match_type: return self.db_type(connection) else: return self.integer_field_class().db_type(connection=connection) class PositiveBigIntegerField(PositiveIntegerRelDbTypeMixin, BigIntegerField): description = _('Positive big integer') def get_internal_type(self): return 'PositiveBigIntegerField' def formfield(self, **kwargs): return super().formfield(**{ 'min_value': 0, **kwargs, }) class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField): description = _("Positive integer") def get_internal_type(self): return "PositiveIntegerField" def formfield(self, **kwargs): return super().formfield(**{ 'min_value': 0, **kwargs, }) class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, SmallIntegerField): description = _("Positive small integer") def get_internal_type(self): return "PositiveSmallIntegerField" def formfield(self, **kwargs): return super().formfield(**{ 'min_value': 0, **kwargs, }) class SlugField(CharField): default_validators = [validators.validate_slug] description = _("Slug (up to %(max_length)s)") def __init__(self, *args, max_length=50, db_index=True, allow_unicode=False, **kwargs): self.allow_unicode = allow_unicode if self.allow_unicode: self.default_validators = [validators.validate_unicode_slug] super().__init__(*args, max_length=max_length, db_index=db_index, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if kwargs.get("max_length") == 50: del kwargs['max_length'] if self.db_index is False: kwargs['db_index'] = False else: del kwargs['db_index'] if self.allow_unicode is not False: kwargs['allow_unicode'] = self.allow_unicode return name, path, args, kwargs def get_internal_type(self): return "SlugField" def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.SlugField, 'allow_unicode': self.allow_unicode, **kwargs, }) class TextField(Field): description = _("Text") def __init__(self, *args, db_collation=None, **kwargs): super().__init__(*args, **kwargs) self.db_collation = db_collation def check(self, **kwargs): databases = kwargs.get('databases') or [] return [ *super().check(**kwargs), *self._check_db_collation(databases), ] def _check_db_collation(self, databases): errors = [] for db in databases: if not router.allow_migrate_model(db, self.model): continue connection = connections[db] if not ( self.db_collation is None or 'supports_collation_on_textfield' in self.model._meta.required_db_features or 
connection.features.supports_collation_on_textfield ): errors.append( checks.Error( '%s does not support a database collation on ' 'TextFields.' % connection.display_name, obj=self, id='fields.E190', ), ) return errors def get_internal_type(self): return "TextField" def to_python(self, value): if isinstance(value, str) or value is None: return value return str(value) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): # Passing max_length to forms.CharField means that the value's length # will be validated twice. This is considered acceptable since we want # the value in the form field (to pass into widget for example). return super().formfield(**{ 'max_length': self.max_length, **({} if self.choices is not None else {'widget': forms.Textarea}), **kwargs, }) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.db_collation: kwargs['db_collation'] = self.db_collation return name, path, args, kwargs class TimeField(DateTimeCheckMixin, Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid format. It must be in ' 'HH:MM[:ss[.uuuuuu]] format.'), 'invalid_time': _('“%(value)s” value has the correct format ' '(HH:MM[:ss[.uuuuuu]]) but it is an invalid time.'), } description = _("Time") def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs): self.auto_now, self.auto_now_add = auto_now, auto_now_add if auto_now or auto_now_add: kwargs['editable'] = False kwargs['blank'] = True super().__init__(verbose_name, name, **kwargs) def _check_fix_default_value(self): """ Warn that using an actual date or datetime value is probably wrong; it's only evaluated on server startup. """ if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc) elif isinstance(value, datetime.time): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset value = datetime.datetime.combine(now.date(), value) if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc).time() else: # No explicit time / datetime value -- no checks necessary return [] if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.auto_now is not False: kwargs["auto_now"] = self.auto_now if self.auto_now_add is not False: kwargs["auto_now_add"] = self.auto_now_add if self.auto_now or self.auto_now_add: del kwargs['blank'] del kwargs['editable'] return name, path, args, kwargs def get_internal_type(self): return "TimeField" def to_python(self, value): if value is None: return None if isinstance(value, datetime.time): return value if isinstance(value, datetime.datetime): # Not usually a good idea to pass in a datetime here (it loses # information), but this can be a side-effect of interacting with a # database backend (e.g. 
Oracle), so we'll be accommodating. return value.time() try: parsed = parse_time(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_time'], code='invalid_time', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = datetime.datetime.now().time() setattr(model_instance, self.attname, value) return value else: return super().pre_save(model_instance, add) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): # Casts times into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_timefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.TimeField, **kwargs, }) class URLField(CharField): default_validators = [validators.URLValidator()] description = _("URL") def __init__(self, verbose_name=None, name=None, **kwargs): kwargs.setdefault('max_length', 200) super().__init__(verbose_name, name, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if kwargs.get("max_length") == 200: del kwargs['max_length'] return name, path, args, kwargs def formfield(self, **kwargs): # As with CharField, this will cause URL validation to be performed # twice. return super().formfield(**{ 'form_class': forms.URLField, **kwargs, }) class BinaryField(Field): description = _("Raw binary data") empty_values = [None, b''] def __init__(self, *args, **kwargs): kwargs.setdefault('editable', False) super().__init__(*args, **kwargs) if self.max_length is not None: self.validators.append(validators.MaxLengthValidator(self.max_length)) def check(self, **kwargs): return [*super().check(**kwargs), *self._check_str_default_value()] def _check_str_default_value(self): if self.has_default() and isinstance(self.default, str): return [ checks.Error( "BinaryField's default cannot be a string. 
Use bytes " "content instead.", obj=self, id='fields.E170', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.editable: kwargs['editable'] = True else: del kwargs['editable'] return name, path, args, kwargs def get_internal_type(self): return "BinaryField" def get_placeholder(self, value, compiler, connection): return connection.ops.binary_placeholder_sql(value) def get_default(self): if self.has_default() and not callable(self.default): return self.default default = super().get_default() if default == '': return b'' return default def get_db_prep_value(self, value, connection, prepared=False): value = super().get_db_prep_value(value, connection, prepared) if value is not None: return connection.Database.Binary(value) return value def value_to_string(self, obj): """Binary data is serialized as base64""" return b64encode(self.value_from_object(obj)).decode('ascii') def to_python(self, value): # If it's a string, it should be base64-encoded data if isinstance(value, str): return memoryview(b64decode(value.encode('ascii'))) return value class UUIDField(Field): default_error_messages = { 'invalid': _('“%(value)s” is not a valid UUID.'), } description = _('Universally unique identifier') empty_strings_allowed = False def __init__(self, verbose_name=None, **kwargs): kwargs['max_length'] = 32 super().__init__(verbose_name, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['max_length'] return name, path, args, kwargs def get_internal_type(self): return "UUIDField" def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): if value is None: return None if not isinstance(value, uuid.UUID): value = self.to_python(value) if connection.features.has_native_uuid_field: return value return value.hex def to_python(self, value): if value is not None and not isinstance(value, uuid.UUID): input_form = 'int' if isinstance(value, int) else 'hex' try: return uuid.UUID(**{input_form: value}) except (AttributeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) return value def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.UUIDField, **kwargs, }) class AutoFieldMixin: db_returning = True def __init__(self, *args, **kwargs): kwargs['blank'] = True super().__init__(*args, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_primary_key(), ] def _check_primary_key(self): if not self.primary_key: return [ checks.Error( 'AutoFields must set primary_key=True.', obj=self, id='fields.E100', ), ] else: return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['blank'] kwargs['primary_key'] = True return name, path, args, kwargs def validate(self, value, model_instance): pass def get_db_prep_value(self, value, connection, prepared=False): if not prepared: value = self.get_prep_value(value) value = connection.ops.validate_autopk_value(value) return value def contribute_to_class(self, cls, name, **kwargs): assert not cls._meta.auto_field, ( "Model %s can't have more than one auto-generated field." % cls._meta.label ) super().contribute_to_class(cls, name, **kwargs) cls._meta.auto_field = self def formfield(self, **kwargs): return None class AutoFieldMeta(type): """ Metaclass to maintain backward inheritance compatibility for AutoField. 
It is intended that AutoFieldMixin become public API when it is possible to create a non-integer automatically-generated field using column defaults stored in the database. In many areas Django also relies on using isinstance() to check for an automatically-generated field as a subclass of AutoField. A new flag needs to be implemented on Field to be used instead. When these issues have been addressed, this metaclass could be used to deprecate inheritance from AutoField and use of isinstance() with AutoField for detecting automatically-generated fields. """ @property def _subclasses(self): return (BigAutoField, SmallAutoField) def __instancecheck__(self, instance): return isinstance(instance, self._subclasses) or super().__instancecheck__(instance) def __subclasscheck__(self, subclass): return issubclass(subclass, self._subclasses) or super().__subclasscheck__(subclass) class AutoField(AutoFieldMixin, IntegerField, metaclass=AutoFieldMeta): def get_internal_type(self): return 'AutoField' def rel_db_type(self, connection): return IntegerField().db_type(connection=connection) class BigAutoField(AutoFieldMixin, BigIntegerField): def get_internal_type(self): return 'BigAutoField' def rel_db_type(self, connection): return BigIntegerField().db_type(connection=connection) class SmallAutoField(AutoFieldMixin, SmallIntegerField): def get_internal_type(self): return 'SmallAutoField' def rel_db_type(self, connection): return SmallIntegerField().db_type(connection=connection)
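# --- Usage sketch (not part of the module above) ----------------------------
# A minimal illustration of what the AutoFieldMeta metaclass documented above
# provides: isinstance()/issubclass() checks against AutoField also accept
# BigAutoField and SmallAutoField, even though neither inherits from AutoField.
# Assumes a standard Django install; nothing here touches the database.
from django.db.models import AutoField, BigAutoField, SmallAutoField

assert issubclass(BigAutoField, AutoField)
assert issubclass(SmallAutoField, AutoField)
assert isinstance(BigAutoField(primary_key=True), AutoField)
# The check is one-directional: AutoField is not considered a BigAutoField.
assert not issubclass(AutoField, BigAutoField)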
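# --- Usage sketch (not part of the module above) ----------------------------
# A rough illustration of the deconstruct() contract the fields above follow
# for migrations: the returned (name, path, args, kwargs) must be enough to
# recreate an equivalent field, and kwargs that match the field's own defaults
# are dropped. Assumes a standard Django install; the field is unbound.
import importlib

from django.db import models

field = models.SlugField(allow_unicode=True)
name, path, args, kwargs = field.deconstruct()
# SlugField.deconstruct() strips max_length=50 and db_index=True because they
# match its defaults, leaving only the non-default allow_unicode flag.
assert kwargs == {'allow_unicode': True}
assert path == 'django.db.models.SlugField'

# Rebuilding from the deconstructed form yields an equivalent field.
module_path, cls_name = path.rsplit('.', 1)
rebuilt = getattr(importlib.import_module(module_path), cls_name)(*args, **kwargs)
assert rebuilt.allow_unicode is True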
b6e0cf1f49e2c84e26345b0c085503b6d6857bee2857dfb84a16a6da14f4d54f
from datetime import datetime from django.conf import settings from django.db.models.expressions import Func from django.db.models.fields import ( DateField, DateTimeField, DurationField, Field, IntegerField, TimeField, ) from django.db.models.lookups import ( Transform, YearExact, YearGt, YearGte, YearLt, YearLte, ) from django.utils import timezone class TimezoneMixin: tzinfo = None def get_tzname(self): # Timezone conversions must happen to the input datetime *before* # applying a function. 2015-12-31 23:00:00 -02:00 is stored in the # database as 2016-01-01 01:00:00 +00:00. Any results should be # based on the input datetime not the stored datetime. tzname = None if settings.USE_TZ: if self.tzinfo is None: tzname = timezone.get_current_timezone_name() else: tzname = timezone._get_timezone_name(self.tzinfo) return tzname class Extract(TimezoneMixin, Transform): lookup_name = None output_field = IntegerField() def __init__(self, expression, lookup_name=None, tzinfo=None, **extra): if self.lookup_name is None: self.lookup_name = lookup_name if self.lookup_name is None: raise ValueError('lookup_name must be provided') self.tzinfo = tzinfo super().__init__(expression, **extra) def as_sql(self, compiler, connection): sql, params = compiler.compile(self.lhs) lhs_output_field = self.lhs.output_field if isinstance(lhs_output_field, DateTimeField): tzname = self.get_tzname() sql = connection.ops.datetime_extract_sql(self.lookup_name, sql, tzname) elif self.tzinfo is not None: raise ValueError('tzinfo can only be used with DateTimeField.') elif isinstance(lhs_output_field, DateField): sql = connection.ops.date_extract_sql(self.lookup_name, sql) elif isinstance(lhs_output_field, TimeField): sql = connection.ops.time_extract_sql(self.lookup_name, sql) elif isinstance(lhs_output_field, DurationField): if not connection.features.has_native_duration_field: raise ValueError('Extract requires native DurationField database support.') sql = connection.ops.time_extract_sql(self.lookup_name, sql) else: # resolve_expression has already validated the output_field so this # assert should never be hit. assert False, "Tried to Extract from an invalid type." return sql, params def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): copy = super().resolve_expression(query, allow_joins, reuse, summarize, for_save) field = getattr(copy.lhs, 'output_field', None) if field is None: return copy if not isinstance(field, (DateField, DateTimeField, TimeField, DurationField)): raise ValueError( 'Extract input expression must be DateField, DateTimeField, ' 'TimeField, or DurationField.' ) # Passing dates to functions expecting datetimes is most likely a mistake. if type(field) == DateField and copy.lookup_name in ('hour', 'minute', 'second'): raise ValueError( "Cannot extract time component '%s' from DateField '%s'." % (copy.lookup_name, field.name) ) if ( isinstance(field, DurationField) and copy.lookup_name in ('year', 'iso_year', 'month', 'week', 'week_day', 'iso_week_day', 'quarter') ): raise ValueError( "Cannot extract component '%s' from DurationField '%s'." % (copy.lookup_name, field.name) ) return copy class ExtractYear(Extract): lookup_name = 'year' class ExtractIsoYear(Extract): """Return the ISO-8601 week-numbering year.""" lookup_name = 'iso_year' class ExtractMonth(Extract): lookup_name = 'month' class ExtractDay(Extract): lookup_name = 'day' class ExtractWeek(Extract): """ Return 1-52 or 53, based on ISO-8601, i.e., Monday is the first of the week. 
""" lookup_name = 'week' class ExtractWeekDay(Extract): """ Return Sunday=1 through Saturday=7. To replicate this in Python: (mydatetime.isoweekday() % 7) + 1 """ lookup_name = 'week_day' class ExtractIsoWeekDay(Extract): """Return Monday=1 through Sunday=7, based on ISO-8601.""" lookup_name = 'iso_week_day' class ExtractQuarter(Extract): lookup_name = 'quarter' class ExtractHour(Extract): lookup_name = 'hour' class ExtractMinute(Extract): lookup_name = 'minute' class ExtractSecond(Extract): lookup_name = 'second' DateField.register_lookup(ExtractYear) DateField.register_lookup(ExtractMonth) DateField.register_lookup(ExtractDay) DateField.register_lookup(ExtractWeekDay) DateField.register_lookup(ExtractIsoWeekDay) DateField.register_lookup(ExtractWeek) DateField.register_lookup(ExtractIsoYear) DateField.register_lookup(ExtractQuarter) TimeField.register_lookup(ExtractHour) TimeField.register_lookup(ExtractMinute) TimeField.register_lookup(ExtractSecond) DateTimeField.register_lookup(ExtractHour) DateTimeField.register_lookup(ExtractMinute) DateTimeField.register_lookup(ExtractSecond) ExtractYear.register_lookup(YearExact) ExtractYear.register_lookup(YearGt) ExtractYear.register_lookup(YearGte) ExtractYear.register_lookup(YearLt) ExtractYear.register_lookup(YearLte) ExtractIsoYear.register_lookup(YearExact) ExtractIsoYear.register_lookup(YearGt) ExtractIsoYear.register_lookup(YearGte) ExtractIsoYear.register_lookup(YearLt) ExtractIsoYear.register_lookup(YearLte) class Now(Func): template = 'CURRENT_TIMESTAMP' output_field = DateTimeField() def as_postgresql(self, compiler, connection, **extra_context): # PostgreSQL's CURRENT_TIMESTAMP means "the time at the start of the # transaction". Use STATEMENT_TIMESTAMP to be cross-compatible with # other databases. return self.as_sql(compiler, connection, template='STATEMENT_TIMESTAMP()', **extra_context) class TruncBase(TimezoneMixin, Transform): kind = None tzinfo = None def __init__(self, expression, output_field=None, tzinfo=None, is_dst=None, **extra): self.tzinfo = tzinfo self.is_dst = is_dst super().__init__(expression, output_field=output_field, **extra) def as_sql(self, compiler, connection): inner_sql, inner_params = compiler.compile(self.lhs) tzname = None if isinstance(self.lhs.output_field, DateTimeField): tzname = self.get_tzname() elif self.tzinfo is not None: raise ValueError('tzinfo can only be used with DateTimeField.') if isinstance(self.output_field, DateTimeField): sql = connection.ops.datetime_trunc_sql(self.kind, inner_sql, tzname) elif isinstance(self.output_field, DateField): sql = connection.ops.date_trunc_sql(self.kind, inner_sql, tzname) elif isinstance(self.output_field, TimeField): sql = connection.ops.time_trunc_sql(self.kind, inner_sql, tzname) else: raise ValueError('Trunc only valid on DateField, TimeField, or DateTimeField.') return sql, inner_params def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): copy = super().resolve_expression(query, allow_joins, reuse, summarize, for_save) field = copy.lhs.output_field # DateTimeField is a subclass of DateField so this works for both. assert isinstance(field, (DateField, TimeField)), ( "%r isn't a DateField, TimeField, or DateTimeField." % field.name ) # If self.output_field was None, then accessing the field will trigger # the resolver to assign it to self.lhs.output_field. 
if not isinstance(copy.output_field, (DateField, DateTimeField, TimeField)): raise ValueError('output_field must be either DateField, TimeField, or DateTimeField') # Passing dates or times to functions expecting datetimes is most # likely a mistake. class_output_field = self.__class__.output_field if isinstance(self.__class__.output_field, Field) else None output_field = class_output_field or copy.output_field has_explicit_output_field = class_output_field or field.__class__ is not copy.output_field.__class__ if type(field) == DateField and ( isinstance(output_field, DateTimeField) or copy.kind in ('hour', 'minute', 'second', 'time')): raise ValueError("Cannot truncate DateField '%s' to %s." % ( field.name, output_field.__class__.__name__ if has_explicit_output_field else 'DateTimeField' )) elif isinstance(field, TimeField) and ( isinstance(output_field, DateTimeField) or copy.kind in ('year', 'quarter', 'month', 'week', 'day', 'date')): raise ValueError("Cannot truncate TimeField '%s' to %s." % ( field.name, output_field.__class__.__name__ if has_explicit_output_field else 'DateTimeField' )) return copy def convert_value(self, value, expression, connection): if isinstance(self.output_field, DateTimeField): if not settings.USE_TZ: pass elif value is not None: value = value.replace(tzinfo=None) value = timezone.make_aware(value, self.tzinfo, is_dst=self.is_dst) elif not connection.features.has_zoneinfo_database: raise ValueError( 'Database returned an invalid datetime value. Are time ' 'zone definitions for your database installed?' ) elif isinstance(value, datetime): if value is None: pass elif isinstance(self.output_field, DateField): value = value.date() elif isinstance(self.output_field, TimeField): value = value.time() return value class Trunc(TruncBase): def __init__(self, expression, kind, output_field=None, tzinfo=None, is_dst=None, **extra): self.kind = kind super().__init__( expression, output_field=output_field, tzinfo=tzinfo, is_dst=is_dst, **extra ) class TruncYear(TruncBase): kind = 'year' class TruncQuarter(TruncBase): kind = 'quarter' class TruncMonth(TruncBase): kind = 'month' class TruncWeek(TruncBase): """Truncate to midnight on the Monday of the week.""" kind = 'week' class TruncDay(TruncBase): kind = 'day' class TruncDate(TruncBase): kind = 'date' lookup_name = 'date' output_field = DateField() def as_sql(self, compiler, connection): # Cast to date rather than truncate to date. lhs, lhs_params = compiler.compile(self.lhs) tzname = self.get_tzname() sql = connection.ops.datetime_cast_date_sql(lhs, tzname) return sql, lhs_params class TruncTime(TruncBase): kind = 'time' lookup_name = 'time' output_field = TimeField() def as_sql(self, compiler, connection): # Cast to time rather than truncate to time. lhs, lhs_params = compiler.compile(self.lhs) tzname = self.get_tzname() sql = connection.ops.datetime_cast_time_sql(lhs, tzname) return sql, lhs_params class TruncHour(TruncBase): kind = 'hour' class TruncMinute(TruncBase): kind = 'minute' class TruncSecond(TruncBase): kind = 'second' DateTimeField.register_lookup(TruncDate) DateTimeField.register_lookup(TruncTime)
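# --- Usage sketch (not part of the module above) ----------------------------
# How the Extract and Trunc transforms defined in this file are typically used
# from a queryset. `Article` and its `pub_date` DateTimeField are hypothetical
# stand-ins for any model with a DateTimeField.
from django.db.models import Count
from django.db.models.functions import ExtractYear, TruncMonth

from myapp.models import Article  # hypothetical app and model

# Count rows per calendar year; ExtractYear compiles to the backend's
# date/datetime extract SQL via Extract.as_sql() above.
per_year = (
    Article.objects
    .annotate(year=ExtractYear('pub_date'))
    .values('year')
    .annotate(total=Count('id'))
    .order_by('year')
)

# TruncMonth truncates to midnight on the first day of the month. Because the
# Extract classes register themselves as lookups above, the same year filter
# is also reachable as Article.objects.filter(pub_date__year=2021).
per_month = (
    Article.objects
    .annotate(month=TruncMonth('pub_date'))
    .values('month')
    .annotate(total=Count('id'))
)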
d5e933f629df8ab8a3af9d11a4063152b5ea68b446c3e4e67cc527bec28a37a0
""" Create SQL statements for QuerySets. The code in here encapsulates all of the SQL construction so that QuerySets themselves do not have to (and could be backed by things other than SQL databases). The abstraction barrier only works one way: this module has to know all about the internals of models in order to get the information it needs. """ import copy import difflib import functools import sys from collections import Counter, namedtuple from collections.abc import Iterator, Mapping from itertools import chain, count, product from string import ascii_uppercase from django.core.exceptions import FieldDoesNotExist, FieldError from django.db import DEFAULT_DB_ALIAS, NotSupportedError, connections from django.db.models.aggregates import Count from django.db.models.constants import LOOKUP_SEP from django.db.models.expressions import ( BaseExpression, Col, Exists, F, OuterRef, Ref, ResolvedOuterRef, ) from django.db.models.fields import Field from django.db.models.fields.related_lookups import MultiColSource from django.db.models.lookups import Lookup from django.db.models.query_utils import ( Q, check_rel_lookup_compatibility, refs_expression, ) from django.db.models.sql.constants import INNER, LOUTER, ORDER_DIR, SINGLE from django.db.models.sql.datastructures import ( BaseTable, Empty, Join, MultiJoin, ) from django.db.models.sql.where import ( AND, OR, ExtraWhere, NothingNode, WhereNode, ) from django.utils.functional import cached_property from django.utils.tree import Node __all__ = ['Query', 'RawQuery'] def get_field_names_from_opts(opts): return set(chain.from_iterable( (f.name, f.attname) if f.concrete else (f.name,) for f in opts.get_fields() )) def get_children_from_q(q): for child in q.children: if isinstance(child, Node): yield from get_children_from_q(child) else: yield child JoinInfo = namedtuple( 'JoinInfo', ('final_field', 'targets', 'opts', 'joins', 'path', 'transform_function') ) class RawQuery: """A single raw SQL query.""" def __init__(self, sql, using, params=()): self.params = params self.sql = sql self.using = using self.cursor = None # Mirror some properties of a normal query so that # the compiler can be used to process results. self.low_mark, self.high_mark = 0, None # Used for offset/limit self.extra_select = {} self.annotation_select = {} def chain(self, using): return self.clone(using) def clone(self, using): return RawQuery(self.sql, using, params=self.params) def get_columns(self): if self.cursor is None: self._execute_query() converter = connections[self.using].introspection.identifier_converter return [converter(column_meta[0]) for column_meta in self.cursor.description] def __iter__(self): # Always execute a new query for a new iterator. # This could be optimized with a cache at the expense of RAM. self._execute_query() if not connections[self.using].features.can_use_chunked_reads: # If the database can't use chunked reads we need to make sure we # evaluate the entire query up front. result = list(self.cursor) else: result = self.cursor return iter(result) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) @property def params_type(self): if self.params is None: return None return dict if isinstance(self.params, Mapping) else tuple def __str__(self): if self.params_type is None: return self.sql return self.sql % self.params_type(self.params) def _execute_query(self): connection = connections[self.using] # Adapt parameters to the database, as much as possible considering # that the target type isn't known. See #17755. 
params_type = self.params_type adapter = connection.ops.adapt_unknown_value if params_type is tuple: params = tuple(adapter(val) for val in self.params) elif params_type is dict: params = {key: adapter(val) for key, val in self.params.items()} elif params_type is None: params = None else: raise RuntimeError("Unexpected params type: %s" % params_type) self.cursor = connection.cursor() self.cursor.execute(self.sql, params) class Query(BaseExpression): """A single SQL query.""" alias_prefix = 'T' subq_aliases = frozenset([alias_prefix]) compiler = 'SQLCompiler' def __init__(self, model, where=WhereNode, alias_cols=True): self.model = model self.alias_refcount = {} # alias_map is the most important data structure regarding joins. # It's used for recording which joins exist in the query and what # types they are. The key is the alias of the joined table (possibly # the table name) and the value is a Join-like object (see # sql.datastructures.Join for more information). self.alias_map = {} # Whether to provide alias to columns during reference resolving. self.alias_cols = alias_cols # Sometimes the query contains references to aliases in outer queries (as # a result of split_exclude). Correct alias quoting needs to know these # aliases too. # Map external tables to whether they are aliased. self.external_aliases = {} self.table_map = {} # Maps table names to list of aliases. self.default_cols = True self.default_ordering = True self.standard_ordering = True self.used_aliases = set() self.filter_is_sticky = False self.subquery = False # SQL-related attributes # Select and related select clauses are expressions to use in the # SELECT clause of the query. # The select is used for cases where we want to set up the select # clause to contain other than default fields (values(), subqueries...) # Note that annotations go to annotations dictionary. self.select = () self.where = where() self.where_class = where # The group_by attribute can have one of the following forms: # - None: no group by at all in the query # - A tuple of expressions: group by (at least) those expressions. # String refs are also allowed for now. # - True: group by all select fields of the model # See compiler.get_group_by() for details. self.group_by = None self.order_by = () self.low_mark, self.high_mark = 0, None # Used for offset/limit self.distinct = False self.distinct_fields = () self.select_for_update = False self.select_for_update_nowait = False self.select_for_update_skip_locked = False self.select_for_update_of = () self.select_for_no_key_update = False self.select_related = False # Arbitrary limit for select_related to prevents infinite recursion. self.max_depth = 5 # Holds the selects defined by a call to values() or values_list() # excluding annotation_select and extra_select. self.values_select = () # SQL annotation-related attributes self.annotations = {} # Maps alias -> Annotation Expression self.annotation_select_mask = None self._annotation_select_cache = None # Set combination attributes self.combinator = None self.combinator_all = False self.combined_queries = () # These are for extensions. The contents are more or less appended # verbatim to the appropriate clause. self.extra = {} # Maps col_alias -> (col_sql, params). self.extra_select_mask = None self._extra_select_cache = None self.extra_tables = () self.extra_order_by = () # A tuple that is a set of model field names and either True, if these # are the fields to defer, or False if these are the only fields to # load. 
self.deferred_loading = (frozenset(), True) self._filtered_relations = {} self.explain_query = False self.explain_format = None self.explain_options = {} @property def output_field(self): if len(self.select) == 1: select = self.select[0] return getattr(select, 'target', None) or select.field elif len(self.annotation_select) == 1: return next(iter(self.annotation_select.values())).output_field @property def has_select_fields(self): return bool(self.select or self.annotation_select_mask or self.extra_select_mask) @cached_property def base_table(self): for alias in self.alias_map: return alias def __str__(self): """ Return the query as a string of SQL with the parameter values substituted in (use sql_with_params() to see the unsubstituted string). Parameter values won't necessarily be quoted correctly, since that is done by the database interface at execution time. """ sql, params = self.sql_with_params() return sql % params def sql_with_params(self): """ Return the query as an SQL string and the parameters that will be substituted into the query. """ return self.get_compiler(DEFAULT_DB_ALIAS).as_sql() def __deepcopy__(self, memo): """Limit the amount of work when a Query is deepcopied.""" result = self.clone() memo[id(self)] = result return result def get_compiler(self, using=None, connection=None): if using is None and connection is None: raise ValueError("Need either using or connection") if using: connection = connections[using] return connection.ops.compiler(self.compiler)(self, connection, using) def get_meta(self): """ Return the Options instance (the model._meta) from which to start processing. Normally, this is self.model._meta, but it can be changed by subclasses. """ return self.model._meta def clone(self): """ Return a copy of the current Query. A lightweight alternative to to deepcopy(). """ obj = Empty() obj.__class__ = self.__class__ # Copy references to everything. obj.__dict__ = self.__dict__.copy() # Clone attributes that can't use shallow copy. obj.alias_refcount = self.alias_refcount.copy() obj.alias_map = self.alias_map.copy() obj.external_aliases = self.external_aliases.copy() obj.table_map = self.table_map.copy() obj.where = self.where.clone() obj.annotations = self.annotations.copy() if self.annotation_select_mask is None: obj.annotation_select_mask = None else: obj.annotation_select_mask = self.annotation_select_mask.copy() obj.combined_queries = tuple(query.clone() for query in self.combined_queries) # _annotation_select_cache cannot be copied, as doing so breaks the # (necessary) state in which both annotations and # _annotation_select_cache point to the same underlying objects. # It will get re-populated in the cloned queryset the next time it's # used. obj._annotation_select_cache = None obj.extra = self.extra.copy() if self.extra_select_mask is None: obj.extra_select_mask = None else: obj.extra_select_mask = self.extra_select_mask.copy() if self._extra_select_cache is None: obj._extra_select_cache = None else: obj._extra_select_cache = self._extra_select_cache.copy() if self.select_related is not False: # Use deepcopy because select_related stores fields in nested # dicts. 
obj.select_related = copy.deepcopy(obj.select_related) if 'subq_aliases' in self.__dict__: obj.subq_aliases = self.subq_aliases.copy() obj.used_aliases = self.used_aliases.copy() obj._filtered_relations = self._filtered_relations.copy() # Clear the cached_property try: del obj.base_table except AttributeError: pass return obj def chain(self, klass=None): """ Return a copy of the current Query that's ready for another operation. The klass argument changes the type of the Query, e.g. UpdateQuery. """ obj = self.clone() if klass and obj.__class__ != klass: obj.__class__ = klass if not obj.filter_is_sticky: obj.used_aliases = set() obj.filter_is_sticky = False if hasattr(obj, '_setup_query'): obj._setup_query() return obj def relabeled_clone(self, change_map): clone = self.clone() clone.change_aliases(change_map) return clone def _get_col(self, target, field, alias): if not self.alias_cols: alias = None return target.get_col(alias, field) def rewrite_cols(self, annotation, col_cnt): # We must make sure the inner query has the referred columns in it. # If we are aggregating over an annotation, then Django uses Ref() # instances to note this. However, if we are annotating over a column # of a related model, then it might be that column isn't part of the # SELECT clause of the inner query, and we must manually make sure # the column is selected. An example case is: # .aggregate(Sum('author__awards')) # Resolving this expression results in a join to author, but there # is no guarantee the awards column of author is in the select clause # of the query. Thus we must manually add the column to the inner # query. orig_exprs = annotation.get_source_expressions() new_exprs = [] for expr in orig_exprs: # FIXME: These conditions are fairly arbitrary. Identify a better # method of having expressions decide which code path they should # take. if isinstance(expr, Ref): # Its already a Ref to subquery (see resolve_ref() for # details) new_exprs.append(expr) elif isinstance(expr, (WhereNode, Lookup)): # Decompose the subexpressions further. The code here is # copied from the else clause, but this condition must appear # before the contains_aggregate/is_summary condition below. new_expr, col_cnt = self.rewrite_cols(expr, col_cnt) new_exprs.append(new_expr) else: # Reuse aliases of expressions already selected in subquery. for col_alias, selected_annotation in self.annotation_select.items(): if selected_annotation is expr: new_expr = Ref(col_alias, expr) break else: # An expression that is not selected the subquery. if isinstance(expr, Col) or (expr.contains_aggregate and not expr.is_summary): # Reference column or another aggregate. Select it # under a non-conflicting alias. col_cnt += 1 col_alias = '__col%d' % col_cnt self.annotations[col_alias] = expr self.append_annotation_mask([col_alias]) new_expr = Ref(col_alias, expr) else: # Some other expression not referencing database values # directly. Its subexpression might contain Cols. new_expr, col_cnt = self.rewrite_cols(expr, col_cnt) new_exprs.append(new_expr) annotation.set_source_expressions(new_exprs) return annotation, col_cnt def get_aggregation(self, using, added_aggregate_names): """ Return the dictionary with the values of the existing aggregations. """ if not self.annotation_select: return {} existing_annotations = [ annotation for alias, annotation in self.annotations.items() if alias not in added_aggregate_names ] # Decide if we need to use a subquery. 
# # Existing annotations would cause incorrect results as get_aggregation() # must produce just one result and thus must not use GROUP BY. But we # aren't smart enough to remove the existing annotations from the # query, so those would force us to use GROUP BY. # # If the query has limit or distinct, or uses set operations, then # those operations must be done in a subquery so that the query # aggregates on the limit and/or distinct results instead of applying # the distinct and limit after the aggregation. if (isinstance(self.group_by, tuple) or self.is_sliced or existing_annotations or self.distinct or self.combinator): from django.db.models.sql.subqueries import AggregateQuery inner_query = self.clone() inner_query.subquery = True outer_query = AggregateQuery(self.model, inner_query) inner_query.select_for_update = False inner_query.select_related = False inner_query.set_annotation_mask(self.annotation_select) if not self.is_sliced and not self.distinct_fields: # Queries with distinct_fields need ordering and when a limit # is applied we must take the slice from the ordered query. # Otherwise no need for ordering. inner_query.clear_ordering(True) if not inner_query.distinct: # If the inner query uses default select and it has some # aggregate annotations, then we must make sure the inner # query is grouped by the main model's primary key. However, # clearing the select clause can alter results if distinct is # used. has_existing_aggregate_annotations = any( annotation for annotation in existing_annotations if getattr(annotation, 'contains_aggregate', True) ) if inner_query.default_cols and has_existing_aggregate_annotations: inner_query.group_by = (self.model._meta.pk.get_col(inner_query.get_initial_alias()),) inner_query.default_cols = False relabels = {t: 'subquery' for t in inner_query.alias_map} relabels[None] = 'subquery' # Remove any aggregates marked for reduction from the subquery # and move them to the outer AggregateQuery. col_cnt = 0 for alias, expression in list(inner_query.annotation_select.items()): annotation_select_mask = inner_query.annotation_select_mask if expression.is_summary: expression, col_cnt = inner_query.rewrite_cols(expression, col_cnt) outer_query.annotations[alias] = expression.relabeled_clone(relabels) del inner_query.annotations[alias] annotation_select_mask.remove(alias) # Make sure the annotation_select wont use cached results. inner_query.set_annotation_mask(inner_query.annotation_select_mask) if inner_query.select == () and not inner_query.default_cols and not inner_query.annotation_select_mask: # In case of Model.objects[0:3].count(), there would be no # field selected in the inner query, yet we must use a subquery. # So, make sure at least one field is selected. inner_query.select = (self.model._meta.pk.get_col(inner_query.get_initial_alias()),) else: outer_query = self self.select = () self.default_cols = False self.extra = {} outer_query.clear_ordering(True) outer_query.clear_limits() outer_query.select_for_update = False outer_query.select_related = False compiler = outer_query.get_compiler(using) result = compiler.execute_sql(SINGLE) if result is None: result = [None] * len(outer_query.annotation_select) converters = compiler.get_converters(outer_query.annotation_select.values()) result = next(compiler.apply_converters((result,), converters)) return dict(zip(outer_query.annotation_select, result)) def get_count(self, using): """ Perform a COUNT() query using the current filter constraints. 
""" obj = self.clone() obj.add_annotation(Count('*'), alias='__count', is_summary=True) number = obj.get_aggregation(using, ['__count'])['__count'] if number is None: number = 0 return number def has_filters(self): return self.where def exists(self, using, limit=True): q = self.clone() if not q.distinct: if q.group_by is True: q.add_fields((f.attname for f in self.model._meta.concrete_fields), False) # Disable GROUP BY aliases to avoid orphaning references to the # SELECT clause which is about to be cleared. q.set_group_by(allow_aliases=False) q.clear_select_clause() if q.combined_queries and q.combinator == 'union': limit_combined = connections[using].features.supports_slicing_ordering_in_compound q.combined_queries = tuple( combined_query.exists(using, limit=limit_combined) for combined_query in q.combined_queries ) q.clear_ordering(True) if limit: q.set_limits(high=1) q.add_extra({'a': 1}, None, None, None, None, None) q.set_extra_mask(['a']) return q def has_results(self, using): q = self.exists(using) compiler = q.get_compiler(using=using) return compiler.has_results() def explain(self, using, format=None, **options): q = self.clone() q.explain_query = True q.explain_format = format q.explain_options = options compiler = q.get_compiler(using=using) return '\n'.join(compiler.explain_query()) def combine(self, rhs, connector): """ Merge the 'rhs' query into the current one (with any 'rhs' effects being applied *after* (that is, "to the right of") anything in the current query. 'rhs' is not modified during a call to this function. The 'connector' parameter describes how to connect filters from the 'rhs' query. """ assert self.model == rhs.model, \ "Cannot combine queries on two different base models." if self.is_sliced: raise TypeError('Cannot combine queries once a slice has been taken.') assert self.distinct == rhs.distinct, \ "Cannot combine a unique query with a non-unique query." assert self.distinct_fields == rhs.distinct_fields, \ "Cannot combine queries with different distinct fields." # Work out how to relabel the rhs aliases, if necessary. change_map = {} conjunction = (connector == AND) # Determine which existing joins can be reused. When combining the # query with AND we must recreate all joins for m2m filters. When # combining with OR we can reuse joins. The reason is that in AND # case a single row can't fulfill a condition like: # revrel__col=1 & revrel__col=2 # But, there might be two different related rows matching this # condition. In OR case a single True is enough, so single row is # enough, too. # # Note that we will be creating duplicate joins for non-m2m joins in # the AND case. The results will be correct but this creates too many # joins. This is something that could be fixed later on. reuse = set() if conjunction else set(self.alias_map) # Base table must be present in the query - this is the same # table on both sides. self.get_initial_alias() joinpromoter = JoinPromoter(connector, 2, False) joinpromoter.add_votes( j for j in self.alias_map if self.alias_map[j].join_type == INNER) rhs_votes = set() # Now, add the joins from rhs query into the new query (skipping base # table). rhs_tables = list(rhs.alias_map)[1:] for alias in rhs_tables: join = rhs.alias_map[alias] # If the left side of the join was already relabeled, use the # updated alias. join = join.relabeled_clone(change_map) new_alias = self.join(join, reuse=reuse) if join.join_type == INNER: rhs_votes.add(new_alias) # We can't reuse the same join again in the query. 
If we have two # distinct joins for the same connection in rhs query, then the # combined query must have two joins, too. reuse.discard(new_alias) if alias != new_alias: change_map[alias] = new_alias if not rhs.alias_refcount[alias]: # The alias was unused in the rhs query. Unref it so that it # will be unused in the new query, too. We have to add and # unref the alias so that join promotion has information of # the join type for the unused alias. self.unref_alias(new_alias) joinpromoter.add_votes(rhs_votes) joinpromoter.update_join_types(self) # Combine subqueries aliases to ensure aliases relabelling properly # handle subqueries when combining where and select clauses. self.subq_aliases |= rhs.subq_aliases # Now relabel a copy of the rhs where-clause and add it to the current # one. w = rhs.where.clone() w.relabel_aliases(change_map) self.where.add(w, connector) # Selection columns and extra extensions are those provided by 'rhs'. if rhs.select: self.set_select([col.relabeled_clone(change_map) for col in rhs.select]) else: self.select = () if connector == OR: # It would be nice to be able to handle this, but the queries don't # really make sense (or return consistent value sets). Not worth # the extra complexity when you can write a real query instead. if self.extra and rhs.extra: raise ValueError("When merging querysets using 'or', you cannot have extra(select=...) on both sides.") self.extra.update(rhs.extra) extra_select_mask = set() if self.extra_select_mask is not None: extra_select_mask.update(self.extra_select_mask) if rhs.extra_select_mask is not None: extra_select_mask.update(rhs.extra_select_mask) if extra_select_mask: self.set_extra_mask(extra_select_mask) self.extra_tables += rhs.extra_tables # Ordering uses the 'rhs' ordering, unless it has none, in which case # the current ordering is used. self.order_by = rhs.order_by or self.order_by self.extra_order_by = rhs.extra_order_by or self.extra_order_by def deferred_to_data(self, target, callback): """ Convert the self.deferred_loading data structure to an alternate data structure, describing the field that *will* be loaded. This is used to compute the columns to select from the database and also by the QuerySet class to work out which fields are being initialized on each model. Models that have all their fields included aren't mentioned in the result, only those that have field restrictions in place. The "target" parameter is the instance that is populated (in place). The "callback" is a function that is called whenever a (model, field) pair need to be added to "target". It accepts three parameters: "target", and the model and list of fields being added for that model. """ field_names, defer = self.deferred_loading if not field_names: return orig_opts = self.get_meta() seen = {} must_include = {orig_opts.concrete_model: {orig_opts.pk}} for field_name in field_names: parts = field_name.split(LOOKUP_SEP) cur_model = self.model._meta.concrete_model opts = orig_opts for name in parts[:-1]: old_model = cur_model if name in self._filtered_relations: name = self._filtered_relations[name].relation_name source = opts.get_field(name) if is_reverse_o2o(source): cur_model = source.related_model else: cur_model = source.remote_field.model opts = cur_model._meta # Even if we're "just passing through" this model, we must add # both the current model's pk and the related reference field # (if it's not a reverse relation) to the things we select. 
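                # Illustrative note (added; Book and Author are hypothetical
                # models, not part of this module): a call such as
                # Book.objects.only('author__name') traverses the 'author'
                # relation here, so the author foreign key column on Book and
                # Author's primary key end up in must_include even though
                # neither was named explicitly.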
if not is_reverse_o2o(source): must_include[old_model].add(source) add_to_dict(must_include, cur_model, opts.pk) field = opts.get_field(parts[-1]) is_reverse_object = field.auto_created and not field.concrete model = field.related_model if is_reverse_object else field.model model = model._meta.concrete_model if model == opts.model: model = cur_model if not is_reverse_o2o(field): add_to_dict(seen, model, field) if defer: # We need to load all fields for each model, except those that # appear in "seen" (for all models that appear in "seen"). The only # slight complexity here is handling fields that exist on parent # models. workset = {} for model, values in seen.items(): for field in model._meta.local_fields: if field not in values: m = field.model._meta.concrete_model add_to_dict(workset, m, field) for model, values in must_include.items(): # If we haven't included a model in workset, we don't add the # corresponding must_include fields for that model, since an # empty set means "include all fields". That's why there's no # "else" branch here. if model in workset: workset[model].update(values) for model, values in workset.items(): callback(target, model, values) else: for model, values in must_include.items(): if model in seen: seen[model].update(values) else: # As we've passed through this model, but not explicitly # included any fields, we have to make sure it's mentioned # so that only the "must include" fields are pulled in. seen[model] = values # Now ensure that every model in the inheritance chain is mentioned # in the parent list. Again, it must be mentioned to ensure that # only "must include" fields are pulled in. for model in orig_opts.get_parent_list(): seen.setdefault(model, set()) for model, values in seen.items(): callback(target, model, values) def table_alias(self, table_name, create=False, filtered_relation=None): """ Return a table alias for the given table_name and whether this is a new alias or not. If 'create' is true, a new alias is always created. Otherwise, the most recently created alias for the table (if one exists) is reused. """ alias_list = self.table_map.get(table_name) if not create and alias_list: alias = alias_list[0] self.alias_refcount[alias] += 1 return alias, False # Create a new alias for this table. if alias_list: alias = '%s%d' % (self.alias_prefix, len(self.alias_map) + 1) alias_list.append(alias) else: # The first occurrence of a table uses the table name directly. alias = filtered_relation.alias if filtered_relation is not None else table_name self.table_map[table_name] = [alias] self.alias_refcount[alias] = 1 return alias, True def ref_alias(self, alias): """Increases the reference count for this alias.""" self.alias_refcount[alias] += 1 def unref_alias(self, alias, amount=1): """Decreases the reference count for this alias.""" self.alias_refcount[alias] -= amount def promote_joins(self, aliases): """ Promote recursively the join type of given aliases and its children to an outer join. If 'unconditional' is False, only promote the join if it is nullable or the parent join is an outer join. The children promotion is done to avoid join chains that contain a LOUTER b INNER c. So, if we have currently a INNER b INNER c and a->b is promoted, then we must also promote b->c automatically, or otherwise the promotion of a->b doesn't actually change anything in the query results. 
""" aliases = list(aliases) while aliases: alias = aliases.pop(0) if self.alias_map[alias].join_type is None: # This is the base table (first FROM entry) - this table # isn't really joined at all in the query, so we should not # alter its join type. continue # Only the first alias (skipped above) should have None join_type assert self.alias_map[alias].join_type is not None parent_alias = self.alias_map[alias].parent_alias parent_louter = parent_alias and self.alias_map[parent_alias].join_type == LOUTER already_louter = self.alias_map[alias].join_type == LOUTER if ((self.alias_map[alias].nullable or parent_louter) and not already_louter): self.alias_map[alias] = self.alias_map[alias].promote() # Join type of 'alias' changed, so re-examine all aliases that # refer to this one. aliases.extend( join for join in self.alias_map if self.alias_map[join].parent_alias == alias and join not in aliases ) def demote_joins(self, aliases): """ Change join type from LOUTER to INNER for all joins in aliases. Similarly to promote_joins(), this method must ensure no join chains containing first an outer, then an inner join are generated. If we are demoting b->c join in chain a LOUTER b LOUTER c then we must demote a->b automatically, or otherwise the demotion of b->c doesn't actually change anything in the query results. . """ aliases = list(aliases) while aliases: alias = aliases.pop(0) if self.alias_map[alias].join_type == LOUTER: self.alias_map[alias] = self.alias_map[alias].demote() parent_alias = self.alias_map[alias].parent_alias if self.alias_map[parent_alias].join_type == INNER: aliases.append(parent_alias) def reset_refcounts(self, to_counts): """ Reset reference counts for aliases so that they match the value passed in `to_counts`. """ for alias, cur_refcount in self.alias_refcount.copy().items(): unref_amount = cur_refcount - to_counts.get(alias, 0) self.unref_alias(alias, unref_amount) def change_aliases(self, change_map): """ Change the aliases in change_map (which maps old-alias -> new-alias), relabelling any references to them in select columns and the where clause. """ assert set(change_map).isdisjoint(change_map.values()) # 1. Update references in "select" (normal columns plus aliases), # "group by" and "where". self.where.relabel_aliases(change_map) if isinstance(self.group_by, tuple): self.group_by = tuple([col.relabeled_clone(change_map) for col in self.group_by]) self.select = tuple([col.relabeled_clone(change_map) for col in self.select]) self.annotations = self.annotations and { key: col.relabeled_clone(change_map) for key, col in self.annotations.items() } # 2. Rename the alias in the internal table/alias datastructures. for old_alias, new_alias in change_map.items(): if old_alias not in self.alias_map: continue alias_data = self.alias_map[old_alias].relabeled_clone(change_map) self.alias_map[new_alias] = alias_data self.alias_refcount[new_alias] = self.alias_refcount[old_alias] del self.alias_refcount[old_alias] del self.alias_map[old_alias] table_aliases = self.table_map[alias_data.table_name] for pos, alias in enumerate(table_aliases): if alias == old_alias: table_aliases[pos] = new_alias break self.external_aliases = { # Table is aliased or it's being changed and thus is aliased. change_map.get(alias, alias): (aliased or alias in change_map) for alias, aliased in self.external_aliases.items() } def bump_prefix(self, outer_query): """ Change the alias prefix to the next letter in the alphabet in a way that the outer query's aliases and this query's aliases will not conflict. 
Even tables that previously had no alias will get an alias after this call. """ def prefix_gen(): """ Generate a sequence of characters in alphabetical order: -> 'A', 'B', 'C', ... When the alphabet is finished, the sequence will continue with the Cartesian product: -> 'AA', 'AB', 'AC', ... """ alphabet = ascii_uppercase prefix = chr(ord(self.alias_prefix) + 1) yield prefix for n in count(1): seq = alphabet[alphabet.index(prefix):] if prefix else alphabet for s in product(seq, repeat=n): yield ''.join(s) prefix = None if self.alias_prefix != outer_query.alias_prefix: # No clashes between self and outer query should be possible. return # Explicitly avoid infinite loop. The constant divider is based on how # much depth recursive subquery references add to the stack. This value # might need to be adjusted when adding or removing function calls from # the code path in charge of performing these operations. local_recursion_limit = sys.getrecursionlimit() // 16 for pos, prefix in enumerate(prefix_gen()): if prefix not in self.subq_aliases: self.alias_prefix = prefix break if pos > local_recursion_limit: raise RecursionError( 'Maximum recursion depth exceeded: too many subqueries.' ) self.subq_aliases = self.subq_aliases.union([self.alias_prefix]) outer_query.subq_aliases = outer_query.subq_aliases.union(self.subq_aliases) self.change_aliases({ alias: '%s%d' % (self.alias_prefix, pos) for pos, alias in enumerate(self.alias_map) }) def get_initial_alias(self): """ Return the first alias for this query, after increasing its reference count. """ if self.alias_map: alias = self.base_table self.ref_alias(alias) else: alias = self.join(BaseTable(self.get_meta().db_table, None)) return alias def count_active_tables(self): """ Return the number of tables in this query with a non-zero reference count. After execution, the reference counts are zeroed, so tables added in compiler will not be seen by this method. """ return len([1 for count in self.alias_refcount.values() if count]) def join(self, join, reuse=None): """ Return an alias for the 'join', either reusing an existing alias for that join or creating a new one. 'join' is either a sql.datastructures.BaseTable or Join. The 'reuse' parameter can be either None which means all joins are reusable, or it can be a set containing the aliases that can be reused. A join is always created as LOUTER if the lhs alias is LOUTER to make sure chains like t1 LOUTER t2 INNER t3 aren't generated. All new joins are created as LOUTER if the join is nullable. """ reuse_aliases = [ a for a, j in self.alias_map.items() if (reuse is None or a in reuse) and j.equals(join) ] if reuse_aliases: if join.table_alias in reuse_aliases: reuse_alias = join.table_alias else: # Reuse the most recent alias of the joined table # (a many-to-many relation may be joined multiple times). reuse_alias = reuse_aliases[-1] self.ref_alias(reuse_alias) return reuse_alias # No reuse is possible, so we need a new alias. alias, _ = self.table_alias(join.table_name, create=True, filtered_relation=join.filtered_relation) if join.join_type: if self.alias_map[join.parent_alias].join_type == LOUTER or join.nullable: join_type = LOUTER else: join_type = INNER join.join_type = join_type join.table_alias = alias self.alias_map[alias] = join return alias def join_parent_model(self, opts, model, alias, seen): """ Make sure the given 'model' is joined in the query. If 'model' isn't a parent of 'opts' or if it is None this method is a no-op. 
The 'alias' is the root alias for starting the join, 'seen' is a dict of model -> alias of existing joins. It must also contain a mapping of None -> some alias. This will be returned in the no-op case. """ if model in seen: return seen[model] chain = opts.get_base_chain(model) if not chain: return alias curr_opts = opts for int_model in chain: if int_model in seen: curr_opts = int_model._meta alias = seen[int_model] continue # Proxy model have elements in base chain # with no parents, assign the new options # object and skip to the next base in that # case if not curr_opts.parents[int_model]: curr_opts = int_model._meta continue link_field = curr_opts.get_ancestor_link(int_model) join_info = self.setup_joins([link_field.name], curr_opts, alias) curr_opts = int_model._meta alias = seen[int_model] = join_info.joins[-1] return alias or seen[None] def add_annotation(self, annotation, alias, is_summary=False, select=True): """Add a single annotation expression to the Query.""" annotation = annotation.resolve_expression(self, allow_joins=True, reuse=None, summarize=is_summary) if select: self.append_annotation_mask([alias]) else: self.set_annotation_mask(set(self.annotation_select).difference({alias})) self.annotations[alias] = annotation def resolve_expression(self, query, *args, **kwargs): clone = self.clone() # Subqueries need to use a different set of aliases than the outer query. clone.bump_prefix(query) clone.subquery = True # It's safe to drop ordering if the queryset isn't using slicing, # distinct(*fields) or select_for_update(). if (self.low_mark == 0 and self.high_mark is None and not self.distinct_fields and not self.select_for_update): clone.clear_ordering(True) clone.where.resolve_expression(query, *args, **kwargs) for key, value in clone.annotations.items(): resolved = value.resolve_expression(query, *args, **kwargs) if hasattr(resolved, 'external_aliases'): resolved.external_aliases.update(clone.external_aliases) clone.annotations[key] = resolved # Outer query's aliases are considered external. for alias, table in query.alias_map.items(): clone.external_aliases[alias] = ( (isinstance(table, Join) and table.join_field.related_model._meta.db_table != alias) or (isinstance(table, BaseTable) and table.table_name != table.table_alias) ) return clone def get_external_cols(self): exprs = chain(self.annotations.values(), self.where.children) return [ col for col in self._gen_cols(exprs, include_external=True) if col.alias in self.external_aliases ] def as_sql(self, compiler, connection): sql, params = self.get_compiler(connection=connection).as_sql() if self.subquery: sql = '(%s)' % sql return sql, params def resolve_lookup_value(self, value, can_reuse, allow_joins): if hasattr(value, 'resolve_expression'): value = value.resolve_expression( self, reuse=can_reuse, allow_joins=allow_joins, ) elif isinstance(value, (list, tuple)): # The items of the iterable may be expressions and therefore need # to be resolved independently. values = ( self.resolve_lookup_value(sub_value, can_reuse, allow_joins) for sub_value in value ) type_ = type(value) if hasattr(type_, '_make'): # namedtuple return type_(*values) return type_(values) return value def solve_lookup_type(self, lookup): """ Solve the lookup type from the lookup (e.g.: 'foobar__id__icontains'). 
""" lookup_splitted = lookup.split(LOOKUP_SEP) if self.annotations: expression, expression_lookups = refs_expression(lookup_splitted, self.annotations) if expression: return expression_lookups, (), expression _, field, _, lookup_parts = self.names_to_path(lookup_splitted, self.get_meta()) field_parts = lookup_splitted[0:len(lookup_splitted) - len(lookup_parts)] if len(lookup_parts) > 1 and not field_parts: raise FieldError( 'Invalid lookup "%s" for model %s".' % (lookup, self.get_meta().model.__name__) ) return lookup_parts, field_parts, False def check_query_object_type(self, value, opts, field): """ Check whether the object passed while querying is of the correct type. If not, raise a ValueError specifying the wrong object. """ if hasattr(value, '_meta'): if not check_rel_lookup_compatibility(value._meta.model, opts, field): raise ValueError( 'Cannot query "%s": Must be "%s" instance.' % (value, opts.object_name)) def check_related_objects(self, field, value, opts): """Check the type of object passed to query relations.""" if field.is_relation: # Check that the field and the queryset use the same model in a # query like .filter(author=Author.objects.all()). For example, the # opts would be Author's (from the author field) and value.model # would be Author.objects.all() queryset's .model (Author also). # The field is the related field on the lhs side. if (isinstance(value, Query) and not value.has_select_fields and not check_rel_lookup_compatibility(value.model, opts, field)): raise ValueError( 'Cannot use QuerySet for "%s": Use a QuerySet for "%s".' % (value.model._meta.object_name, opts.object_name) ) elif hasattr(value, '_meta'): self.check_query_object_type(value, opts, field) elif hasattr(value, '__iter__'): for v in value: self.check_query_object_type(v, opts, field) def check_filterable(self, expression): """Raise an error if expression cannot be used in a WHERE clause.""" if ( hasattr(expression, 'resolve_expression') and not getattr(expression, 'filterable', True) ): raise NotSupportedError( expression.__class__.__name__ + ' is disallowed in the filter ' 'clause.' ) if hasattr(expression, 'get_source_expressions'): for expr in expression.get_source_expressions(): self.check_filterable(expr) def build_lookup(self, lookups, lhs, rhs): """ Try to extract transforms and lookup from given lhs. The lhs value is something that works like SQLExpression. The rhs value is what the lookup is going to compare against. The lookups is a list of names to extract using get_lookup() and get_transform(). """ # __exact is the default lookup if one isn't given. *transforms, lookup_name = lookups or ['exact'] for name in transforms: lhs = self.try_transform(lhs, name) # First try get_lookup() so that the lookup takes precedence if the lhs # supports both transform and lookup for the name. lookup_class = lhs.get_lookup(lookup_name) if not lookup_class: if lhs.field.is_relation: raise FieldError('Related Field got invalid lookup: {}'.format(lookup_name)) # A lookup wasn't found. Try to interpret the name as a transform # and do an Exact lookup against it. lhs = self.try_transform(lhs, lookup_name) lookup_name = 'exact' lookup_class = lhs.get_lookup(lookup_name) if not lookup_class: return lookup = lookup_class(lhs, rhs) # Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all # uses of None as a query value unless the lookup supports it. 
if lookup.rhs is None and not lookup.can_use_none_as_rhs: if lookup_name not in ('exact', 'iexact'): raise ValueError("Cannot use None as a query value") return lhs.get_lookup('isnull')(lhs, True) # For Oracle '' is equivalent to null. The check must be done at this # stage because join promotion can't be done in the compiler. Using # DEFAULT_DB_ALIAS isn't nice but it's the best that can be done here. # A similar thing is done in is_nullable(), too. if (connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and lookup_name == 'exact' and lookup.rhs == ''): return lhs.get_lookup('isnull')(lhs, True) return lookup def try_transform(self, lhs, name): """ Helper method for build_lookup(). Try to fetch and initialize a transform for name parameter from lhs. """ transform_class = lhs.get_transform(name) if transform_class: return transform_class(lhs) else: output_field = lhs.output_field.__class__ suggested_lookups = difflib.get_close_matches(name, output_field.get_lookups()) if suggested_lookups: suggestion = ', perhaps you meant %s?' % ' or '.join(suggested_lookups) else: suggestion = '.' raise FieldError( "Unsupported lookup '%s' for %s or join on the field not " "permitted%s" % (name, output_field.__name__, suggestion) ) def build_filter(self, filter_expr, branch_negated=False, current_negated=False, can_reuse=None, allow_joins=True, split_subq=True, check_filterable=True): """ Build a WhereNode for a single filter clause but don't add it to this Query. Query.add_q() will then add this filter to the where Node. The 'branch_negated' tells us if the current branch contains any negations. This will be used to determine if subqueries are needed. The 'current_negated' is used to determine if the current filter is negated or not and this will be used to determine if IS NULL filtering is needed. The difference between current_negated and branch_negated is that branch_negated is set on first negation, but current_negated is flipped for each negation. Note that add_filter will not do any negating itself, that is done upper in the code by add_q(). The 'can_reuse' is a set of reusable joins for multijoins. The method will create a filter clause that can be added to the current query. However, if the filter isn't added to the query then the caller is responsible for unreffing the joins used. 
""" if isinstance(filter_expr, dict): raise FieldError("Cannot parse keyword query as dict") if isinstance(filter_expr, Q): return self._add_q( filter_expr, branch_negated=branch_negated, current_negated=current_negated, used_aliases=can_reuse, allow_joins=allow_joins, split_subq=split_subq, check_filterable=check_filterable, ) if hasattr(filter_expr, 'resolve_expression'): if not getattr(filter_expr, 'conditional', False): raise TypeError('Cannot filter against a non-conditional expression.') condition = self.build_lookup( ['exact'], filter_expr.resolve_expression(self, allow_joins=allow_joins), True ) clause = self.where_class() clause.add(condition, AND) return clause, [] arg, value = filter_expr if not arg: raise FieldError("Cannot parse keyword query %r" % arg) lookups, parts, reffed_expression = self.solve_lookup_type(arg) if check_filterable: self.check_filterable(reffed_expression) if not allow_joins and len(parts) > 1: raise FieldError("Joined field references are not permitted in this query") pre_joins = self.alias_refcount.copy() value = self.resolve_lookup_value(value, can_reuse, allow_joins) used_joins = {k for k, v in self.alias_refcount.items() if v > pre_joins.get(k, 0)} if check_filterable: self.check_filterable(value) clause = self.where_class() if reffed_expression: condition = self.build_lookup(lookups, reffed_expression, value) clause.add(condition, AND) return clause, [] opts = self.get_meta() alias = self.get_initial_alias() allow_many = not branch_negated or not split_subq try: join_info = self.setup_joins( parts, opts, alias, can_reuse=can_reuse, allow_many=allow_many, ) # Prevent iterator from being consumed by check_related_objects() if isinstance(value, Iterator): value = list(value) self.check_related_objects(join_info.final_field, value, join_info.opts) # split_exclude() needs to know which joins were generated for the # lookup parts self._lookup_joins = join_info.joins except MultiJoin as e: return self.split_exclude(filter_expr, can_reuse, e.names_with_path) # Update used_joins before trimming since they are reused to determine # which joins could be later promoted to INNER. used_joins.update(join_info.joins) targets, alias, join_list = self.trim_joins(join_info.targets, join_info.joins, join_info.path) if can_reuse is not None: can_reuse.update(join_list) if join_info.final_field.is_relation: # No support for transforms for relational fields num_lookups = len(lookups) if num_lookups > 1: raise FieldError('Related Field got invalid lookup: {}'.format(lookups[0])) if len(targets) == 1: col = self._get_col(targets[0], join_info.final_field, alias) else: col = MultiColSource(alias, targets, join_info.targets, join_info.final_field) else: col = self._get_col(targets[0], join_info.final_field, alias) condition = self.build_lookup(lookups, col, value) lookup_type = condition.lookup_name clause.add(condition, AND) require_outer = lookup_type == 'isnull' and condition.rhs is True and not current_negated if current_negated and (lookup_type != 'isnull' or condition.rhs is False) and condition.rhs is not None: require_outer = True if lookup_type != 'isnull': # The condition added here will be SQL like this: # NOT (col IS NOT NULL), where the first NOT is added in # upper layers of code. The reason for addition is that if col # is null, then col != someval will result in SQL "unknown" # which isn't the same as in Python. The Python None handling # is wanted, and it can be gotten by # (col IS NULL OR col != someval) # <=> # NOT (col IS NOT NULL AND col = someval). 
if ( self.is_nullable(targets[0]) or self.alias_map[join_list[-1]].join_type == LOUTER ): lookup_class = targets[0].get_lookup('isnull') col = self._get_col(targets[0], join_info.targets[0], alias) clause.add(lookup_class(col, False), AND) # If someval is a nullable column, someval IS NOT NULL is # added. if isinstance(value, Col) and self.is_nullable(value.target): lookup_class = value.target.get_lookup('isnull') clause.add(lookup_class(value, False), AND) return clause, used_joins if not require_outer else () def add_filter(self, filter_clause): self.add_q(Q(**{filter_clause[0]: filter_clause[1]})) def add_q(self, q_object): """ A preprocessor for the internal _add_q(). Responsible for doing final join promotion. """ # For join promotion this case is doing an AND for the added q_object # and existing conditions. So, any existing inner join forces the join # type to remain inner. Existing outer joins can however be demoted. # (Consider case where rel_a is LOUTER and rel_a__col=1 is added - if # rel_a doesn't produce any rows, then the whole condition must fail. # So, demotion is OK. existing_inner = {a for a in self.alias_map if self.alias_map[a].join_type == INNER} clause, _ = self._add_q(q_object, self.used_aliases) if clause: self.where.add(clause, AND) self.demote_joins(existing_inner) def build_where(self, filter_expr): return self.build_filter(filter_expr, allow_joins=False)[0] def _add_q(self, q_object, used_aliases, branch_negated=False, current_negated=False, allow_joins=True, split_subq=True, check_filterable=True): """Add a Q-object to the current filter.""" connector = q_object.connector current_negated = current_negated ^ q_object.negated branch_negated = branch_negated or q_object.negated target_clause = self.where_class(connector=connector, negated=q_object.negated) joinpromoter = JoinPromoter(q_object.connector, len(q_object.children), current_negated) for child in q_object.children: child_clause, needed_inner = self.build_filter( child, can_reuse=used_aliases, branch_negated=branch_negated, current_negated=current_negated, allow_joins=allow_joins, split_subq=split_subq, check_filterable=check_filterable, ) joinpromoter.add_votes(needed_inner) if child_clause: target_clause.add(child_clause, connector) needed_inner = joinpromoter.update_join_types(self) return target_clause, needed_inner def build_filtered_relation_q(self, q_object, reuse, branch_negated=False, current_negated=False): """Add a FilteredRelation object to the current filter.""" connector = q_object.connector current_negated ^= q_object.negated branch_negated = branch_negated or q_object.negated target_clause = self.where_class(connector=connector, negated=q_object.negated) for child in q_object.children: if isinstance(child, Node): child_clause = self.build_filtered_relation_q( child, reuse=reuse, branch_negated=branch_negated, current_negated=current_negated, ) else: child_clause, _ = self.build_filter( child, can_reuse=reuse, branch_negated=branch_negated, current_negated=current_negated, allow_joins=True, split_subq=False, ) target_clause.add(child_clause, connector) return target_clause def add_filtered_relation(self, filtered_relation, alias): filtered_relation.alias = alias lookups = dict(get_children_from_q(filtered_relation.condition)) relation_lookup_parts, relation_field_parts, _ = self.solve_lookup_type(filtered_relation.relation_name) if relation_lookup_parts: raise ValueError( "FilteredRelation's relation_name cannot contain lookups " "(got %r)." 
% filtered_relation.relation_name ) for lookup in chain(lookups): lookup_parts, lookup_field_parts, _ = self.solve_lookup_type(lookup) shift = 2 if not lookup_parts else 1 lookup_field_path = lookup_field_parts[:-shift] for idx, lookup_field_part in enumerate(lookup_field_path): if len(relation_field_parts) > idx: if relation_field_parts[idx] != lookup_field_part: raise ValueError( "FilteredRelation's condition doesn't support " "relations outside the %r (got %r)." % (filtered_relation.relation_name, lookup) ) else: raise ValueError( "FilteredRelation's condition doesn't support nested " "relations deeper than the relation_name (got %r for " "%r)." % (lookup, filtered_relation.relation_name) ) self._filtered_relations[filtered_relation.alias] = filtered_relation def names_to_path(self, names, opts, allow_many=True, fail_on_missing=False): """ Walk the list of names and turns them into PathInfo tuples. A single name in 'names' can generate multiple PathInfos (m2m, for example). 'names' is the path of names to travel, 'opts' is the model Options we start the name resolving from, 'allow_many' is as for setup_joins(). If fail_on_missing is set to True, then a name that can't be resolved will generate a FieldError. Return a list of PathInfo tuples. In addition return the final field (the last used join field) and target (which is a field guaranteed to contain the same value as the final field). Finally, return those names that weren't found (which are likely transforms and the final lookup). """ path, names_with_path = [], [] for pos, name in enumerate(names): cur_names_with_path = (name, []) if name == 'pk': name = opts.pk.name field = None filtered_relation = None try: field = opts.get_field(name) except FieldDoesNotExist: if name in self.annotation_select: field = self.annotation_select[name].output_field elif name in self._filtered_relations and pos == 0: filtered_relation = self._filtered_relations[name] if LOOKUP_SEP in filtered_relation.relation_name: parts = filtered_relation.relation_name.split(LOOKUP_SEP) filtered_relation_path, field, _, _ = self.names_to_path( parts, opts, allow_many, fail_on_missing, ) path.extend(filtered_relation_path[:-1]) else: field = opts.get_field(filtered_relation.relation_name) if field is not None: # Fields that contain one-to-many relations with a generic # model (like a GenericForeignKey) cannot generate reverse # relations and therefore cannot be used for reverse querying. if field.is_relation and not field.related_model: raise FieldError( "Field %r does not generate an automatic reverse " "relation and therefore cannot be used for reverse " "querying. If it is a GenericForeignKey, consider " "adding a GenericRelation." % name ) try: model = field.model._meta.concrete_model except AttributeError: # QuerySet.annotate() may introduce fields that aren't # attached to a model. model = None else: # We didn't find the current field, so move position back # one step. pos -= 1 if pos == -1 or fail_on_missing: available = sorted([ *get_field_names_from_opts(opts), *self.annotation_select, *self._filtered_relations, ]) raise FieldError("Cannot resolve keyword '%s' into field. 
" "Choices are: %s" % (name, ", ".join(available))) break # Check if we need any joins for concrete inheritance cases (the # field lives in parent, but we are currently in one of its # children) if model is not opts.model: path_to_parent = opts.get_path_to_parent(model) if path_to_parent: path.extend(path_to_parent) cur_names_with_path[1].extend(path_to_parent) opts = path_to_parent[-1].to_opts if hasattr(field, 'get_path_info'): pathinfos = field.get_path_info(filtered_relation) if not allow_many: for inner_pos, p in enumerate(pathinfos): if p.m2m: cur_names_with_path[1].extend(pathinfos[0:inner_pos + 1]) names_with_path.append(cur_names_with_path) raise MultiJoin(pos + 1, names_with_path) last = pathinfos[-1] path.extend(pathinfos) final_field = last.join_field opts = last.to_opts targets = last.target_fields cur_names_with_path[1].extend(pathinfos) names_with_path.append(cur_names_with_path) else: # Local non-relational field. final_field = field targets = (field,) if fail_on_missing and pos + 1 != len(names): raise FieldError( "Cannot resolve keyword %r into field. Join on '%s'" " not permitted." % (names[pos + 1], name)) break return path, final_field, targets, names[pos + 1:] def setup_joins(self, names, opts, alias, can_reuse=None, allow_many=True): """ Compute the necessary table joins for the passage through the fields given in 'names'. 'opts' is the Options class for the current model (which gives the table we are starting from), 'alias' is the alias for the table to start the joining from. The 'can_reuse' defines the reverse foreign key joins we can reuse. It can be None in which case all joins are reusable or a set of aliases that can be reused. Note that non-reverse foreign keys are always reusable when using setup_joins(). If 'allow_many' is False, then any reverse foreign key seen will generate a MultiJoin exception. Return the final field involved in the joins, the target field (used for any 'where' constraint), the final 'opts' value, the joins, the field path traveled to generate the joins, and a transform function that takes a field and alias and is equivalent to `field.get_col(alias)` in the simple case but wraps field transforms if they were included in names. The target field is the field containing the concrete value. Final field can be something different, for example foreign key pointing to that value. Final field is needed for example in some value conversions (convert 'obj' in fk__id=obj to pk val using the foreign key field for example). """ joins = [alias] # The transform can't be applied yet, as joins must be trimmed later. # To avoid making every caller of this method look up transforms # directly, compute transforms here and create a partial that converts # fields to the appropriate wrapped version. def final_transformer(field, alias): if not self.alias_cols: alias = None return field.get_col(alias) # Try resolving all the names as fields first. If there's an error, # treat trailing names as lookups until a field can be resolved. last_field_exception = None for pivot in range(len(names), 0, -1): try: path, final_field, targets, rest = self.names_to_path( names[:pivot], opts, allow_many, fail_on_missing=True, ) except FieldError as exc: if pivot == 1: # The first item cannot be a lookup, so it's safe # to raise the field error here. raise else: last_field_exception = exc else: # The transforms are the remaining items that couldn't be # resolved into fields. 
transforms = names[pivot:] break for name in transforms: def transform(field, alias, *, name, previous): try: wrapped = previous(field, alias) return self.try_transform(wrapped, name) except FieldError: # FieldError is raised if the transform doesn't exist. if isinstance(final_field, Field) and last_field_exception: raise last_field_exception else: raise final_transformer = functools.partial(transform, name=name, previous=final_transformer) # Then, add the path to the query's joins. Note that we can't trim # joins at this stage - we will need the information about join type # of the trimmed joins. for join in path: if join.filtered_relation: filtered_relation = join.filtered_relation.clone() table_alias = filtered_relation.alias else: filtered_relation = None table_alias = None opts = join.to_opts if join.direct: nullable = self.is_nullable(join.join_field) else: nullable = True connection = Join( opts.db_table, alias, table_alias, INNER, join.join_field, nullable, filtered_relation=filtered_relation, ) reuse = can_reuse if join.m2m else None alias = self.join(connection, reuse=reuse) joins.append(alias) if filtered_relation: filtered_relation.path = joins[:] return JoinInfo(final_field, targets, opts, joins, path, final_transformer) def trim_joins(self, targets, joins, path): """ The 'target' parameter is the final field being joined to, 'joins' is the full list of join aliases. The 'path' contain the PathInfos used to create the joins. Return the final target field and table alias and the new active joins. Always trim any direct join if the target column is already in the previous table. Can't trim reverse joins as it's unknown if there's anything on the other side of the join. """ joins = joins[:] for pos, info in enumerate(reversed(path)): if len(joins) == 1 or not info.direct: break if info.filtered_relation: break join_targets = {t.column for t in info.join_field.foreign_related_fields} cur_targets = {t.column for t in targets} if not cur_targets.issubset(join_targets): break targets_dict = {r[1].column: r[0] for r in info.join_field.related_fields if r[1].column in cur_targets} targets = tuple(targets_dict[t.column] for t in targets) self.unref_alias(joins.pop()) return targets, joins[-1], joins @classmethod def _gen_cols(cls, exprs, include_external=False): for expr in exprs: if isinstance(expr, Col): yield expr elif include_external and callable(getattr(expr, 'get_external_cols', None)): yield from expr.get_external_cols() else: yield from cls._gen_cols( expr.get_source_expressions(), include_external=include_external, ) @classmethod def _gen_col_aliases(cls, exprs): yield from (expr.alias for expr in cls._gen_cols(exprs)) def resolve_ref(self, name, allow_joins=True, reuse=None, summarize=False): annotation = self.annotations.get(name) if annotation is not None: if not allow_joins: for alias in self._gen_col_aliases([annotation]): if isinstance(self.alias_map[alias], Join): raise FieldError( 'Joined field references are not permitted in ' 'this query' ) if summarize: # Summarize currently means we are doing an aggregate() query # which is executed as a wrapped subquery if any of the # aggregate() elements reference an existing annotation. In # that case we need to return a Ref to the subquery's annotation. if name not in self.annotation_select: raise FieldError( "Cannot aggregate over the '%s' alias. Use annotate() " "to promote it." 
% name ) return Ref(name, self.annotation_select[name]) else: return annotation else: field_list = name.split(LOOKUP_SEP) annotation = self.annotations.get(field_list[0]) if annotation is not None: for transform in field_list[1:]: annotation = self.try_transform(annotation, transform) return annotation join_info = self.setup_joins(field_list, self.get_meta(), self.get_initial_alias(), can_reuse=reuse) targets, final_alias, join_list = self.trim_joins(join_info.targets, join_info.joins, join_info.path) if not allow_joins and len(join_list) > 1: raise FieldError('Joined field references are not permitted in this query') if len(targets) > 1: raise FieldError("Referencing multicolumn fields with F() objects " "isn't supported") # Verify that the last lookup in name is a field or a transform: # transform_function() raises FieldError if not. transform = join_info.transform_function(targets[0], final_alias) if reuse is not None: reuse.update(join_list) return transform def split_exclude(self, filter_expr, can_reuse, names_with_path): """ When doing an exclude against any kind of N-to-many relation, we need to use a subquery. This method constructs the nested query, given the original exclude filter (filter_expr) and the portion up to the first N-to-many relation field. For example, if the origin filter is ~Q(child__name='foo'), filter_expr is ('child__name', 'foo') and can_reuse is a set of joins usable for filters in the original query. We will turn this into equivalent of: WHERE NOT EXISTS( SELECT 1 FROM child WHERE name = 'foo' AND child.parent_id = parent.id LIMIT 1 ) """ filter_lhs, filter_rhs = filter_expr if isinstance(filter_rhs, OuterRef): filter_expr = (filter_lhs, OuterRef(filter_rhs)) elif isinstance(filter_rhs, F): filter_expr = (filter_lhs, OuterRef(filter_rhs.name)) # Generate the inner query. query = Query(self.model) query._filtered_relations = self._filtered_relations query.add_filter(filter_expr) query.clear_ordering(True) # Try to have as simple as possible subquery -> trim leading joins from # the subquery. trimmed_prefix, contains_louter = query.trim_start(names_with_path) col = query.select[0] select_field = col.target alias = col.alias if alias in can_reuse: pk = select_field.model._meta.pk # Need to add a restriction so that outer query's filters are in effect for # the subquery, too. query.bump_prefix(self) lookup_class = select_field.get_lookup('exact') # Note that the query.select[0].alias is different from alias # due to bump_prefix above. lookup = lookup_class(pk.get_col(query.select[0].alias), pk.get_col(alias)) query.where.add(lookup, AND) query.external_aliases[alias] = True lookup_class = select_field.get_lookup('exact') lookup = lookup_class(col, ResolvedOuterRef(trimmed_prefix)) query.where.add(lookup, AND) condition, needed_inner = self.build_filter(Exists(query)) if contains_louter: or_null_condition, _ = self.build_filter( ('%s__isnull' % trimmed_prefix, True), current_negated=True, branch_negated=True, can_reuse=can_reuse) condition.add(or_null_condition, OR) # Note that the end result will be: # (outercol NOT IN innerq AND outercol IS NOT NULL) OR outercol IS NULL. # This might look crazy but due to how IN works, this seems to be # correct. If the IS NOT NULL check is removed then outercol NOT # IN will return UNKNOWN. If the IS NULL check is removed, then if # outercol IS NULL we will not match the row. 
return condition, needed_inner def set_empty(self): self.where.add(NothingNode(), AND) for query in self.combined_queries: query.set_empty() def is_empty(self): return any(isinstance(c, NothingNode) for c in self.where.children) def set_limits(self, low=None, high=None): """ Adjust the limits on the rows retrieved. Use low/high to set these, as it makes it more Pythonic to read and write. When the SQL query is created, convert them to the appropriate offset and limit values. Apply any limits passed in here to the existing constraints. Add low to the current low value and clamp both to any existing high value. """ if high is not None: if self.high_mark is not None: self.high_mark = min(self.high_mark, self.low_mark + high) else: self.high_mark = self.low_mark + high if low is not None: if self.high_mark is not None: self.low_mark = min(self.high_mark, self.low_mark + low) else: self.low_mark = self.low_mark + low if self.low_mark == self.high_mark: self.set_empty() def clear_limits(self): """Clear any existing limits.""" self.low_mark, self.high_mark = 0, None @property def is_sliced(self): return self.low_mark != 0 or self.high_mark is not None def has_limit_one(self): return self.high_mark is not None and (self.high_mark - self.low_mark) == 1 def can_filter(self): """ Return True if adding filters to this instance is still possible. Typically, this means no limits or offsets have been put on the results. """ return not self.is_sliced def clear_select_clause(self): """Remove all fields from SELECT clause.""" self.select = () self.default_cols = False self.select_related = False self.set_extra_mask(()) self.set_annotation_mask(()) def clear_select_fields(self): """ Clear the list of fields to select (but not extra_select columns). Some queryset types completely replace any existing list of select columns. """ self.select = () self.values_select = () def add_select_col(self, col, name): self.select += col, self.values_select += name, def set_select(self, cols): self.default_cols = False self.select = tuple(cols) def add_distinct_fields(self, *field_names): """ Add and resolve the given fields to the query's "distinct on" clause. """ self.distinct_fields = field_names self.distinct = True def add_fields(self, field_names, allow_m2m=True): """ Add the given (model) fields to the select set. Add the field names in the order specified. """ alias = self.get_initial_alias() opts = self.get_meta() try: cols = [] for name in field_names: # Join promotion note - we must not remove any rows here, so # if there is no existing joins, use outer join. join_info = self.setup_joins(name.split(LOOKUP_SEP), opts, alias, allow_many=allow_m2m) targets, final_alias, joins = self.trim_joins( join_info.targets, join_info.joins, join_info.path, ) for target in targets: cols.append(join_info.transform_function(target, final_alias)) if cols: self.set_select(cols) except MultiJoin: raise FieldError("Invalid field name: '%s'" % name) except FieldError: if LOOKUP_SEP in name: # For lookups spanning over relationships, show the error # from the model on which the lookup failed. raise elif name in self.annotations: raise FieldError( "Cannot select the '%s' alias. Use annotate() to promote " "it." % name ) else: names = sorted([ *get_field_names_from_opts(opts), *self.extra, *self.annotation_select, *self._filtered_relations ]) raise FieldError("Cannot resolve keyword %r into field. 
" "Choices are: %s" % (name, ", ".join(names))) def add_ordering(self, *ordering): """ Add items from the 'ordering' sequence to the query's "order by" clause. These items are either field names (not column names) -- possibly with a direction prefix ('-' or '?') -- or OrderBy expressions. If 'ordering' is empty, clear all ordering from the query. """ errors = [] for item in ordering: if isinstance(item, str): if item == '?': continue if item.startswith('-'): item = item[1:] if item in self.annotations: continue if self.extra and item in self.extra: continue # names_to_path() validates the lookup. A descriptive # FieldError will be raise if it's not. self.names_to_path(item.split(LOOKUP_SEP), self.model._meta) elif not hasattr(item, 'resolve_expression'): errors.append(item) if getattr(item, 'contains_aggregate', False): raise FieldError( 'Using an aggregate in order_by() without also including ' 'it in annotate() is not allowed: %s' % item ) if errors: raise FieldError('Invalid order_by arguments: %s' % errors) if ordering: self.order_by += ordering else: self.default_ordering = False def clear_ordering(self, force_empty): """ Remove any ordering settings. If 'force_empty' is True, there will be no ordering in the resulting query (not even the model's default). """ self.order_by = () self.extra_order_by = () if force_empty: self.default_ordering = False def set_group_by(self, allow_aliases=True): """ Expand the GROUP BY clause required by the query. This will usually be the set of all non-aggregate fields in the return data. If the database backend supports grouping by the primary key, and the query would be equivalent, the optimization will be made automatically. """ # Column names from JOINs to check collisions with aliases. if allow_aliases: column_names = set() seen_models = set() for join in list(self.alias_map.values())[1:]: # Skip base table. model = join.join_field.related_model if model not in seen_models: column_names.update({ field.column for field in model._meta.local_concrete_fields }) seen_models.add(model) group_by = list(self.select) if self.annotation_select: for alias, annotation in self.annotation_select.items(): if not allow_aliases or alias in column_names: alias = None group_by_cols = annotation.get_group_by_cols(alias=alias) group_by.extend(group_by_cols) self.group_by = tuple(group_by) def add_select_related(self, fields): """ Set up the select_related data structure so that we only select certain related models (as opposed to all models, when self.select_related=True). """ if isinstance(self.select_related, bool): field_dict = {} else: field_dict = self.select_related for field in fields: d = field_dict for part in field.split(LOOKUP_SEP): d = d.setdefault(part, {}) self.select_related = field_dict def add_extra(self, select, select_params, where, params, tables, order_by): """ Add data to the various extra_* attributes for user-created additions to the query. """ if select: # We need to pair any placeholder markers in the 'select' # dictionary with their parameters in 'select_params' so that # subsequent updates to the select dictionary also adjust the # parameters appropriately. 
select_pairs = {} if select_params: param_iter = iter(select_params) else: param_iter = iter([]) for name, entry in select.items(): entry = str(entry) entry_params = [] pos = entry.find("%s") while pos != -1: if pos == 0 or entry[pos - 1] != '%': entry_params.append(next(param_iter)) pos = entry.find("%s", pos + 2) select_pairs[name] = (entry, entry_params) self.extra.update(select_pairs) if where or params: self.where.add(ExtraWhere(where, params), AND) if tables: self.extra_tables += tuple(tables) if order_by: self.extra_order_by = order_by def clear_deferred_loading(self): """Remove any fields from the deferred loading set.""" self.deferred_loading = (frozenset(), True) def add_deferred_loading(self, field_names): """ Add the given list of model field names to the set of fields to exclude from loading from the database when automatic column selection is done. Add the new field names to any existing field names that are deferred (or removed from any existing field names that are marked as the only ones for immediate loading). """ # Fields on related models are stored in the literal double-underscore # format, so that we can use a set datastructure. We do the foo__bar # splitting and handling when computing the SQL column names (as part of # get_columns()). existing, defer = self.deferred_loading if defer: # Add to existing deferred names. self.deferred_loading = existing.union(field_names), True else: # Remove names from the set of any existing "immediate load" names. self.deferred_loading = existing.difference(field_names), False def add_immediate_loading(self, field_names): """ Add the given list of model field names to the set of fields to retrieve when the SQL is executed ("immediate loading" fields). The field names replace any existing immediate loading field names. If there are field names already specified for deferred loading, remove those names from the new field_names before storing the new names for immediate loading. (That is, immediate loading overrides any existing immediate values, but respects existing deferrals.) """ existing, defer = self.deferred_loading field_names = set(field_names) if 'pk' in field_names: field_names.remove('pk') field_names.add(self.get_meta().pk.name) if defer: # Remove any existing deferred names from the current set before # setting the new names. self.deferred_loading = field_names.difference(existing), False else: # Replace any existing "immediate load" field names. self.deferred_loading = frozenset(field_names), False def get_loaded_field_names(self): """ If any fields are marked to be deferred, return a dictionary mapping models to a set of names in those fields that will be loaded. If a model is not in the returned dictionary, none of its fields are deferred. If no fields are marked for deferral, return an empty dictionary. 
""" # We cache this because we call this function multiple times # (compiler.fill_related_selections, query.iterator) try: return self._loaded_field_names_cache except AttributeError: collection = {} self.deferred_to_data(collection, self.get_loaded_field_names_cb) self._loaded_field_names_cache = collection return collection def get_loaded_field_names_cb(self, target, model, fields): """Callback used by get_deferred_field_names().""" target[model] = {f.attname for f in fields} def set_annotation_mask(self, names): """Set the mask of annotations that will be returned by the SELECT.""" if names is None: self.annotation_select_mask = None else: self.annotation_select_mask = set(names) self._annotation_select_cache = None def append_annotation_mask(self, names): if self.annotation_select_mask is not None: self.set_annotation_mask(self.annotation_select_mask.union(names)) def set_extra_mask(self, names): """ Set the mask of extra select items that will be returned by SELECT. Don't remove them from the Query since they might be used later. """ if names is None: self.extra_select_mask = None else: self.extra_select_mask = set(names) self._extra_select_cache = None def set_values(self, fields): self.select_related = False self.clear_deferred_loading() self.clear_select_fields() if fields: field_names = [] extra_names = [] annotation_names = [] if not self.extra and not self.annotations: # Shortcut - if there are no extra or annotations, then # the values() clause must be just field names. field_names = list(fields) else: self.default_cols = False for f in fields: if f in self.extra_select: extra_names.append(f) elif f in self.annotation_select: annotation_names.append(f) else: field_names.append(f) self.set_extra_mask(extra_names) self.set_annotation_mask(annotation_names) selected = frozenset(field_names + extra_names + annotation_names) else: field_names = [f.attname for f in self.model._meta.concrete_fields] selected = frozenset(field_names) # Selected annotations must be known before setting the GROUP BY # clause. if self.group_by is True: self.add_fields((f.attname for f in self.model._meta.concrete_fields), False) # Disable GROUP BY aliases to avoid orphaning references to the # SELECT clause which is about to be cleared. self.set_group_by(allow_aliases=False) self.clear_select_fields() elif self.group_by: # Resolve GROUP BY annotation references if they are not part of # the selected fields anymore. group_by = [] for expr in self.group_by: if isinstance(expr, Ref) and expr.refs not in selected: expr = self.annotations[expr.refs] group_by.append(expr) self.group_by = tuple(group_by) self.values_select = tuple(field_names) self.add_fields(field_names, True) @property def annotation_select(self): """ Return the dictionary of aggregate columns that are not masked and should be used in the SELECT clause. Cache this result for performance. 
""" if self._annotation_select_cache is not None: return self._annotation_select_cache elif not self.annotations: return {} elif self.annotation_select_mask is not None: self._annotation_select_cache = { k: v for k, v in self.annotations.items() if k in self.annotation_select_mask } return self._annotation_select_cache else: return self.annotations @property def extra_select(self): if self._extra_select_cache is not None: return self._extra_select_cache if not self.extra: return {} elif self.extra_select_mask is not None: self._extra_select_cache = { k: v for k, v in self.extra.items() if k in self.extra_select_mask } return self._extra_select_cache else: return self.extra def trim_start(self, names_with_path): """ Trim joins from the start of the join path. The candidates for trim are the PathInfos in names_with_path structure that are m2m joins. Also set the select column so the start matches the join. This method is meant to be used for generating the subquery joins & cols in split_exclude(). Return a lookup usable for doing outerq.filter(lookup=self) and a boolean indicating if the joins in the prefix contain a LEFT OUTER join. _""" all_paths = [] for _, paths in names_with_path: all_paths.extend(paths) contains_louter = False # Trim and operate only on tables that were generated for # the lookup part of the query. That is, avoid trimming # joins generated for F() expressions. lookup_tables = [ t for t in self.alias_map if t in self._lookup_joins or t == self.base_table ] for trimmed_paths, path in enumerate(all_paths): if path.m2m: break if self.alias_map[lookup_tables[trimmed_paths + 1]].join_type == LOUTER: contains_louter = True alias = lookup_tables[trimmed_paths] self.unref_alias(alias) # The path.join_field is a Rel, lets get the other side's field join_field = path.join_field.field # Build the filter prefix. paths_in_prefix = trimmed_paths trimmed_prefix = [] for name, path in names_with_path: if paths_in_prefix - len(path) < 0: break trimmed_prefix.append(name) paths_in_prefix -= len(path) trimmed_prefix.append( join_field.foreign_related_fields[0].name) trimmed_prefix = LOOKUP_SEP.join(trimmed_prefix) # Lets still see if we can trim the first join from the inner query # (that is, self). We can't do this for: # - LEFT JOINs because we would miss those rows that have nothing on # the outer side, # - INNER JOINs from filtered relations because we would miss their # filters. first_join = self.alias_map[lookup_tables[trimmed_paths + 1]] if first_join.join_type != LOUTER and not first_join.filtered_relation: select_fields = [r[0] for r in join_field.related_fields] select_alias = lookup_tables[trimmed_paths + 1] self.unref_alias(lookup_tables[trimmed_paths]) extra_restriction = join_field.get_extra_restriction( self.where_class, None, lookup_tables[trimmed_paths + 1]) if extra_restriction: self.where.add(extra_restriction, AND) else: # TODO: It might be possible to trim more joins from the start of the # inner query if it happens to have a longer join chain containing the # values in select_fields. Lets punt this one for now. select_fields = [r[1] for r in join_field.related_fields] select_alias = lookup_tables[trimmed_paths] # The found starting point is likely a Join instead of a BaseTable reference. # But the first entry in the query's FROM clause must not be a JOIN. 
for table in self.alias_map: if self.alias_refcount[table] > 0: self.alias_map[table] = BaseTable(self.alias_map[table].table_name, table) break self.set_select([f.get_col(select_alias) for f in select_fields]) return trimmed_prefix, contains_louter def is_nullable(self, field): """ Check if the given field should be treated as nullable. Some backends treat '' as null and Django treats such fields as nullable for those backends. In such situations field.null can be False even if we should treat the field as nullable. """ # We need to use DEFAULT_DB_ALIAS here, as QuerySet does not have # (nor should it have) knowledge of which connection is going to be # used. The proper fix would be to defer all decisions where # is_nullable() is needed to the compiler stage, but that is not easy # to do currently. return ( connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and field.empty_strings_allowed ) or field.null def get_order_dir(field, default='ASC'): """ Return the field name and direction for an order specification. For example, '-foo' is returned as ('foo', 'DESC'). The 'default' param is used to indicate which way no prefix (or a '+' prefix) should sort. The '-' prefix always sorts the opposite way. """ dirn = ORDER_DIR[default] if field[0] == '-': return field[1:], dirn[1] return field, dirn[0] def add_to_dict(data, key, value): """ Add "value" to the set of values for "key", whether or not "key" already exists. """ if key in data: data[key].add(value) else: data[key] = {value} def is_reverse_o2o(field): """ Check if the given field is reverse-o2o. The field is expected to be some sort of relation field or related object. """ return field.is_relation and field.one_to_one and not field.concrete class JoinPromoter: """ A class to abstract away join promotion problems for complex filter conditions. """ def __init__(self, connector, num_children, negated): self.connector = connector self.negated = negated if self.negated: if connector == AND: self.effective_connector = OR else: self.effective_connector = AND else: self.effective_connector = self.connector self.num_children = num_children # Maps of table alias to how many times it is seen as required for # inner and/or outer joins. self.votes = Counter() def add_votes(self, votes): """ Add single vote per item to self.votes. Parameter can be any iterable. """ self.votes.update(votes) def update_join_types(self, query): """ Change join types so that the generated query is as efficient as possible, but still correct. So, change as many joins as possible to INNER, but don't make OUTER joins INNER if that could remove results from the query. """ to_promote = set() to_demote = set() # The effective_connector is used so that NOT (a AND b) is treated # similarly to (a OR b) for join promotion. for table, votes in self.votes.items(): # We must use outer joins in OR case when the join isn't contained # in all of the joins. Otherwise the INNER JOIN itself could remove # valid results. Consider the case where a model with rel_a and # rel_b relations is queried with rel_a__col=1 | rel_b__col=2. Now, # if rel_a join doesn't produce any results is null (for example # reverse foreign key or null value in direct foreign key), and # there is a matching row in rel_b with col=2, then an INNER join # to rel_a would remove a valid match from the query. So, we need # to promote any existing INNER to LOUTER (it is possible this # promotion in turn will be demoted later on). 
            if self.effective_connector == 'OR' and votes < self.num_children:
                to_promote.add(table)
            # If connector is AND and there is a filter that can match only
            # when there is a joinable row, then use INNER. For example, in
            # rel_a__col=1 & rel_b__col=2, if either of the rels produce NULL
            # as join output, then the col=1 or col=2 can't match (as
            # NULL=anything is always false).
            # For the OR case, if all children voted for a join to be inner,
            # then we can use INNER for the join. For example:
            # (rel_a__col__icontains=Alex | rel_a__col__icontains=Russell)
            # then if rel_a doesn't produce any rows, the whole condition
            # can't match. Hence we can safely use INNER join.
            if self.effective_connector == 'AND' or (
                    self.effective_connector == 'OR' and votes == self.num_children):
                to_demote.add(table)
            # Finally, what happens in cases where we have:
            # (rel_a__col=1|rel_b__col=2) & rel_a__col__gte=0
            # Now, we first generate the OR clause, and promote joins for it
            # in the first if branch above. Both rel_a and rel_b are promoted
            # to LOUTER joins. After that we do the AND case. The OR case
            # voted no inner joins but the rel_a__col__gte=0 votes inner join
            # for rel_a. We demote it back to INNER join (in AND case a single
            # vote is enough). The demotion is OK, if rel_a doesn't produce
            # rows, then the rel_a__col__gte=0 clause can't be true, and thus
            # the whole clause must be false. So, it is safe to use INNER
            # join.
            # Note that in this example we could just as well have the __gte
            # clause and the OR clause swapped. Or we could replace the __gte
            # clause with an OR clause containing rel_a__col=1|rel_a__col=2,
            # and again we could safely demote to INNER.
        query.promote_joins(to_promote)
        query.demote_joins(to_demote)
        return to_demote
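

# A minimal, self-contained sketch (not part of Django) of the voting rule
# implemented above, assuming a hypothetical model with two nullable relations
# rel_a and rel_b filtered as Q(rel_a__col=1) | Q(rel_b__col=2). Each alias
# receives one vote out of two children, so under OR both joins must stay
# LEFT OUTER:
def _join_promotion_sketch():
    from collections import Counter
    votes = Counter()
    votes.update(['rel_a'])  # vote cast by the rel_a__col=1 child
    votes.update(['rel_b'])  # vote cast by the rel_b__col=2 child
    num_children, connector = 2, 'OR'
    to_promote, to_demote = set(), set()
    for alias, count in votes.items():
        if connector == 'OR' and count < num_children:
            to_promote.add(alias)  # partial vote under OR -> promote to LOUTER
        if connector == 'AND' or count == num_children:
            to_demote.add(alias)  # unanimous vote (or AND) -> INNER is safe
    return to_promote, to_demote  # ({'rel_a', 'rel_b'}, set())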
b48dafa9547ce8784a9f63ba7ee636a0f69be9481cc7073c38b5ed2895ffeace
import collections import re from functools import partial from itertools import chain from django.core.exceptions import EmptyResultSet, FieldError from django.db import DatabaseError, NotSupportedError from django.db.models.constants import LOOKUP_SEP from django.db.models.expressions import F, OrderBy, RawSQL, Ref, Value from django.db.models.functions import Cast, Random from django.db.models.query_utils import Q, select_related_descend from django.db.models.sql.constants import ( CURSOR, GET_ITERATOR_CHUNK_SIZE, MULTI, NO_RESULTS, ORDER_DIR, SINGLE, ) from django.db.models.sql.query import Query, get_order_dir from django.db.transaction import TransactionManagementError from django.utils.functional import cached_property from django.utils.hashable import make_hashable from django.utils.regex_helper import _lazy_re_compile class SQLCompiler: # Multiline ordering SQL clause may appear from RawSQL. ordering_parts = _lazy_re_compile( r'^(.*)\s(?:ASC|DESC).*', re.MULTILINE | re.DOTALL, ) def __init__(self, query, connection, using): self.query = query self.connection = connection self.using = using self.quote_cache = {'*': '*'} # The select, klass_info, and annotations are needed by QuerySet.iterator() # these are set as a side-effect of executing the query. Note that we calculate # separately a list of extra select columns needed for grammatical correctness # of the query, but these columns are not included in self.select. self.select = None self.annotation_col_map = None self.klass_info = None self._meta_ordering = None def setup_query(self): if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map): self.query.get_initial_alias() self.select, self.klass_info, self.annotation_col_map = self.get_select() self.col_count = len(self.select) def pre_sql_setup(self): """ Do any necessary class setup immediately prior to producing SQL. This is for things that can't necessarily be done in __init__ because we might not have all the pieces in place at that time. """ self.setup_query() order_by = self.get_order_by() self.where, self.having = self.query.where.split_having() extra_select = self.get_extra_select(order_by, self.select) self.has_extra_select = bool(extra_select) group_by = self.get_group_by(self.select + extra_select, order_by) return extra_select, order_by, group_by def get_group_by(self, select, order_by): """ Return a list of 2-tuples of form (sql, params). The logic of what exactly the GROUP BY clause contains is hard to describe in other words than "if it passes the test suite, then it is correct". """ # Some examples: # SomeModel.objects.annotate(Count('somecol')) # GROUP BY: all fields of the model # # SomeModel.objects.values('name').annotate(Count('somecol')) # GROUP BY: name # # SomeModel.objects.annotate(Count('somecol')).values('name') # GROUP BY: all cols of the model # # SomeModel.objects.values('name', 'pk').annotate(Count('somecol')).values('pk') # GROUP BY: name, pk # # SomeModel.objects.values('name').annotate(Count('somecol')).values('pk') # GROUP BY: name, pk # # In fact, the self.query.group_by is the minimal set to GROUP BY. It # can't be ever restricted to a smaller set, but additional columns in # HAVING, ORDER BY, and SELECT clauses are added to it. 
Unfortunately # the end result is that it is impossible to force the query to have # a chosen GROUP BY clause - you can almost do this by using the form: # .values(*wanted_cols).annotate(AnAggregate()) # but any later annotations, extra selects, values calls that # refer some column outside of the wanted_cols, order_by, or even # filter calls can alter the GROUP BY clause. # The query.group_by is either None (no GROUP BY at all), True # (group by select fields), or a list of expressions to be added # to the group by. if self.query.group_by is None: return [] expressions = [] if self.query.group_by is not True: # If the group by is set to a list (by .values() call most likely), # then we need to add everything in it to the GROUP BY clause. # Backwards compatibility hack for setting query.group_by. Remove # when we have public API way of forcing the GROUP BY clause. # Converts string references to expressions. for expr in self.query.group_by: if not hasattr(expr, 'as_sql'): expressions.append(self.query.resolve_ref(expr)) else: expressions.append(expr) # Note that even if the group_by is set, it is only the minimal # set to group by. So, we need to add cols in select, order_by, and # having into the select in any case. ref_sources = { expr.source for expr in expressions if isinstance(expr, Ref) } for expr, _, _ in select: # Skip members of the select clause that are already included # by reference. if expr in ref_sources: continue cols = expr.get_group_by_cols() for col in cols: expressions.append(col) if not self._meta_ordering: for expr, (sql, params, is_ref) in order_by: # Skip references to the SELECT clause, as all expressions in # the SELECT clause are already part of the GROUP BY. if not is_ref: expressions.extend(expr.get_group_by_cols()) having_group_by = self.having.get_group_by_cols() if self.having else () for expr in having_group_by: expressions.append(expr) result = [] seen = set() expressions = self.collapse_group_by(expressions, having_group_by) for expr in expressions: sql, params = self.compile(expr) sql, params = expr.select_format(self, sql, params) params_hash = make_hashable(params) if (sql, params_hash) not in seen: result.append((sql, params)) seen.add((sql, params_hash)) return result def collapse_group_by(self, expressions, having): # If the DB can group by primary key, then group by the primary key of # query's main model. Note that for PostgreSQL the GROUP BY clause must # include the primary key of every table, but for MySQL it is enough to # have the main table's primary key. if self.connection.features.allows_group_by_pk: # Determine if the main model's primary key is in the query. pk = None for expr in expressions: # Is this a reference to query's base table primary key? If the # expression isn't a Col-like, then skip the expression. if (getattr(expr, 'target', None) == self.query.model._meta.pk and getattr(expr, 'alias', None) == self.query.base_table): pk = expr break # If the main model's primary key is in the query, group by that # field, HAVING expressions, and expressions associated with tables # that don't have a primary key included in the grouped columns. 
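            # On backends where allows_group_by_pk is enabled (MySQL), the other
            # selected columns of the main table are functionally dependent on
            # its primary key, so grouping by the pk alone is valid for it.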
if pk: pk_aliases = { expr.alias for expr in expressions if hasattr(expr, 'target') and expr.target.primary_key } expressions = [pk] + [ expr for expr in expressions if expr in having or ( getattr(expr, 'alias', None) is not None and expr.alias not in pk_aliases ) ] elif self.connection.features.allows_group_by_selected_pks: # Filter out all expressions associated with a table's primary key # present in the grouped columns. This is done by identifying all # tables that have their primary key included in the grouped # columns and removing non-primary key columns referring to them. # Unmanaged models are excluded because they could be representing # database views on which the optimization might not be allowed. pks = { expr for expr in expressions if ( hasattr(expr, 'target') and expr.target.primary_key and self.connection.features.allows_group_by_selected_pks_on_model(expr.target.model) ) } aliases = {expr.alias for expr in pks} expressions = [ expr for expr in expressions if expr in pks or getattr(expr, 'alias', None) not in aliases ] return expressions def get_select(self): """ Return three values: - a list of 3-tuples of (expression, (sql, params), alias) - a klass_info structure, - a dictionary of annotations The (sql, params) is what the expression will produce, and alias is the "AS alias" for the column (possibly None). The klass_info structure contains the following information: - The base model of the query. - Which columns for that model are present in the query (by position of the select clause). - related_klass_infos: [f, klass_info] to descent into The annotations is a dictionary of {'attname': column position} values. """ select = [] klass_info = None annotations = {} select_idx = 0 for alias, (sql, params) in self.query.extra_select.items(): annotations[alias] = select_idx select.append((RawSQL(sql, params), alias)) select_idx += 1 assert not (self.query.select and self.query.default_cols) if self.query.default_cols: cols = self.get_default_columns() else: # self.query.select is a special case. These columns never go to # any model. cols = self.query.select if cols: select_list = [] for col in cols: select_list.append(select_idx) select.append((col, None)) select_idx += 1 klass_info = { 'model': self.query.model, 'select_fields': select_list, } for alias, annotation in self.query.annotation_select.items(): annotations[alias] = select_idx select.append((annotation, alias)) select_idx += 1 if self.query.select_related: related_klass_infos = self.get_related_selections(select) klass_info['related_klass_infos'] = related_klass_infos def get_select_from_parent(klass_info): for ki in klass_info['related_klass_infos']: if ki['from_parent']: ki['select_fields'] = (klass_info['select_fields'] + ki['select_fields']) get_select_from_parent(ki) get_select_from_parent(klass_info) ret = [] for col, alias in select: try: sql, params = self.compile(col) except EmptyResultSet: # Select a predicate that's always False. 
sql, params = '0', () else: sql, params = col.select_format(self, sql, params) ret.append((col, (sql, params), alias)) return ret, klass_info, annotations def _order_by_pairs(self): if self.query.extra_order_by: ordering = self.query.extra_order_by elif not self.query.default_ordering: ordering = self.query.order_by elif self.query.order_by: ordering = self.query.order_by elif self.query.get_meta().ordering: ordering = self.query.get_meta().ordering self._meta_ordering = ordering else: ordering = [] if self.query.standard_ordering: default_order, _ = ORDER_DIR['ASC'] else: default_order, _ = ORDER_DIR['DESC'] for field in ordering: if hasattr(field, 'resolve_expression'): if isinstance(field, Value): # output_field must be resolved for constants. field = Cast(field, field.output_field) if not isinstance(field, OrderBy): field = field.asc() if not self.query.standard_ordering: field = field.copy() field.reverse_ordering() yield field, False continue if field == '?': # random yield OrderBy(Random()), False continue col, order = get_order_dir(field, default_order) descending = order == 'DESC' if col in self.query.annotation_select: # Reference to expression in SELECT clause yield ( OrderBy( Ref(col, self.query.annotation_select[col]), descending=descending, ), True, ) continue if col in self.query.annotations: # References to an expression which is masked out of the SELECT # clause. if self.query.combinator and self.select: # Don't use the resolved annotation because other # combinated queries might define it differently. expr = F(col) else: expr = self.query.annotations[col] if isinstance(expr, Value): # output_field must be resolved for constants. expr = Cast(expr, expr.output_field) yield OrderBy(expr, descending=descending), False continue if '.' in field: # This came in through an extra(order_by=...) addition. Pass it # on verbatim. table, col = col.split('.', 1) yield ( OrderBy( RawSQL('%s.%s' % (self.quote_name_unless_alias(table), col), []), descending=descending, ), False, ) continue if self.query.extra and col in self.query.extra: if col in self.query.extra_select: yield ( OrderBy(Ref(col, RawSQL(*self.query.extra[col])), descending=descending), True, ) else: yield ( OrderBy(RawSQL(*self.query.extra[col]), descending=descending), False, ) else: if self.query.combinator and self.select: # Don't use the first model's field because other # combinated queries might define it differently. yield OrderBy(F(col), descending=descending), False else: # 'col' is of the form 'field' or 'field1__field2' or # '-field1__field2__field', etc. yield from self.find_ordering_name( field, self.query.get_meta(), default_order=default_order, ) def get_order_by(self): """ Return a list of 2-tuples of the form (expr, (sql, params, is_ref)) for the ORDER BY clause. The order_by clause can alter the select clause (for example it can add aliases to clauses that do not yet have one, or it can add totally new select clauses). """ result = [] seen = set() for expr, is_ref in self._order_by_pairs(): resolved = expr.resolve_expression(self.query, allow_joins=True, reuse=None) if self.query.combinator and self.select: src = resolved.get_source_expressions()[0] expr_src = expr.get_source_expressions()[0] # Relabel order by columns to raw numbers if this is a combined # query; necessary since the columns can't be referenced by the # fully qualified name and the simple column names may collide. 
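                # For example, ORDER BY "author"."name" is rewritten to
                # ORDER BY 2 when that column is the second column of the
                # combined SELECT list.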
for idx, (sel_expr, _, col_alias) in enumerate(self.select): if is_ref and col_alias == src.refs: src = src.source elif col_alias and not ( isinstance(expr_src, F) and col_alias == expr_src.name ): continue if src == sel_expr: resolved.set_source_expressions([RawSQL('%d' % (idx + 1), ())]) break else: if col_alias: raise DatabaseError('ORDER BY term does not match any column in the result set.') # Add column used in ORDER BY clause to the selected # columns and to each combined query. order_by_idx = len(self.query.select) + 1 col_name = f'__orderbycol{order_by_idx}' for q in self.query.combined_queries: q.add_annotation(expr_src, col_name) self.query.add_select_col(resolved, col_name) resolved.set_source_expressions([RawSQL(f'{order_by_idx}', ())]) sql, params = self.compile(resolved) # Don't add the same column twice, but the order direction is # not taken into account so we strip it. When this entire method # is refactored into expressions, then we can check each part as we # generate it. without_ordering = self.ordering_parts.search(sql)[1] params_hash = make_hashable(params) if (without_ordering, params_hash) in seen: continue seen.add((without_ordering, params_hash)) result.append((resolved, (sql, params, is_ref))) return result def get_extra_select(self, order_by, select): extra_select = [] if self.query.distinct and not self.query.distinct_fields: select_sql = [t[1] for t in select] for expr, (sql, params, is_ref) in order_by: without_ordering = self.ordering_parts.search(sql)[1] if not is_ref and (without_ordering, params) not in select_sql: extra_select.append((expr, (without_ordering, params), None)) return extra_select def quote_name_unless_alias(self, name): """ A wrapper around connection.ops.quote_name that doesn't quote aliases for table names. This avoids problems with some SQL dialects that treat quoted strings specially (e.g. PostgreSQL). """ if name in self.quote_cache: return self.quote_cache[name] if ((name in self.query.alias_map and name not in self.query.table_map) or name in self.query.extra_select or ( self.query.external_aliases.get(name) and name not in self.query.table_map)): self.quote_cache[name] = name return name r = self.connection.ops.quote_name(name) self.quote_cache[name] = r return r def compile(self, node): vendor_impl = getattr(node, 'as_' + self.connection.vendor, None) if vendor_impl: sql, params = vendor_impl(self, self.connection) else: sql, params = node.as_sql(self, self.connection) return sql, params def get_combinator_sql(self, combinator, all): features = self.connection.features compilers = [ query.get_compiler(self.using, self.connection) for query in self.query.combined_queries if not query.is_empty() ] if not features.supports_slicing_ordering_in_compound: for query, compiler in zip(self.query.combined_queries, compilers): if query.low_mark or query.high_mark: raise DatabaseError('LIMIT/OFFSET not allowed in subqueries of compound statements.') if compiler.get_order_by(): raise DatabaseError('ORDER BY not allowed in subqueries of compound statements.') parts = () for compiler in compilers: try: # If the columns list is limited, then all combined queries # must have the same columns list. Set the selects defined on # the query on all combined queries, if not already set. 
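                    # For example, qs1.values('pk', 'name').union(qs2) compiles
                    # qs2 with the same ('pk', 'name') values list so both sides
                    # of the UNION select matching columns.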
if not compiler.query.values_select and self.query.values_select: compiler.query = compiler.query.clone() compiler.query.set_values(( *self.query.extra_select, *self.query.values_select, *self.query.annotation_select, )) part_sql, part_args = compiler.as_sql() if compiler.query.combinator: # Wrap in a subquery if wrapping in parentheses isn't # supported. if not features.supports_parentheses_in_compound: part_sql = 'SELECT * FROM ({})'.format(part_sql) # Add parentheses when combining with compound query if not # already added for all compound queries. elif not features.supports_slicing_ordering_in_compound: part_sql = '({})'.format(part_sql) parts += ((part_sql, part_args),) except EmptyResultSet: # Omit the empty queryset with UNION and with DIFFERENCE if the # first queryset is nonempty. if combinator == 'union' or (combinator == 'difference' and parts): continue raise if not parts: raise EmptyResultSet combinator_sql = self.connection.ops.set_operators[combinator] if all and combinator == 'union': combinator_sql += ' ALL' braces = '({})' if features.supports_slicing_ordering_in_compound else '{}' sql_parts, args_parts = zip(*((braces.format(sql), args) for sql, args in parts)) result = [' {} '.format(combinator_sql).join(sql_parts)] params = [] for part in args_parts: params.extend(part) return result, params def as_sql(self, with_limits=True, with_col_aliases=False): """ Create the SQL for this query. Return the SQL string and list of parameters. If 'with_limits' is False, any limit/offset information is not included in the query. """ refcounts_before = self.query.alias_refcount.copy() try: extra_select, order_by, group_by = self.pre_sql_setup() for_update_part = None # Is a LIMIT/OFFSET clause needed? with_limit_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark) combinator = self.query.combinator features = self.connection.features if combinator: if not getattr(features, 'supports_select_{}'.format(combinator)): raise NotSupportedError('{} is not supported on this database backend.'.format(combinator)) result, params = self.get_combinator_sql(combinator, self.query.combinator_all) else: distinct_fields, distinct_params = self.get_distinct() # This must come after 'select', 'ordering', and 'distinct' # (see docstring of get_from_clause() for details). 
from_, f_params = self.get_from_clause() where, w_params = self.compile(self.where) if self.where is not None else ("", []) having, h_params = self.compile(self.having) if self.having is not None else ("", []) result = ['SELECT'] params = [] if self.query.distinct: distinct_result, distinct_params = self.connection.ops.distinct_sql( distinct_fields, distinct_params, ) result += distinct_result params += distinct_params out_cols = [] col_idx = 1 for _, (s_sql, s_params), alias in self.select + extra_select: if alias: s_sql = '%s AS %s' % (s_sql, self.connection.ops.quote_name(alias)) elif with_col_aliases: s_sql = '%s AS %s' % ( s_sql, self.connection.ops.quote_name('col%d' % col_idx), ) col_idx += 1 params.extend(s_params) out_cols.append(s_sql) result += [', '.join(out_cols), 'FROM', *from_] params.extend(f_params) if self.query.select_for_update and self.connection.features.has_select_for_update: if self.connection.get_autocommit(): raise TransactionManagementError('select_for_update cannot be used outside of a transaction.') if with_limit_offset and not self.connection.features.supports_select_for_update_with_limit: raise NotSupportedError( 'LIMIT/OFFSET is not supported with ' 'select_for_update on this database backend.' ) nowait = self.query.select_for_update_nowait skip_locked = self.query.select_for_update_skip_locked of = self.query.select_for_update_of no_key = self.query.select_for_no_key_update # If it's a NOWAIT/SKIP LOCKED/OF/NO KEY query but the # backend doesn't support it, raise NotSupportedError to # prevent a possible deadlock. if nowait and not self.connection.features.has_select_for_update_nowait: raise NotSupportedError('NOWAIT is not supported on this database backend.') elif skip_locked and not self.connection.features.has_select_for_update_skip_locked: raise NotSupportedError('SKIP LOCKED is not supported on this database backend.') elif of and not self.connection.features.has_select_for_update_of: raise NotSupportedError('FOR UPDATE OF is not supported on this database backend.') elif no_key and not self.connection.features.has_select_for_no_key_update: raise NotSupportedError( 'FOR NO KEY UPDATE is not supported on this ' 'database backend.' 
) for_update_part = self.connection.ops.for_update_sql( nowait=nowait, skip_locked=skip_locked, of=self.get_select_for_update_of_arguments(), no_key=no_key, ) if for_update_part and self.connection.features.for_update_after_from: result.append(for_update_part) if where: result.append('WHERE %s' % where) params.extend(w_params) grouping = [] for g_sql, g_params in group_by: grouping.append(g_sql) params.extend(g_params) if grouping: if distinct_fields: raise NotImplementedError('annotate() + distinct(fields) is not implemented.') order_by = order_by or self.connection.ops.force_no_ordering() result.append('GROUP BY %s' % ', '.join(grouping)) if self._meta_ordering: order_by = None if having: result.append('HAVING %s' % having) params.extend(h_params) if self.query.explain_query: result.insert(0, self.connection.ops.explain_query_prefix( self.query.explain_format, **self.query.explain_options )) if order_by: ordering = [] for _, (o_sql, o_params, _) in order_by: ordering.append(o_sql) params.extend(o_params) result.append('ORDER BY %s' % ', '.join(ordering)) if with_limit_offset: result.append(self.connection.ops.limit_offset_sql(self.query.low_mark, self.query.high_mark)) if for_update_part and not self.connection.features.for_update_after_from: result.append(for_update_part) if self.query.subquery and extra_select: # If the query is used as a subquery, the extra selects would # result in more columns than the left-hand side expression is # expecting. This can happen when a subquery uses a combination # of order_by() and distinct(), forcing the ordering expressions # to be selected as well. Wrap the query in another subquery # to exclude extraneous selects. sub_selects = [] sub_params = [] for index, (select, _, alias) in enumerate(self.select, start=1): if not alias and with_col_aliases: alias = 'col%d' % index if alias: sub_selects.append("%s.%s" % ( self.connection.ops.quote_name('subquery'), self.connection.ops.quote_name(alias), )) else: select_clone = select.relabeled_clone({select.alias: 'subquery'}) subselect, subparams = select_clone.as_sql(self, self.connection) sub_selects.append(subselect) sub_params.extend(subparams) return 'SELECT %s FROM (%s) subquery' % ( ', '.join(sub_selects), ' '.join(result), ), tuple(sub_params + params) return ' '.join(result), tuple(params) finally: # Finally do cleanup - get rid of the joins we created above. self.query.reset_refcounts(refcounts_before) def get_default_columns(self, start_alias=None, opts=None, from_parent=None): """ Compute the default columns for selecting every field in the base model. Will sometimes be called to pull in related models (e.g. via select_related), in which case "opts" and "start_alias" will be given to provide a starting point for the traversal. Return a list of strings, quoted appropriately for use in SQL directly, as well as a set of aliases used in the select statement (if 'as_pairs' is True, return a list of (alias, col_name) pairs instead of strings as the first component and None as the second component). """ result = [] if opts is None: opts = self.query.get_meta() only_load = self.deferred_to_columns() start_alias = start_alias or self.query.get_initial_alias() # The 'seen_models' is used to optimize checking the needed parent # alias for a given field. This also includes None -> start_alias to # be used by local fields. seen_models = {None: start_alias} for field in opts.concrete_fields: model = field.model._meta.concrete_model # A proxy model will have a different model and concrete_model. 
We # will assign None if the field belongs to this model. if model == opts.model: model = None if from_parent and model is not None and issubclass( from_parent._meta.concrete_model, model._meta.concrete_model): # Avoid loading data for already loaded parents. # We end up here in the case select_related() resolution # proceeds from parent model to child model. In that case the # parent model data is already present in the SELECT clause, # and we want to avoid reloading the same data again. continue if field.model in only_load and field.attname not in only_load[field.model]: continue alias = self.query.join_parent_model(opts, model, start_alias, seen_models) column = field.get_col(alias) result.append(column) return result def get_distinct(self): """ Return a quoted list of fields to use in DISTINCT ON part of the query. This method can alter the tables in the query, and thus it must be called before get_from_clause(). """ result = [] params = [] opts = self.query.get_meta() for name in self.query.distinct_fields: parts = name.split(LOOKUP_SEP) _, targets, alias, joins, path, _, transform_function = self._setup_joins(parts, opts, None) targets, alias, _ = self.query.trim_joins(targets, joins, path) for target in targets: if name in self.query.annotation_select: result.append(name) else: r, p = self.compile(transform_function(target, alias)) result.append(r) params.append(p) return result, params def find_ordering_name(self, name, opts, alias=None, default_order='ASC', already_seen=None): """ Return the table alias (the name might be ambiguous, the alias will not be) and column name for ordering by the given 'name' parameter. The 'name' is of the form 'field1__field2__...__fieldN'. """ name, order = get_order_dir(name, default_order) descending = order == 'DESC' pieces = name.split(LOOKUP_SEP) field, targets, alias, joins, path, opts, transform_function = self._setup_joins(pieces, opts, alias) # If we get to this point and the field is a relation to another model, # append the default ordering for that model unless it is the pk # shortcut or the attribute name of the field that is specified. if ( field.is_relation and opts.ordering and getattr(field, 'attname', None) != pieces[-1] and name != 'pk' ): # Firstly, avoid infinite loops. already_seen = already_seen or set() join_tuple = tuple(getattr(self.query.alias_map[j], 'join_cols', None) for j in joins) if join_tuple in already_seen: raise FieldError('Infinite loop caused by ordering.') already_seen.add(join_tuple) results = [] for item in opts.ordering: if hasattr(item, 'resolve_expression') and not isinstance(item, OrderBy): item = item.desc() if descending else item.asc() if isinstance(item, OrderBy): results.append((item, False)) continue results.extend(self.find_ordering_name(item, opts, alias, order, already_seen)) return results targets, alias, _ = self.query.trim_joins(targets, joins, path) return [(OrderBy(transform_function(t, alias), descending=descending), False) for t in targets] def _setup_joins(self, pieces, opts, alias): """ Helper method for get_order_by() and get_distinct(). get_ordering() and get_distinct() must produce same target columns on same input, as the prefixes of get_ordering() and get_distinct() must match. Executing SQL where this is not true is an error. 
""" alias = alias or self.query.get_initial_alias() field, targets, opts, joins, path, transform_function = self.query.setup_joins(pieces, opts, alias) alias = joins[-1] return field, targets, alias, joins, path, opts, transform_function def get_from_clause(self): """ Return a list of strings that are joined together to go after the "FROM" part of the query, as well as a list any extra parameters that need to be included. Subclasses, can override this to create a from-clause via a "select". This should only be called after any SQL construction methods that might change the tables that are needed. This means the select columns, ordering, and distinct must be done first. """ result = [] params = [] for alias in tuple(self.query.alias_map): if not self.query.alias_refcount[alias]: continue try: from_clause = self.query.alias_map[alias] except KeyError: # Extra tables can end up in self.tables, but not in the # alias_map if they aren't in a join. That's OK. We skip them. continue clause_sql, clause_params = self.compile(from_clause) result.append(clause_sql) params.extend(clause_params) for t in self.query.extra_tables: alias, _ = self.query.table_alias(t) # Only add the alias if it's not already present (the table_alias() # call increments the refcount, so an alias refcount of one means # this is the only reference). if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1: result.append(', %s' % self.quote_name_unless_alias(alias)) return result, params def get_related_selections(self, select, opts=None, root_alias=None, cur_depth=1, requested=None, restricted=None): """ Fill in the information needed for a select_related query. The current depth is measured as the number of connections away from the root model (for example, cur_depth=1 means we are looking at models with direct connections to the root model). """ def _get_field_choices(): direct_choices = (f.name for f in opts.fields if f.is_relation) reverse_choices = ( f.field.related_query_name() for f in opts.related_objects if f.field.unique ) return chain(direct_choices, reverse_choices, self.query._filtered_relations) related_klass_infos = [] if not restricted and cur_depth > self.query.max_depth: # We've recursed far enough; bail out. return related_klass_infos if not opts: opts = self.query.get_meta() root_alias = self.query.get_initial_alias() only_load = self.query.get_loaded_field_names() # Setup for the case when only particular related fields should be # included in the related selection. fields_found = set() if requested is None: restricted = isinstance(self.query.select_related, dict) if restricted: requested = self.query.select_related def get_related_klass_infos(klass_info, related_klass_infos): klass_info['related_klass_infos'] = related_klass_infos for f in opts.fields: field_model = f.model._meta.concrete_model fields_found.add(f.name) if restricted: next = requested.get(f.name, {}) if not f.is_relation: # If a non-related field is used like a relation, # or if a single non-relational field is given. if next or f.name in requested: raise FieldError( "Non-relational field given in select_related: '%s'. 
" "Choices are: %s" % ( f.name, ", ".join(_get_field_choices()) or '(none)', ) ) else: next = False if not select_related_descend(f, restricted, requested, only_load.get(field_model)): continue klass_info = { 'model': f.remote_field.model, 'field': f, 'reverse': False, 'local_setter': f.set_cached_value, 'remote_setter': f.remote_field.set_cached_value if f.unique else lambda x, y: None, 'from_parent': False, } related_klass_infos.append(klass_info) select_fields = [] _, _, _, joins, _, _ = self.query.setup_joins( [f.name], opts, root_alias) alias = joins[-1] columns = self.get_default_columns(start_alias=alias, opts=f.remote_field.model._meta) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next_klass_infos = self.get_related_selections( select, f.remote_field.model._meta, alias, cur_depth + 1, next, restricted) get_related_klass_infos(klass_info, next_klass_infos) if restricted: related_fields = [ (o.field, o.related_model) for o in opts.related_objects if o.field.unique and not o.many_to_many ] for f, model in related_fields: if not select_related_descend(f, restricted, requested, only_load.get(model), reverse=True): continue related_field_name = f.related_query_name() fields_found.add(related_field_name) join_info = self.query.setup_joins([related_field_name], opts, root_alias) alias = join_info.joins[-1] from_parent = issubclass(model, opts.model) and model is not opts.model klass_info = { 'model': model, 'field': f, 'reverse': True, 'local_setter': f.remote_field.set_cached_value, 'remote_setter': f.set_cached_value, 'from_parent': from_parent, } related_klass_infos.append(klass_info) select_fields = [] columns = self.get_default_columns( start_alias=alias, opts=model._meta, from_parent=opts.model) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next = requested.get(f.related_query_name(), {}) next_klass_infos = self.get_related_selections( select, model._meta, alias, cur_depth + 1, next, restricted) get_related_klass_infos(klass_info, next_klass_infos) def local_setter(obj, from_obj): # Set a reverse fk object when relation is non-empty. if from_obj: f.remote_field.set_cached_value(from_obj, obj) def remote_setter(name, obj, from_obj): setattr(from_obj, name, obj) for name in list(requested): # Filtered relations work only on the topmost level. 
if cur_depth > 1: break if name in self.query._filtered_relations: fields_found.add(name) f, _, join_opts, joins, _, _ = self.query.setup_joins([name], opts, root_alias) model = join_opts.model alias = joins[-1] from_parent = issubclass(model, opts.model) and model is not opts.model klass_info = { 'model': model, 'field': f, 'reverse': True, 'local_setter': local_setter, 'remote_setter': partial(remote_setter, name), 'from_parent': from_parent, } related_klass_infos.append(klass_info) select_fields = [] columns = self.get_default_columns( start_alias=alias, opts=model._meta, from_parent=opts.model, ) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next_requested = requested.get(name, {}) next_klass_infos = self.get_related_selections( select, opts=model._meta, root_alias=alias, cur_depth=cur_depth + 1, requested=next_requested, restricted=restricted, ) get_related_klass_infos(klass_info, next_klass_infos) fields_not_found = set(requested).difference(fields_found) if fields_not_found: invalid_fields = ("'%s'" % s for s in fields_not_found) raise FieldError( 'Invalid field name(s) given in select_related: %s. ' 'Choices are: %s' % ( ', '.join(invalid_fields), ', '.join(_get_field_choices()) or '(none)', ) ) return related_klass_infos def get_select_for_update_of_arguments(self): """ Return a quoted list of arguments for the SELECT FOR UPDATE OF part of the query. """ def _get_parent_klass_info(klass_info): concrete_model = klass_info['model']._meta.concrete_model for parent_model, parent_link in concrete_model._meta.parents.items(): parent_list = parent_model._meta.get_parent_list() yield { 'model': parent_model, 'field': parent_link, 'reverse': False, 'select_fields': [ select_index for select_index in klass_info['select_fields'] # Selected columns from a model or its parents. if ( self.select[select_index][0].target.model == parent_model or self.select[select_index][0].target.model in parent_list ) ], } def _get_first_selected_col_from_model(klass_info): """ Find the first selected column from a model. If it doesn't exist, don't lock a model. select_fields is filled recursively, so it also contains fields from the parent models. 
""" concrete_model = klass_info['model']._meta.concrete_model for select_index in klass_info['select_fields']: if self.select[select_index][0].target.model == concrete_model: return self.select[select_index][0] def _get_field_choices(): """Yield all allowed field paths in breadth-first search order.""" queue = collections.deque([(None, self.klass_info)]) while queue: parent_path, klass_info = queue.popleft() if parent_path is None: path = [] yield 'self' else: field = klass_info['field'] if klass_info['reverse']: field = field.remote_field path = parent_path + [field.name] yield LOOKUP_SEP.join(path) queue.extend( (path, klass_info) for klass_info in _get_parent_klass_info(klass_info) ) queue.extend( (path, klass_info) for klass_info in klass_info.get('related_klass_infos', []) ) result = [] invalid_names = [] for name in self.query.select_for_update_of: klass_info = self.klass_info if name == 'self': col = _get_first_selected_col_from_model(klass_info) else: for part in name.split(LOOKUP_SEP): klass_infos = ( *klass_info.get('related_klass_infos', []), *_get_parent_klass_info(klass_info), ) for related_klass_info in klass_infos: field = related_klass_info['field'] if related_klass_info['reverse']: field = field.remote_field if field.name == part: klass_info = related_klass_info break else: klass_info = None break if klass_info is None: invalid_names.append(name) continue col = _get_first_selected_col_from_model(klass_info) if col is not None: if self.connection.features.select_for_update_of_column: result.append(self.compile(col)[0]) else: result.append(self.quote_name_unless_alias(col.alias)) if invalid_names: raise FieldError( 'Invalid field name(s) given in select_for_update(of=(...)): %s. ' 'Only relational fields followed in the query are allowed. ' 'Choices are: %s.' % ( ', '.join(invalid_names), ', '.join(_get_field_choices()), ) ) return result def deferred_to_columns(self): """ Convert the self.deferred_loading data structure to mapping of table names to sets of column names which are to be loaded. Return the dictionary. """ columns = {} self.query.deferred_to_data(columns, self.query.get_loaded_field_names_cb) return columns def get_converters(self, expressions): converters = {} for i, expression in enumerate(expressions): if expression: backend_converters = self.connection.ops.get_db_converters(expression) field_converters = expression.get_db_converters(self.connection) if backend_converters or field_converters: converters[i] = (backend_converters + field_converters, expression) return converters def apply_converters(self, rows, converters): connection = self.connection converters = list(converters.items()) for row in map(list, rows): for pos, (convs, expression) in converters: value = row[pos] for converter in convs: value = converter(value, expression, connection) row[pos] = value yield row def results_iter(self, results=None, tuple_expected=False, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE): """Return an iterator over the results from executing this query.""" if results is None: results = self.execute_sql(MULTI, chunked_fetch=chunked_fetch, chunk_size=chunk_size) fields = [s[0] for s in self.select[0:self.col_count]] converters = self.get_converters(fields) rows = chain.from_iterable(results) if converters: rows = self.apply_converters(rows, converters) if tuple_expected: rows = map(tuple, rows) return rows def has_results(self): """ Backends (e.g. NoSQL) can override this in order to use optimized versions of "query has any results." 
""" return bool(self.execute_sql(SINGLE)) def execute_sql(self, result_type=MULTI, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE): """ Run the query against the database and return the result(s). The return value is a single data item if result_type is SINGLE, or an iterator over the results if the result_type is MULTI. result_type is either MULTI (use fetchmany() to retrieve all rows), SINGLE (only retrieve a single row), or None. In this last case, the cursor is returned if any query is executed, since it's used by subclasses such as InsertQuery). It's possible, however, that no query is needed, as the filters describe an empty set. In that case, None is returned, to avoid any unnecessary database interaction. """ result_type = result_type or NO_RESULTS try: sql, params = self.as_sql() if not sql: raise EmptyResultSet except EmptyResultSet: if result_type == MULTI: return iter([]) else: return if chunked_fetch: cursor = self.connection.chunked_cursor() else: cursor = self.connection.cursor() try: cursor.execute(sql, params) except Exception: # Might fail for server-side cursors (e.g. connection closed) cursor.close() raise if result_type == CURSOR: # Give the caller the cursor to process and close. return cursor if result_type == SINGLE: try: val = cursor.fetchone() if val: return val[0:self.col_count] return val finally: # done with the cursor cursor.close() if result_type == NO_RESULTS: cursor.close() return result = cursor_iter( cursor, self.connection.features.empty_fetchmany_value, self.col_count if self.has_extra_select else None, chunk_size, ) if not chunked_fetch or not self.connection.features.can_use_chunked_reads: try: # If we are using non-chunked reads, we return the same data # structure as normally, but ensure it is all read into memory # before going any further. Use chunked_fetch if requested, # unless the database doesn't support it. return list(result) finally: # done with the cursor cursor.close() return result def as_subquery_condition(self, alias, columns, compiler): qn = compiler.quote_name_unless_alias qn2 = self.connection.ops.quote_name for index, select_col in enumerate(self.query.select): lhs_sql, lhs_params = self.compile(select_col) rhs = '%s.%s' % (qn(alias), qn2(columns[index])) self.query.where.add( RawSQL('%s = %s' % (lhs_sql, rhs), lhs_params), 'AND') sql, params = self.as_sql() return 'EXISTS (%s)' % sql, params def explain_query(self): result = list(self.execute_sql()) # Some backends return 1 item tuples with strings, and others return # tuples with integers and strings. Flatten them out into strings. for row in result[0]: if not isinstance(row, str): yield ' '.join(str(c) for c in row) else: yield row class SQLInsertCompiler(SQLCompiler): returning_fields = None returning_params = tuple() def field_as_sql(self, field, val): """ Take a field and a value intended to be saved on that field, and return placeholder SQL and accompanying params. Check for raw values, expressions, and fields with get_placeholder() defined in that order. When field is None, consider the value raw and use it as the placeholder, with no corresponding parameters returned. """ if field is None: # A field value of None means the value is raw. sql, params = val, [] elif hasattr(val, 'as_sql'): # This is an expression, let's compile it. sql, params = self.compile(val) elif hasattr(field, 'get_placeholder'): # Some fields (e.g. geo fields) need special munging before # they can be inserted. 
sql, params = field.get_placeholder(val, self, self.connection), [val] else: # Return the common case for the placeholder sql, params = '%s', [val] # The following hook is only used by Oracle Spatial, which sometimes # needs to yield 'NULL' and [] as its placeholder and params instead # of '%s' and [None]. The 'NULL' placeholder is produced earlier by # OracleOperations.get_geom_placeholder(). The following line removes # the corresponding None parameter. See ticket #10888. params = self.connection.ops.modify_insert_params(sql, params) return sql, params def prepare_value(self, field, value): """ Prepare a value to be used in a query by resolving it if it is an expression and otherwise calling the field's get_db_prep_save(). """ if hasattr(value, 'resolve_expression'): value = value.resolve_expression(self.query, allow_joins=False, for_save=True) # Don't allow values containing Col expressions. They refer to # existing columns on a row, but in the case of insert the row # doesn't exist yet. if value.contains_column_references: raise ValueError( 'Failed to insert expression "%s" on %s. F() expressions ' 'can only be used to update, not to insert.' % (value, field) ) if value.contains_aggregate: raise FieldError( 'Aggregate functions are not allowed in this query ' '(%s=%r).' % (field.name, value) ) if value.contains_over_clause: raise FieldError( 'Window expressions are not allowed in this query (%s=%r).' % (field.name, value) ) else: value = field.get_db_prep_save(value, connection=self.connection) return value def pre_save_val(self, field, obj): """ Get the given field's value off the given obj. pre_save() is used for things like auto_now on DateTimeField. Skip it if this is a raw query. """ if self.query.raw: return getattr(obj, field.attname) return field.pre_save(obj, add=True) def assemble_as_sql(self, fields, value_rows): """ Take a sequence of N fields and a sequence of M rows of values, and generate placeholder SQL and parameters for each field and value. Return a pair containing: * a sequence of M rows of N SQL placeholder strings, and * a sequence of M rows of corresponding parameter values. Each placeholder string may contain any number of '%s' interpolation strings, and each parameter row will contain exactly as many params as the total number of '%s's in the corresponding placeholder row. """ if not value_rows: return [], [] # list of (sql, [params]) tuples for each object to be saved # Shape: [n_objs][n_fields][2] rows_of_fields_as_sql = ( (self.field_as_sql(field, v) for field, v in zip(fields, row)) for row in value_rows ) # tuple like ([sqls], [[params]s]) for each object to be saved # Shape: [n_objs][2][n_fields] sql_and_param_pair_rows = (zip(*row) for row in rows_of_fields_as_sql) # Extract separate lists for placeholders and params. # Each of these has shape [n_objs][n_fields] placeholder_rows, param_rows = zip(*sql_and_param_pair_rows) # Params for each field are still lists, and need to be flattened. param_rows = [[p for ps in row for p in ps] for row in param_rows] return placeholder_rows, param_rows def as_sql(self): # We don't need quote_name_unless_alias() here, since these are all # going to be column names (so we can avoid the extra overhead). 
qn = self.connection.ops.quote_name opts = self.query.get_meta() insert_statement = self.connection.ops.insert_statement(ignore_conflicts=self.query.ignore_conflicts) result = ['%s %s' % (insert_statement, qn(opts.db_table))] fields = self.query.fields or [opts.pk] result.append('(%s)' % ', '.join(qn(f.column) for f in fields)) if self.query.fields: value_rows = [ [self.prepare_value(field, self.pre_save_val(field, obj)) for field in fields] for obj in self.query.objs ] else: # An empty object. value_rows = [[self.connection.ops.pk_default_value()] for _ in self.query.objs] fields = [None] # Currently the backends just accept values when generating bulk # queries and generate their own placeholders. Doing that isn't # necessary and it should be possible to use placeholders and # expressions in bulk inserts too. can_bulk = (not self.returning_fields and self.connection.features.has_bulk_insert) placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows) ignore_conflicts_suffix_sql = self.connection.ops.ignore_conflicts_suffix_sql( ignore_conflicts=self.query.ignore_conflicts ) if self.returning_fields and self.connection.features.can_return_columns_from_insert: if self.connection.features.can_return_rows_from_bulk_insert: result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows)) params = param_rows else: result.append("VALUES (%s)" % ", ".join(placeholder_rows[0])) params = [param_rows[0]] if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) # Skip empty r_sql to allow subclasses to customize behavior for # 3rd party backends. Refs #19096. r_sql, self.returning_params = self.connection.ops.return_insert_columns(self.returning_fields) if r_sql: result.append(r_sql) params += [self.returning_params] return [(" ".join(result), tuple(chain.from_iterable(params)))] if can_bulk: result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows)) if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) return [(" ".join(result), tuple(p for ps in param_rows for p in ps))] else: if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) return [ (" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals) for p, vals in zip(placeholder_rows, param_rows) ] def execute_sql(self, returning_fields=None): assert not ( returning_fields and len(self.query.objs) != 1 and not self.connection.features.can_return_rows_from_bulk_insert ) opts = self.query.get_meta() self.returning_fields = returning_fields with self.connection.cursor() as cursor: for sql, params in self.as_sql(): cursor.execute(sql, params) if not self.returning_fields: return [] if self.connection.features.can_return_rows_from_bulk_insert and len(self.query.objs) > 1: rows = self.connection.ops.fetch_returned_insert_rows(cursor) elif self.connection.features.can_return_columns_from_insert: assert len(self.query.objs) == 1 rows = [self.connection.ops.fetch_returned_insert_columns( cursor, self.returning_params, )] else: rows = [(self.connection.ops.last_insert_id( cursor, opts.db_table, opts.pk.column, ),)] cols = [field.get_col(opts.db_table) for field in self.returning_fields] converters = self.get_converters(cols) if converters: rows = list(self.apply_converters(rows, converters)) return rows class SQLDeleteCompiler(SQLCompiler): @cached_property def single_alias(self): # Ensure base table is in aliases. 
self.query.get_initial_alias() return sum(self.query.alias_refcount[t] > 0 for t in self.query.alias_map) == 1 @classmethod def _expr_refs_base_model(cls, expr, base_model): if isinstance(expr, Query): return expr.model == base_model if not hasattr(expr, 'get_source_expressions'): return False return any( cls._expr_refs_base_model(source_expr, base_model) for source_expr in expr.get_source_expressions() ) @cached_property def contains_self_reference_subquery(self): return any( self._expr_refs_base_model(expr, self.query.model) for expr in chain(self.query.annotations.values(), self.query.where.children) ) def _as_sql(self, query): result = [ 'DELETE FROM %s' % self.quote_name_unless_alias(query.base_table) ] where, params = self.compile(query.where) if where: result.append('WHERE %s' % where) return ' '.join(result), tuple(params) def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. """ if self.single_alias and not self.contains_self_reference_subquery: return self._as_sql(self.query) innerq = self.query.clone() innerq.__class__ = Query innerq.clear_select_clause() pk = self.query.model._meta.pk innerq.select = [ pk.get_col(self.query.get_initial_alias()) ] outerq = Query(self.query.model) outerq.where = self.query.where_class() if not self.connection.features.update_can_self_select: # Force the materialization of the inner query to allow reference # to the target table on MySQL. sql, params = innerq.get_compiler(connection=self.connection).as_sql() innerq = RawSQL('SELECT * FROM (%s) subquery' % sql, params) outerq.add_q(Q(pk__in=innerq)) return self._as_sql(outerq) class SQLUpdateCompiler(SQLCompiler): def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. """ self.pre_sql_setup() if not self.query.values: return '', () qn = self.quote_name_unless_alias values, update_params = [], [] for field, model, val in self.query.values: if hasattr(val, 'resolve_expression'): val = val.resolve_expression(self.query, allow_joins=False, for_save=True) if val.contains_aggregate: raise FieldError( 'Aggregate functions are not allowed in this query ' '(%s=%r).' % (field.name, val) ) if val.contains_over_clause: raise FieldError( 'Window expressions are not allowed in this query ' '(%s=%r).' % (field.name, val) ) elif hasattr(val, 'prepare_database_save'): if field.remote_field: val = field.get_db_prep_save( val.prepare_database_save(field), connection=self.connection, ) else: raise TypeError( "Tried to update field %s with a model instance, %r. " "Use a value compatible with %s." % (field, val, field.__class__.__name__) ) else: val = field.get_db_prep_save(val, connection=self.connection) # Getting the placeholder for the field. if hasattr(field, 'get_placeholder'): placeholder = field.get_placeholder(val, self, self.connection) else: placeholder = '%s' name = field.column if hasattr(val, 'as_sql'): sql, params = self.compile(val) values.append('%s = %s' % (qn(name), placeholder % sql)) update_params.extend(params) elif val is not None: values.append('%s = %s' % (qn(name), placeholder)) update_params.append(val) else: values.append('%s = NULL' % qn(name)) table = self.query.base_table result = [ 'UPDATE %s SET' % qn(table), ', '.join(values), ] where, params = self.compile(self.query.where) if where: result.append('WHERE %s' % where) return ' '.join(result), tuple(update_params + params) def execute_sql(self, result_type): """ Execute the specified update. 
Return the number of rows affected by the primary update query. The "primary update query" is the first non-empty query that is executed. Row counts for any subsequent, related queries are not available. """ cursor = super().execute_sql(result_type) try: rows = cursor.rowcount if cursor else 0 is_empty = cursor is None finally: if cursor: cursor.close() for query in self.query.get_related_updates(): aux_rows = query.get_compiler(self.using).execute_sql(result_type) if is_empty and aux_rows: rows = aux_rows is_empty = False return rows def pre_sql_setup(self): """ If the update depends on results from other tables, munge the "where" conditions to match the format required for (portable) SQL updates. If multiple updates are required, pull out the id values to update at this point so that they don't change as a result of the progressive updates. """ refcounts_before = self.query.alias_refcount.copy() # Ensure base table is in the query self.query.get_initial_alias() count = self.query.count_active_tables() if not self.query.related_updates and count == 1: return query = self.query.chain(klass=Query) query.select_related = False query.clear_ordering(True) query.extra = {} query.select = [] query.add_fields([query.get_meta().pk.name]) super().pre_sql_setup() must_pre_select = count > 1 and not self.connection.features.update_can_self_select # Now we adjust the current query: reset the where clause and get rid # of all the tables we don't need (since they're in the sub-select). self.query.where = self.query.where_class() if self.query.related_updates or must_pre_select: # Either we're using the idents in multiple update queries (so # don't want them to change), or the db backend doesn't support # selecting from the updating table (e.g. MySQL). idents = [] for rows in query.get_compiler(self.using).execute_sql(MULTI): idents.extend(r[0] for r in rows) self.query.add_filter(('pk__in', idents)) self.query.related_ids = idents else: # The fast path. Filters and updates in one query. self.query.add_filter(('pk__in', query)) self.query.reset_refcounts(refcounts_before) class SQLAggregateCompiler(SQLCompiler): def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. """ sql, params = [], [] for annotation in self.query.annotation_select.values(): ann_sql, ann_params = self.compile(annotation) ann_sql, ann_params = annotation.select_format(self, ann_sql, ann_params) sql.append(ann_sql) params.extend(ann_params) self.col_count = len(self.query.annotation_select) sql = ', '.join(sql) params = tuple(params) inner_query_sql, inner_query_params = self.query.inner_query.get_compiler( self.using ).as_sql(with_col_aliases=True) sql = 'SELECT %s FROM (%s) subquery' % (sql, inner_query_sql) params = params + inner_query_params return sql, params def cursor_iter(cursor, sentinel, col_count, itersize): """ Yield blocks of rows from a cursor and ensure the cursor is closed when done. """ try: for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel): yield rows if col_count is None else [r[:col_count] for r in rows] finally: cursor.close()
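

# --- Illustrative sketch, not part of Django: roughly how cursor_iter() above
# is meant to be consumed. The compilers pass
# connection.features.empty_fetchmany_value as the sentinel; the table name
# and chunk size below are hypothetical placeholders.
def _cursor_iter_demo(connection):
    cursor = connection.cursor()  # cursor_iter() closes the cursor when done.
    cursor.execute('SELECT id, name FROM demo_author')
    sentinel = connection.features.empty_fetchmany_value
    for chunk in cursor_iter(cursor, sentinel, col_count=None, itersize=100):
        for row in chunk:
            print(row)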
""" Useful auxiliary data structures for query construction. Not useful outside the SQL domain. """ from django.db.models.sql.constants import INNER, LOUTER class MultiJoin(Exception): """ Used by join construction code to indicate the point at which a multi-valued join was attempted (if the caller wants to treat that exceptionally). """ def __init__(self, names_pos, path_with_names): self.level = names_pos # The path travelled, this includes the path to the multijoin. self.names_with_path = path_with_names class Empty: pass class Join: """ Used by sql.Query and sql.SQLCompiler to generate JOIN clauses into the FROM entry. For example, the SQL generated could be LEFT OUTER JOIN "sometable" T1 ON ("othertable"."sometable_id" = "sometable"."id") This class is primarily used in Query.alias_map. All entries in alias_map must be Join compatible by providing the following attributes and methods: - table_name (string) - table_alias (possible alias for the table, can be None) - join_type (can be None for those entries that aren't joined from anything) - parent_alias (which table is this join's parent, can be None similarly to join_type) - as_sql() - relabeled_clone() """ def __init__(self, table_name, parent_alias, table_alias, join_type, join_field, nullable, filtered_relation=None): # Join table self.table_name = table_name self.parent_alias = parent_alias # Note: table_alias is not necessarily known at instantiation time. self.table_alias = table_alias # LOUTER or INNER self.join_type = join_type # A list of 2-tuples to use in the ON clause of the JOIN. # Each 2-tuple will create one join condition in the ON clause. self.join_cols = join_field.get_joining_columns() # Along which field (or ForeignObjectRel in the reverse join case) self.join_field = join_field # Is this join nullabled? self.nullable = nullable self.filtered_relation = filtered_relation def as_sql(self, compiler, connection): """ Generate the full LEFT OUTER JOIN sometable ON sometable.somecol = othertable.othercol, params clause for this join. """ join_conditions = [] params = [] qn = compiler.quote_name_unless_alias qn2 = connection.ops.quote_name # Add a join condition for each pair of joining columns. for lhs_col, rhs_col in self.join_cols: join_conditions.append('%s.%s = %s.%s' % ( qn(self.parent_alias), qn2(lhs_col), qn(self.table_alias), qn2(rhs_col), )) # Add a single condition inside parentheses for whatever # get_extra_restriction() returns. extra_cond = self.join_field.get_extra_restriction( compiler.query.where_class, self.table_alias, self.parent_alias) if extra_cond: extra_sql, extra_params = compiler.compile(extra_cond) join_conditions.append('(%s)' % extra_sql) params.extend(extra_params) if self.filtered_relation: extra_sql, extra_params = compiler.compile(self.filtered_relation) if extra_sql: join_conditions.append('(%s)' % extra_sql) params.extend(extra_params) if not join_conditions: # This might be a rel on the other end of an actual declared field. declared_field = getattr(self.join_field, 'field', self.join_field) raise ValueError( "Join generated an empty ON clause. %s did not yield either " "joining columns or extra restrictions." 
% declared_field.__class__ ) on_clause_sql = ' AND '.join(join_conditions) alias_str = '' if self.table_alias == self.table_name else (' %s' % self.table_alias) sql = '%s %s%s ON (%s)' % (self.join_type, qn(self.table_name), alias_str, on_clause_sql) return sql, params def relabeled_clone(self, change_map): new_parent_alias = change_map.get(self.parent_alias, self.parent_alias) new_table_alias = change_map.get(self.table_alias, self.table_alias) if self.filtered_relation is not None: filtered_relation = self.filtered_relation.clone() filtered_relation.path = [change_map.get(p, p) for p in self.filtered_relation.path] else: filtered_relation = None return self.__class__( self.table_name, new_parent_alias, new_table_alias, self.join_type, self.join_field, self.nullable, filtered_relation=filtered_relation, ) @property def identity(self): return ( self.__class__, self.table_name, self.parent_alias, self.join_field, self.filtered_relation, ) def __eq__(self, other): if not isinstance(other, Join): return NotImplemented return self.identity == other.identity def __hash__(self): return hash(self.identity) def equals(self, other): # Ignore filtered_relation in equality check. return self.identity[:-1] == other.identity[:-1] def demote(self): new = self.relabeled_clone({}) new.join_type = INNER return new def promote(self): new = self.relabeled_clone({}) new.join_type = LOUTER return new class BaseTable: """ The BaseTable class is used for base table references in FROM clause. For example, the SQL "foo" in SELECT * FROM "foo" WHERE somecond could be generated by this class. """ join_type = None parent_alias = None filtered_relation = None def __init__(self, table_name, alias): self.table_name = table_name self.table_alias = alias def as_sql(self, compiler, connection): alias_str = '' if self.table_alias == self.table_name else (' %s' % self.table_alias) base_sql = compiler.quote_name_unless_alias(self.table_name) return base_sql + alias_str, [] def relabeled_clone(self, change_map): return self.__class__(self.table_name, change_map.get(self.table_alias, self.table_alias)) @property def identity(self): return self.__class__, self.table_name, self.table_alias def __eq__(self, other): if not isinstance(other, BaseTable): return NotImplemented return self.identity == other.identity def __hash__(self): return hash(self.identity) def equals(self, other): return self.identity == other.identity
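

# --- Illustrative sketch, not part of Django: Join/BaseTable equality is based
# on identity, which deliberately excludes table_alias, join_type and nullable.
# _FakeJoinField is a hypothetical stand-in for a real field object that
# provides get_joining_columns().
def _join_identity_demo():
    class _FakeJoinField:
        def get_joining_columns(self):
            return (('author_id', 'id'),)

    join = Join('app_author', 'app_book', 'T1', INNER, _FakeJoinField(), nullable=False)
    promoted = join.promote()  # same join, rewritten as a LEFT OUTER JOIN
    assert promoted.join_type == LOUTER
    assert join == promoted and hash(join) == hash(promoted)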
import datetime import uuid from functools import lru_cache from django.conf import settings from django.db import DatabaseError, NotSupportedError from django.db.backends.base.operations import BaseDatabaseOperations from django.db.backends.utils import strip_quotes, truncate_name from django.db.models import AutoField, Exists, ExpressionWrapper from django.db.models.expressions import RawSQL from django.db.models.sql.where import WhereNode from django.utils import timezone from django.utils.encoding import force_bytes, force_str from django.utils.functional import cached_property from django.utils.regex_helper import _lazy_re_compile from .base import Database from .utils import BulkInsertMapper, InsertVar, Oracle_datetime class DatabaseOperations(BaseDatabaseOperations): # Oracle uses NUMBER(5), NUMBER(11), and NUMBER(19) for integer fields. # SmallIntegerField uses NUMBER(11) instead of NUMBER(5), which is used by # SmallAutoField, to preserve backward compatibility. integer_field_ranges = { 'SmallIntegerField': (-99999999999, 99999999999), 'IntegerField': (-99999999999, 99999999999), 'BigIntegerField': (-9999999999999999999, 9999999999999999999), 'PositiveBigIntegerField': (0, 9999999999999999999), 'PositiveSmallIntegerField': (0, 99999999999), 'PositiveIntegerField': (0, 99999999999), 'SmallAutoField': (-99999, 99999), 'AutoField': (-99999999999, 99999999999), 'BigAutoField': (-9999999999999999999, 9999999999999999999), } set_operators = {**BaseDatabaseOperations.set_operators, 'difference': 'MINUS'} # TODO: colorize this SQL code with style.SQL_KEYWORD(), etc. _sequence_reset_sql = """ DECLARE table_value integer; seq_value integer; seq_name user_tab_identity_cols.sequence_name%%TYPE; BEGIN BEGIN SELECT sequence_name INTO seq_name FROM user_tab_identity_cols WHERE table_name = '%(table_name)s' AND column_name = '%(column_name)s'; EXCEPTION WHEN NO_DATA_FOUND THEN seq_name := '%(no_autofield_sequence_name)s'; END; SELECT NVL(MAX(%(column)s), 0) INTO table_value FROM %(table)s; SELECT NVL(last_number - cache_size, 0) INTO seq_value FROM user_sequences WHERE sequence_name = seq_name; WHILE table_value > seq_value LOOP EXECUTE IMMEDIATE 'SELECT "'||seq_name||'".nextval FROM DUAL' INTO seq_value; END LOOP; END; /""" # Oracle doesn't support string without precision; use the max string size. cast_char_field_without_max_length = 'NVARCHAR2(2000)' cast_data_types = { 'AutoField': 'NUMBER(11)', 'BigAutoField': 'NUMBER(19)', 'SmallAutoField': 'NUMBER(5)', 'TextField': cast_char_field_without_max_length, } def cache_key_culling_sql(self): return 'SELECT cache_key FROM %s ORDER BY cache_key OFFSET %%s ROWS FETCH FIRST 1 ROWS ONLY' def date_extract_sql(self, lookup_type, field_name): if lookup_type == 'week_day': # TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday. 
return "TO_CHAR(%s, 'D')" % field_name elif lookup_type == 'iso_week_day': return "TO_CHAR(%s - 1, 'D')" % field_name elif lookup_type == 'week': # IW = ISO week number return "TO_CHAR(%s, 'IW')" % field_name elif lookup_type == 'quarter': return "TO_CHAR(%s, 'Q')" % field_name elif lookup_type == 'iso_year': return "TO_CHAR(%s, 'IYYY')" % field_name else: # https://docs.oracle.com/en/database/oracle/oracle-database/18/sqlrf/EXTRACT-datetime.html return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name) def date_trunc_sql(self, lookup_type, field_name, tzname=None): field_name = self._convert_field_to_tz(field_name, tzname) # https://docs.oracle.com/en/database/oracle/oracle-database/18/sqlrf/ROUND-and-TRUNC-Date-Functions.html if lookup_type in ('year', 'month'): return "TRUNC(%s, '%s')" % (field_name, lookup_type.upper()) elif lookup_type == 'quarter': return "TRUNC(%s, 'Q')" % field_name elif lookup_type == 'week': return "TRUNC(%s, 'IW')" % field_name else: return "TRUNC(%s)" % field_name # Oracle crashes with "ORA-03113: end-of-file on communication channel" # if the time zone name is passed in parameter. Use interpolation instead. # https://groups.google.com/forum/#!msg/django-developers/zwQju7hbG78/9l934yelwfsJ # This regexp matches all time zone names from the zoneinfo database. _tzname_re = _lazy_re_compile(r'^[\w/:+-]+$') def _prepare_tzname_delta(self, tzname): if '+' in tzname: return tzname[tzname.find('+'):] elif '-' in tzname: return tzname[tzname.find('-'):] return tzname def _convert_field_to_tz(self, field_name, tzname): if not (settings.USE_TZ and tzname): return field_name if not self._tzname_re.match(tzname): raise ValueError("Invalid time zone name: %s" % tzname) # Convert from connection timezone to the local time, returning # TIMESTAMP WITH TIME ZONE and cast it back to TIMESTAMP to strip the # TIME ZONE details. if self.connection.timezone_name != tzname: return "CAST((FROM_TZ(%s, '%s') AT TIME ZONE '%s') AS TIMESTAMP)" % ( field_name, self.connection.timezone_name, self._prepare_tzname_delta(tzname), ) return field_name def datetime_cast_date_sql(self, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) return 'TRUNC(%s)' % field_name def datetime_cast_time_sql(self, field_name, tzname): # Since `TimeField` values are stored as TIMESTAMP change to the # default date and convert the field to the specified timezone. 
        convert_datetime_sql = (
            "TO_TIMESTAMP(CONCAT('1900-01-01 ', TO_CHAR(%s, 'HH24:MI:SS.FF')), "
            "'YYYY-MM-DD HH24:MI:SS.FF')"
        ) % self._convert_field_to_tz(field_name, tzname)
        return "CASE WHEN %s IS NOT NULL THEN %s ELSE NULL END" % (
            field_name, convert_datetime_sql,
        )

    def datetime_extract_sql(self, lookup_type, field_name, tzname):
        field_name = self._convert_field_to_tz(field_name, tzname)
        return self.date_extract_sql(lookup_type, field_name)

    def datetime_trunc_sql(self, lookup_type, field_name, tzname):
        field_name = self._convert_field_to_tz(field_name, tzname)
        # https://docs.oracle.com/en/database/oracle/oracle-database/18/sqlrf/ROUND-and-TRUNC-Date-Functions.html
        if lookup_type in ('year', 'month'):
            sql = "TRUNC(%s, '%s')" % (field_name, lookup_type.upper())
        elif lookup_type == 'quarter':
            sql = "TRUNC(%s, 'Q')" % field_name
        elif lookup_type == 'week':
            sql = "TRUNC(%s, 'IW')" % field_name
        elif lookup_type == 'day':
            sql = "TRUNC(%s)" % field_name
        elif lookup_type == 'hour':
            sql = "TRUNC(%s, 'HH24')" % field_name
        elif lookup_type == 'minute':
            sql = "TRUNC(%s, 'MI')" % field_name
        else:
            sql = "CAST(%s AS DATE)" % field_name  # Cast to DATE removes sub-second precision.
        return sql

    def time_trunc_sql(self, lookup_type, field_name, tzname=None):
        # The implementation is similar to `datetime_trunc_sql` as both
        # `DateTimeField` and `TimeField` are stored as TIMESTAMP where
        # the date part of the latter is ignored.
        field_name = self._convert_field_to_tz(field_name, tzname)
        if lookup_type == 'hour':
            sql = "TRUNC(%s, 'HH24')" % field_name
        elif lookup_type == 'minute':
            sql = "TRUNC(%s, 'MI')" % field_name
        elif lookup_type == 'second':
            sql = "CAST(%s AS DATE)" % field_name  # Cast to DATE removes sub-second precision.
        return sql

    def get_db_converters(self, expression):
        converters = super().get_db_converters(expression)
        internal_type = expression.output_field.get_internal_type()
        if internal_type in ['JSONField', 'TextField']:
            converters.append(self.convert_textfield_value)
        elif internal_type == 'BinaryField':
            converters.append(self.convert_binaryfield_value)
        elif internal_type == 'BooleanField':
            converters.append(self.convert_booleanfield_value)
        elif internal_type == 'DateTimeField':
            if settings.USE_TZ:
                converters.append(self.convert_datetimefield_value)
        elif internal_type == 'DateField':
            converters.append(self.convert_datefield_value)
        elif internal_type == 'TimeField':
            converters.append(self.convert_timefield_value)
        elif internal_type == 'UUIDField':
            converters.append(self.convert_uuidfield_value)
        # Oracle stores empty strings as null. If the field accepts the empty
        # string, undo this to adhere to the Django convention of using
        # the empty string instead of null.
        if expression.field.empty_strings_allowed:
            converters.append(
                self.convert_empty_bytes
                if internal_type == 'BinaryField' else
                self.convert_empty_string
            )
        return converters

    def convert_textfield_value(self, value, expression, connection):
        if isinstance(value, Database.LOB):
            value = value.read()
        return value

    def convert_binaryfield_value(self, value, expression, connection):
        if isinstance(value, Database.LOB):
            value = force_bytes(value.read())
        return value

    def convert_booleanfield_value(self, value, expression, connection):
        if value in (0, 1):
            value = bool(value)
        return value

    # cx_Oracle always returns datetime.datetime objects for
    # DATE and TIMESTAMP columns, but Django wants to see a
    # python datetime.date, .time, or .datetime.
    def convert_datetimefield_value(self, value, expression, connection):
        if value is not None:
            value = timezone.make_aware(value, self.connection.timezone)
        return value

    def convert_datefield_value(self, value, expression, connection):
        if isinstance(value, Database.Timestamp):
            value = value.date()
        return value

    def convert_timefield_value(self, value, expression, connection):
        if isinstance(value, Database.Timestamp):
            value = value.time()
        return value

    def convert_uuidfield_value(self, value, expression, connection):
        if value is not None:
            value = uuid.UUID(value)
        return value

    @staticmethod
    def convert_empty_string(value, expression, connection):
        return '' if value is None else value

    @staticmethod
    def convert_empty_bytes(value, expression, connection):
        return b'' if value is None else value

    def deferrable_sql(self):
        return " DEFERRABLE INITIALLY DEFERRED"

    def fetch_returned_insert_columns(self, cursor, returning_params):
        columns = []
        for param in returning_params:
            value = param.get_value()
            if value == []:
                raise DatabaseError(
                    'The database did not return a new row id. Probably '
                    '"ORA-1403: no data found" was raised internally but was '
                    'hidden by the Oracle OCI library (see '
                    'https://code.djangoproject.com/ticket/28859).'
                )
            columns.append(value[0])
        return tuple(columns)

    def field_cast_sql(self, db_type, internal_type):
        if db_type and db_type.endswith('LOB') and internal_type != 'JSONField':
            return "DBMS_LOB.SUBSTR(%s)"
        else:
            return "%s"

    def no_limit_value(self):
        return None

    def limit_offset_sql(self, low_mark, high_mark):
        fetch, offset = self._get_limit_offset_params(low_mark, high_mark)
        return ' '.join(sql for sql in (
            ('OFFSET %d ROWS' % offset) if offset else None,
            ('FETCH FIRST %d ROWS ONLY' % fetch) if fetch else None,
        ) if sql)

    def last_executed_query(self, cursor, sql, params):
        # https://cx-oracle.readthedocs.io/en/latest/cursor.html#Cursor.statement
        # The DB API definition does not define this attribute.
        statement = cursor.statement
        # Unlike Psycopg's `query` and MySQLdb's `_executed`, cx_Oracle's
        # `statement` doesn't contain the query parameters. Substitute
        # parameters manually.
        if isinstance(params, (tuple, list)):
            for i, param in enumerate(params):
                statement = statement.replace(':arg%d' % i, force_str(param, errors='replace'))
        elif isinstance(params, dict):
            for key, param in params.items():
                statement = statement.replace(':%s' % key, force_str(param, errors='replace'))
        return statement

    def last_insert_id(self, cursor, table_name, pk_name):
        sq_name = self._get_sequence_name(cursor, strip_quotes(table_name), pk_name)
        cursor.execute('SELECT "%s".currval FROM dual' % sq_name)
        return cursor.fetchone()[0]

    def lookup_cast(self, lookup_type, internal_type=None):
        if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'):
            return "UPPER(%s)"
        if internal_type == 'JSONField' and lookup_type == 'exact':
            return 'DBMS_LOB.SUBSTR(%s)'
        return "%s"

    def max_in_list_size(self):
        return 1000

    def max_name_length(self):
        return 30

    def pk_default_value(self):
        return "NULL"

    def prep_for_iexact_query(self, x):
        return x

    def process_clob(self, value):
        if value is None:
            return ''
        return value.read()

    def quote_name(self, name):
        # SQL92 requires delimited (quoted) names to be case-sensitive. When
        # not quoted, Oracle has case-insensitive behavior for identifiers, but
        # always defaults to uppercase.
        # We simplify things by making Oracle identifiers always uppercase.
if not name.startswith('"') and not name.endswith('"'): name = '"%s"' % truncate_name(name, self.max_name_length()) # Oracle puts the query text into a (query % args) construct, so % signs # in names need to be escaped. The '%%' will be collapsed back to '%' at # that stage so we aren't really making the name longer here. name = name.replace('%', '%%') return name.upper() def regex_lookup(self, lookup_type): if lookup_type == 'regex': match_option = "'c'" else: match_option = "'i'" return 'REGEXP_LIKE(%%s, %%s, %s)' % match_option def return_insert_columns(self, fields): if not fields: return '', () field_names = [] params = [] for field in fields: field_names.append('%s.%s' % ( self.quote_name(field.model._meta.db_table), self.quote_name(field.column), )) params.append(InsertVar(field)) return 'RETURNING %s INTO %s' % ( ', '.join(field_names), ', '.join(['%s'] * len(params)), ), tuple(params) def __foreign_key_constraints(self, table_name, recursive): with self.connection.cursor() as cursor: if recursive: cursor.execute(""" SELECT user_tables.table_name, rcons.constraint_name FROM user_tables JOIN user_constraints cons ON (user_tables.table_name = cons.table_name AND cons.constraint_type = ANY('P', 'U')) LEFT JOIN user_constraints rcons ON (user_tables.table_name = rcons.table_name AND rcons.constraint_type = 'R') START WITH user_tables.table_name = UPPER(%s) CONNECT BY NOCYCLE PRIOR cons.constraint_name = rcons.r_constraint_name GROUP BY user_tables.table_name, rcons.constraint_name HAVING user_tables.table_name != UPPER(%s) ORDER BY MAX(level) DESC """, (table_name, table_name)) else: cursor.execute(""" SELECT cons.table_name, cons.constraint_name FROM user_constraints cons WHERE cons.constraint_type = 'R' AND cons.table_name = UPPER(%s) """, (table_name,)) return cursor.fetchall() @cached_property def _foreign_key_constraints(self): # 512 is large enough to fit the ~330 tables (as of this writing) in # Django's test suite. return lru_cache(maxsize=512)(self.__foreign_key_constraints) def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False): if not tables: return [] truncated_tables = {table.upper() for table in tables} constraints = set() # Oracle's TRUNCATE CASCADE only works with ON DELETE CASCADE foreign # keys which Django doesn't define. Emulate the PostgreSQL behavior # which truncates all dependent tables by manually retrieving all # foreign key constraints and resolving dependencies. 
for table in tables: for foreign_table, constraint in self._foreign_key_constraints(table, recursive=allow_cascade): if allow_cascade: truncated_tables.add(foreign_table) constraints.add((foreign_table, constraint)) sql = [ '%s %s %s %s %s %s %s %s;' % ( style.SQL_KEYWORD('ALTER'), style.SQL_KEYWORD('TABLE'), style.SQL_FIELD(self.quote_name(table)), style.SQL_KEYWORD('DISABLE'), style.SQL_KEYWORD('CONSTRAINT'), style.SQL_FIELD(self.quote_name(constraint)), style.SQL_KEYWORD('KEEP'), style.SQL_KEYWORD('INDEX'), ) for table, constraint in constraints ] + [ '%s %s %s;' % ( style.SQL_KEYWORD('TRUNCATE'), style.SQL_KEYWORD('TABLE'), style.SQL_FIELD(self.quote_name(table)), ) for table in truncated_tables ] + [ '%s %s %s %s %s %s;' % ( style.SQL_KEYWORD('ALTER'), style.SQL_KEYWORD('TABLE'), style.SQL_FIELD(self.quote_name(table)), style.SQL_KEYWORD('ENABLE'), style.SQL_KEYWORD('CONSTRAINT'), style.SQL_FIELD(self.quote_name(constraint)), ) for table, constraint in constraints ] if reset_sequences: sequences = [ sequence for sequence in self.connection.introspection.sequence_list() if sequence['table'].upper() in truncated_tables ] # Since we've just deleted all the rows, running our sequence ALTER # code will reset the sequence to 0. sql.extend(self.sequence_reset_by_name_sql(style, sequences)) return sql def sequence_reset_by_name_sql(self, style, sequences): sql = [] for sequence_info in sequences: no_autofield_sequence_name = self._get_no_autofield_sequence_name(sequence_info['table']) table = self.quote_name(sequence_info['table']) column = self.quote_name(sequence_info['column'] or 'id') query = self._sequence_reset_sql % { 'no_autofield_sequence_name': no_autofield_sequence_name, 'table': table, 'column': column, 'table_name': strip_quotes(table), 'column_name': strip_quotes(column), } sql.append(query) return sql def sequence_reset_sql(self, style, model_list): output = [] query = self._sequence_reset_sql for model in model_list: for f in model._meta.local_fields: if isinstance(f, AutoField): no_autofield_sequence_name = self._get_no_autofield_sequence_name(model._meta.db_table) table = self.quote_name(model._meta.db_table) column = self.quote_name(f.column) output.append(query % { 'no_autofield_sequence_name': no_autofield_sequence_name, 'table': table, 'column': column, 'table_name': strip_quotes(table), 'column_name': strip_quotes(column), }) # Only one AutoField is allowed per model, so don't # continue to loop break return output def start_transaction_sql(self): return '' def tablespace_sql(self, tablespace, inline=False): if inline: return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace) else: return "TABLESPACE %s" % self.quote_name(tablespace) def adapt_datefield_value(self, value): """ Transform a date value to an object compatible with what is expected by the backend driver for date columns. The default implementation transforms the date to text, but that is not necessary for Oracle. """ return value def adapt_datetimefield_value(self, value): """ Transform a datetime value to an object compatible with what is expected by the backend driver for datetime columns. If naive datetime is passed assumes that is in UTC. Normally Django models.DateTimeField makes sure that if USE_TZ is True passed datetime is timezone aware. """ if value is None: return None # Expression values are adapted by the database. 
if hasattr(value, 'resolve_expression'): return value # cx_Oracle doesn't support tz-aware datetimes if timezone.is_aware(value): if settings.USE_TZ: value = timezone.make_naive(value, self.connection.timezone) else: raise ValueError("Oracle backend does not support timezone-aware datetimes when USE_TZ is False.") return Oracle_datetime.from_datetime(value) def adapt_timefield_value(self, value): if value is None: return None # Expression values are adapted by the database. if hasattr(value, 'resolve_expression'): return value if isinstance(value, str): return datetime.datetime.strptime(value, '%H:%M:%S') # Oracle doesn't support tz-aware times if timezone.is_aware(value): raise ValueError("Oracle backend does not support timezone-aware times.") return Oracle_datetime(1900, 1, 1, value.hour, value.minute, value.second, value.microsecond) def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None): return value def combine_expression(self, connector, sub_expressions): lhs, rhs = sub_expressions if connector == '%%': return 'MOD(%s)' % ','.join(sub_expressions) elif connector == '&': return 'BITAND(%s)' % ','.join(sub_expressions) elif connector == '|': return 'BITAND(-%(lhs)s-1,%(rhs)s)+%(lhs)s' % {'lhs': lhs, 'rhs': rhs} elif connector == '<<': return '(%(lhs)s * POWER(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs} elif connector == '>>': return 'FLOOR(%(lhs)s / POWER(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs} elif connector == '^': return 'POWER(%s)' % ','.join(sub_expressions) elif connector == '#': raise NotSupportedError('Bitwise XOR is not supported in Oracle.') return super().combine_expression(connector, sub_expressions) def _get_no_autofield_sequence_name(self, table): """ Manually created sequence name to keep backward compatibility for AutoFields that aren't Oracle identity columns. """ name_length = self.max_name_length() - 3 return '%s_SQ' % truncate_name(strip_quotes(table), name_length).upper() def _get_sequence_name(self, cursor, table, pk_name): cursor.execute(""" SELECT sequence_name FROM user_tab_identity_cols WHERE table_name = UPPER(%s) AND column_name = UPPER(%s)""", [table, pk_name]) row = cursor.fetchone() return self._get_no_autofield_sequence_name(table) if row is None else row[0] def bulk_insert_sql(self, fields, placeholder_rows): query = [] for row in placeholder_rows: select = [] for i, placeholder in enumerate(row): # A model without any fields has fields=[None]. if fields[i]: internal_type = getattr(fields[i], 'target_field', fields[i]).get_internal_type() placeholder = BulkInsertMapper.types.get(internal_type, '%s') % placeholder # Add columns aliases to the first select to avoid "ORA-00918: # column ambiguously defined" when two or more columns in the # first select have the same value. if not query: placeholder = '%s col_%s' % (placeholder, i) select.append(placeholder) query.append('SELECT %s FROM DUAL' % ', '.join(select)) # Bulk insert to tables with Oracle identity columns causes Oracle to # add sequence.nextval to it. Sequence.nextval cannot be used with the # UNION operator. To prevent incorrect SQL, move UNION to a subquery. 
return 'SELECT * FROM (%s)' % ' UNION ALL '.join(query) def subtract_temporals(self, internal_type, lhs, rhs): if internal_type == 'DateField': lhs_sql, lhs_params = lhs rhs_sql, rhs_params = rhs params = (*lhs_params, *rhs_params) return "NUMTODSINTERVAL(TO_NUMBER(%s - %s), 'DAY')" % (lhs_sql, rhs_sql), params return super().subtract_temporals(internal_type, lhs, rhs) def bulk_batch_size(self, fields, objs): """Oracle restricts the number of parameters in a query.""" if fields: return self.connection.features.max_query_params // len(fields) return len(objs) def conditional_expression_supported_in_where_clause(self, expression): """ Oracle supports only EXISTS(...) or filters in the WHERE clause, others must be compared with True. """ if isinstance(expression, (Exists, WhereNode)): return True if isinstance(expression, ExpressionWrapper) and expression.conditional: return self.conditional_expression_supported_in_where_clause(expression.expression) if isinstance(expression, RawSQL) and expression.conditional: return True return False
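

# --- Illustrative sketch, not part of Django: the shape of the SQL produced by
# bulk_insert_sql() above for a model without concrete fields (fields=[None])
# and two rows of placeholders. 'ops' is assumed to be a DatabaseOperations
# instance, e.g. connection.ops on an Oracle connection.
def _bulk_insert_sql_demo(ops):
    sql = ops.bulk_insert_sql(fields=[None], placeholder_rows=[['%s'], ['%s']])
    # Only the first SELECT gets column aliases, and the UNION ALL is wrapped
    # in a subquery so identity columns keep working.
    assert sql == 'SELECT * FROM (SELECT %s col_0 FROM DUAL UNION ALL SELECT %s FROM DUAL)'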
import logging from datetime import datetime from django.db.backends.ddl_references import ( Columns, Expressions, ForeignKeyName, IndexName, Statement, Table, ) from django.db.backends.utils import names_digest, split_identifier from django.db.models import Deferrable, Index from django.db.models.sql import Query from django.db.transaction import TransactionManagementError, atomic from django.utils import timezone logger = logging.getLogger('django.db.backends.schema') def _is_relevant_relation(relation, altered_field): """ When altering the given field, must constraints on its model from the given relation be temporarily dropped? """ field = relation.field if field.many_to_many: # M2M reverse field return False if altered_field.primary_key and field.to_fields == [None]: # Foreign key constraint on the primary key, which is being altered. return True # Is the constraint targeting the field being altered? return altered_field.name in field.to_fields def _all_related_fields(model): return model._meta._get_fields(forward=False, reverse=True, include_hidden=True) def _related_non_m2m_objects(old_field, new_field): # Filter out m2m objects from reverse relations. # Return (old_relation, new_relation) tuples. return zip( (obj for obj in _all_related_fields(old_field.model) if _is_relevant_relation(obj, old_field)), (obj for obj in _all_related_fields(new_field.model) if _is_relevant_relation(obj, new_field)), ) class BaseDatabaseSchemaEditor: """ This class and its subclasses are responsible for emitting schema-changing statements to the databases - model creation/removal/alteration, field renaming, index fiddling, and so on. """ # Overrideable SQL templates sql_create_table = "CREATE TABLE %(table)s (%(definition)s)" sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s" sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s" sql_delete_table = "DROP TABLE %(table)s CASCADE" sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s" sql_alter_column = "ALTER TABLE %(table)s %(changes)s" sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s" sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL" sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL" sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s" sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT" sql_alter_column_no_default_null = sql_alter_column_no_default sql_alter_column_collate = "ALTER COLUMN %(column)s TYPE %(type)s%(collation)s" sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE" sql_rename_column = "ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s" sql_update_with_default = "UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL" sql_unique_constraint = "UNIQUE (%(columns)s)%(deferrable)s" sql_check_constraint = "CHECK (%(check)s)" sql_delete_constraint = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" sql_constraint = "CONSTRAINT %(name)s %(constraint)s" sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)" sql_delete_check = sql_delete_constraint sql_create_unique = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE (%(columns)s)%(deferrable)s" sql_delete_unique = sql_delete_constraint sql_create_fk = ( "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) " "REFERENCES %(to_table)s (%(to_column)s)%(deferrable)s" ) sql_create_inline_fk = None sql_create_column_inline_fk = None 
sql_delete_fk = sql_delete_constraint sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(include)s%(extra)s%(condition)s" sql_create_unique_index = "CREATE UNIQUE INDEX %(name)s ON %(table)s (%(columns)s)%(include)s%(condition)s" sql_delete_index = "DROP INDEX %(name)s" sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)" sql_delete_pk = sql_delete_constraint sql_delete_procedure = 'DROP PROCEDURE %(procedure)s' def __init__(self, connection, collect_sql=False, atomic=True): self.connection = connection self.collect_sql = collect_sql if self.collect_sql: self.collected_sql = [] self.atomic_migration = self.connection.features.can_rollback_ddl and atomic # State-managing methods def __enter__(self): self.deferred_sql = [] if self.atomic_migration: self.atomic = atomic(self.connection.alias) self.atomic.__enter__() return self def __exit__(self, exc_type, exc_value, traceback): if exc_type is None: for sql in self.deferred_sql: self.execute(sql) if self.atomic_migration: self.atomic.__exit__(exc_type, exc_value, traceback) # Core utility functions def execute(self, sql, params=()): """Execute the given SQL statement, with optional parameters.""" # Don't perform the transactional DDL check if SQL is being collected # as it's not going to be executed anyway. if not self.collect_sql and self.connection.in_atomic_block and not self.connection.features.can_rollback_ddl: raise TransactionManagementError( "Executing DDL statements while in a transaction on databases " "that can't perform a rollback is prohibited." ) # Account for non-string statement objects. sql = str(sql) # Log the command we're running, then run it logger.debug("%s; (params %r)", sql, params, extra={'params': params, 'sql': sql}) if self.collect_sql: ending = "" if sql.rstrip().endswith(";") else ";" if params is not None: self.collected_sql.append((sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) else: with self.connection.cursor() as cursor: cursor.execute(sql, params) def quote_name(self, name): return self.connection.ops.quote_name(name) def table_sql(self, model): """Take a model and return its table definition.""" # Add any unique_togethers (always deferred, as some fields might be # created afterwards, like geometry fields with some backends). for fields in model._meta.unique_together: columns = [model._meta.get_field(field).column for field in fields] self.deferred_sql.append(self._create_unique_sql(model, columns)) # Create column SQL, add FK deferreds if needed. column_sqls = [] params = [] for field in model._meta.local_fields: # SQL. definition, extra_params = self.column_sql(model, field) if definition is None: continue # Check constraints can go on the column SQL here. db_params = field.db_parameters(connection=self.connection) if db_params['check']: definition += ' ' + self.sql_check_constraint % db_params # Autoincrement SQL (for backends with inline variant). col_type_suffix = field.db_type_suffix(connection=self.connection) if col_type_suffix: definition += ' %s' % col_type_suffix params.extend(extra_params) # FK. 
if field.remote_field and field.db_constraint: to_table = field.remote_field.model._meta.db_table to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column if self.sql_create_inline_fk: definition += ' ' + self.sql_create_inline_fk % { 'to_table': self.quote_name(to_table), 'to_column': self.quote_name(to_column), } elif self.connection.features.supports_foreign_keys: self.deferred_sql.append(self._create_fk_sql(model, field, '_fk_%(to_table)s_%(to_column)s')) # Add the SQL to our big list. column_sqls.append('%s %s' % ( self.quote_name(field.column), definition, )) # Autoincrement SQL (for backends with post table definition # variant). if field.get_internal_type() in ('AutoField', 'BigAutoField', 'SmallAutoField'): autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column) if autoinc_sql: self.deferred_sql.extend(autoinc_sql) constraints = [constraint.constraint_sql(model, self) for constraint in model._meta.constraints] sql = self.sql_create_table % { 'table': self.quote_name(model._meta.db_table), 'definition': ', '.join(constraint for constraint in (*column_sqls, *constraints) if constraint), } if model._meta.db_tablespace: tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace) if tablespace_sql: sql += ' ' + tablespace_sql return sql, params # Field <-> database mapping functions def column_sql(self, model, field, include_default=False): """ Take a field and return its column definition. The field must already have had set_attributes_from_name() called. """ # Get the column's type and use that as the basis of the SQL db_params = field.db_parameters(connection=self.connection) sql = db_params['type'] params = [] # Check for fields that aren't actually columns (e.g. M2M) if sql is None: return None, None # Collation. collation = getattr(field, 'db_collation', None) if collation: sql += self._collate_sql(collation) # Work out nullability null = field.null # If we were told to include a default value, do so include_default = include_default and not self.skip_default(field) if include_default: default_value = self.effective_default(field) column_default = ' DEFAULT ' + self._column_default_sql(field) if default_value is not None: if self.connection.features.requires_literal_defaults: # Some databases can't take defaults as a parameter (oracle) # If this is the case, the individual schema backend should # implement prepare_default sql += column_default % self.prepare_default(default_value) else: sql += column_default params += [default_value] # Oracle treats the empty string ('') as null, so coerce the null # option whenever '' is a possible value. if (field.empty_strings_allowed and not field.primary_key and self.connection.features.interprets_empty_strings_as_nulls): null = True if null and not self.connection.features.implied_column_null: sql += " NULL" elif not null: sql += " NOT NULL" # Primary key/unique outputs if field.primary_key: sql += " PRIMARY KEY" elif field.unique: sql += " UNIQUE" # Optionally add the tablespace if it's an implicitly indexed column tablespace = field.db_tablespace or model._meta.db_tablespace if tablespace and self.connection.features.supports_tablespaces and field.unique: sql += " %s" % self.connection.ops.tablespace_sql(tablespace, inline=True) # Return the sql return sql, params def skip_default(self, field): """ Some backends don't accept default values for certain columns types (i.e. MySQL longtext and longblob). 
""" return False def skip_default_on_alter(self, field): """ Some backends don't accept default values for certain columns types (i.e. MySQL longtext and longblob) in the ALTER COLUMN statement. """ return False def prepare_default(self, value): """ Only used for backends which have requires_literal_defaults feature """ raise NotImplementedError( 'subclasses of BaseDatabaseSchemaEditor for backends which have ' 'requires_literal_defaults must provide a prepare_default() method' ) def _column_default_sql(self, field): """ Return the SQL to use in a DEFAULT clause. The resulting string should contain a '%s' placeholder for a default value. """ return '%s' @staticmethod def _effective_default(field): # This method allows testing its logic without a connection. if field.has_default(): default = field.get_default() elif not field.null and field.blank and field.empty_strings_allowed: if field.get_internal_type() == "BinaryField": default = b'' else: default = '' elif getattr(field, 'auto_now', False) or getattr(field, 'auto_now_add', False): default = datetime.now() internal_type = field.get_internal_type() if internal_type == 'DateField': default = default.date() elif internal_type == 'TimeField': default = default.time() elif internal_type == 'DateTimeField': default = timezone.now() else: default = None return default def effective_default(self, field): """Return a field's effective database default value.""" return field.get_db_prep_save(self._effective_default(field), self.connection) def quote_value(self, value): """ Return a quoted version of the value so it's safe to use in an SQL string. This is not safe against injection from user code; it is intended only for use in making SQL scripts or preparing default values for particularly tricky backends (defaults are not user-defined, though, so this is safe). """ raise NotImplementedError() # Actions def create_model(self, model): """ Create a table and any accompanying indexes or unique constraints for the given `model`. """ sql, params = self.table_sql(model) # Prevent using [] as params, in the case a literal '%' is used in the definition self.execute(sql, params or None) # Add any field index and index_together's (deferred as SQLite _remake_table needs it) self.deferred_sql.extend(self._model_indexes_sql(model)) # Make M2M tables for field in model._meta.local_many_to_many: if field.remote_field.through._meta.auto_created: self.create_model(field.remote_field.through) def delete_model(self, model): """Delete a model from the database.""" # Handle auto-created intermediary models for field in model._meta.local_many_to_many: if field.remote_field.through._meta.auto_created: self.delete_model(field.remote_field.through) # Delete the table self.execute(self.sql_delete_table % { "table": self.quote_name(model._meta.db_table), }) # Remove all deferred statements referencing the deleted table. for sql in list(self.deferred_sql): if isinstance(sql, Statement) and sql.references_table(model._meta.db_table): self.deferred_sql.remove(sql) def add_index(self, model, index): """Add an index on a model.""" if ( index.contains_expressions and not self.connection.features.supports_expression_indexes ): return None # Index.create_sql returns interpolated SQL which makes params=None a # necessity to avoid escaping attempts on execution. 
self.execute(index.create_sql(model, self), params=None) def remove_index(self, model, index): """Remove an index from a model.""" if ( index.contains_expressions and not self.connection.features.supports_expression_indexes ): return None self.execute(index.remove_sql(model, self)) def add_constraint(self, model, constraint): """Add a constraint to a model.""" sql = constraint.create_sql(model, self) if sql: # Constraint.create_sql returns interpolated SQL which makes # params=None a necessity to avoid escaping attempts on execution. self.execute(sql, params=None) def remove_constraint(self, model, constraint): """Remove a constraint from a model.""" sql = constraint.remove_sql(model, self) if sql: self.execute(sql) def alter_unique_together(self, model, old_unique_together, new_unique_together): """ Deal with a model changing its unique_together. The input unique_togethers must be doubly-nested, not the single-nested ["foo", "bar"] format. """ olds = {tuple(fields) for fields in old_unique_together} news = {tuple(fields) for fields in new_unique_together} # Deleted uniques for fields in olds.difference(news): self._delete_composed_index(model, fields, {'unique': True}, self.sql_delete_unique) # Created uniques for fields in news.difference(olds): columns = [model._meta.get_field(field).column for field in fields] self.execute(self._create_unique_sql(model, columns)) def alter_index_together(self, model, old_index_together, new_index_together): """ Deal with a model changing its index_together. The input index_togethers must be doubly-nested, not the single-nested ["foo", "bar"] format. """ olds = {tuple(fields) for fields in old_index_together} news = {tuple(fields) for fields in new_index_together} # Deleted indexes for fields in olds.difference(news): self._delete_composed_index( model, fields, {'index': True, 'unique': False}, self.sql_delete_index, ) # Created indexes for field_names in news.difference(olds): fields = [model._meta.get_field(field) for field in field_names] self.execute(self._create_index_sql(model, fields=fields, suffix='_idx')) def _delete_composed_index(self, model, fields, constraint_kwargs, sql): meta_constraint_names = {constraint.name for constraint in model._meta.constraints} meta_index_names = {constraint.name for constraint in model._meta.indexes} columns = [model._meta.get_field(field).column for field in fields] constraint_names = self._constraint_names( model, columns, exclude=meta_constraint_names | meta_index_names, **constraint_kwargs ) if len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of constraints for %s(%s)" % ( len(constraint_names), model._meta.db_table, ", ".join(columns), )) self.execute(self._delete_constraint_sql(sql, model, constraint_names[0])) def alter_db_table(self, model, old_db_table, new_db_table): """Rename the table a model points to.""" if (old_db_table == new_db_table or (self.connection.features.ignores_table_name_case and old_db_table.lower() == new_db_table.lower())): return self.execute(self.sql_rename_table % { "old_table": self.quote_name(old_db_table), "new_table": self.quote_name(new_db_table), }) # Rename all references to the old table name. 
for sql in self.deferred_sql: if isinstance(sql, Statement): sql.rename_table_references(old_db_table, new_db_table) def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace): """Move a model's table between tablespaces.""" self.execute(self.sql_retablespace_table % { "table": self.quote_name(model._meta.db_table), "old_tablespace": self.quote_name(old_db_tablespace), "new_tablespace": self.quote_name(new_db_tablespace), }) def add_field(self, model, field): """ Create a field on a model. Usually involves adding a column, but may involve adding a table instead (for M2M fields). """ # Special-case implicit M2M tables if field.many_to_many and field.remote_field.through._meta.auto_created: return self.create_model(field.remote_field.through) # Get the column's definition definition, params = self.column_sql(model, field, include_default=True) # It might not actually have a column behind it if definition is None: return # Check constraints can go on the column SQL here db_params = field.db_parameters(connection=self.connection) if db_params['check']: definition += " " + self.sql_check_constraint % db_params if field.remote_field and self.connection.features.supports_foreign_keys and field.db_constraint: constraint_suffix = '_fk_%(to_table)s_%(to_column)s' # Add FK constraint inline, if supported. if self.sql_create_column_inline_fk: to_table = field.remote_field.model._meta.db_table to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column namespace, _ = split_identifier(model._meta.db_table) definition += " " + self.sql_create_column_inline_fk % { 'name': self._fk_constraint_name(model, field, constraint_suffix), 'namespace': '%s.' % self.quote_name(namespace) if namespace else '', 'column': self.quote_name(field.column), 'to_table': self.quote_name(to_table), 'to_column': self.quote_name(to_column), 'deferrable': self.connection.ops.deferrable_sql() } # Otherwise, add FK constraints later. else: self.deferred_sql.append(self._create_fk_sql(model, field, constraint_suffix)) # Build the SQL and run it sql = self.sql_create_column % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(field.column), "definition": definition, } self.execute(sql, params) # Drop the default if we need to # (Django usually does not use in-database defaults) if not self.skip_default(field) and self.effective_default(field) is not None: changes_sql, params = self._alter_column_default_sql(model, None, field, drop=True) sql = self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": changes_sql, } self.execute(sql, params) # Add an index, if required self.deferred_sql.extend(self._field_indexes_sql(model, field)) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() def remove_field(self, model, field): """ Remove a field from a model. Usually involves deleting a column, but for M2Ms may involve deleting a table. 
""" # Special-case implicit M2M tables if field.many_to_many and field.remote_field.through._meta.auto_created: return self.delete_model(field.remote_field.through) # It might not actually have a column behind it if field.db_parameters(connection=self.connection)['type'] is None: return # Drop any FK constraints, MySQL requires explicit deletion if field.remote_field: fk_names = self._constraint_names(model, [field.column], foreign_key=True) for fk_name in fk_names: self.execute(self._delete_fk_sql(model, fk_name)) # Delete the column sql = self.sql_delete_column % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(field.column), } self.execute(sql) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() # Remove all deferred statements referencing the deleted column. for sql in list(self.deferred_sql): if isinstance(sql, Statement) and sql.references_column(model._meta.db_table, field.column): self.deferred_sql.remove(sql) def alter_field(self, model, old_field, new_field, strict=False): """ Allow a field's type, uniqueness, nullability, default, column, constraints, etc. to be modified. `old_field` is required to compute the necessary changes. If `strict` is True, raise errors if the old column does not match `old_field` precisely. """ if not self._field_should_be_altered(old_field, new_field): return # Ensure this field is even column-based old_db_params = old_field.db_parameters(connection=self.connection) old_type = old_db_params['type'] new_db_params = new_field.db_parameters(connection=self.connection) new_type = new_db_params['type'] if ((old_type is None and old_field.remote_field is None) or (new_type is None and new_field.remote_field is None)): raise ValueError( "Cannot alter field %s into %s - they do not properly define " "db_type (are you using a badly-written custom field?)" % (old_field, new_field), ) elif old_type is None and new_type is None and ( old_field.remote_field.through and new_field.remote_field.through and old_field.remote_field.through._meta.auto_created and new_field.remote_field.through._meta.auto_created): return self._alter_many_to_many(model, old_field, new_field, strict) elif old_type is None and new_type is None and ( old_field.remote_field.through and new_field.remote_field.through and not old_field.remote_field.through._meta.auto_created and not new_field.remote_field.through._meta.auto_created): # Both sides have through models; this is a no-op. 
return elif old_type is None or new_type is None: raise ValueError( "Cannot alter field %s into %s - they are not compatible types " "(you cannot alter to or from M2M fields, or add or remove " "through= on M2M fields)" % (old_field, new_field) ) self._alter_field(model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict) def _alter_field(self, model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict=False): """Perform a "physical" (non-ManyToMany) field update.""" # Drop any FK constraints, we'll remake them later fks_dropped = set() if ( self.connection.features.supports_foreign_keys and old_field.remote_field and old_field.db_constraint ): fk_names = self._constraint_names(model, [old_field.column], foreign_key=True) if strict and len(fk_names) != 1: raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % ( len(fk_names), model._meta.db_table, old_field.column, )) for fk_name in fk_names: fks_dropped.add((old_field.column,)) self.execute(self._delete_fk_sql(model, fk_name)) # Has unique been removed? if old_field.unique and (not new_field.unique or self._field_became_primary_key(old_field, new_field)): # Find the unique constraint for this field meta_constraint_names = {constraint.name for constraint in model._meta.constraints} constraint_names = self._constraint_names( model, [old_field.column], unique=True, primary_key=False, exclude=meta_constraint_names, ) if strict and len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % ( len(constraint_names), model._meta.db_table, old_field.column, )) for constraint_name in constraint_names: self.execute(self._delete_unique_sql(model, constraint_name)) # Drop incoming FK constraints if the field is a primary key or unique, # which might be a to_field target, and things are going to change. drop_foreign_keys = ( self.connection.features.supports_foreign_keys and ( (old_field.primary_key and new_field.primary_key) or (old_field.unique and new_field.unique) ) and old_type != new_type ) if drop_foreign_keys: # '_meta.related_field' also contains M2M reverse fields, these # will be filtered out for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field): rel_fk_names = self._constraint_names( new_rel.related_model, [new_rel.field.column], foreign_key=True ) for fk_name in rel_fk_names: self.execute(self._delete_fk_sql(new_rel.related_model, fk_name)) # Removed an index? (no strict check, as multiple indexes are possible) # Remove indexes if db_index switched to False or a unique constraint # will now be used in lieu of an index. The following lines from the # truth table show all True cases; the rest are False: # # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique # ------------------------------------------------------------------------------ # True | False | False | False # True | False | False | True # True | False | True | True if old_field.db_index and not old_field.unique and (not new_field.db_index or new_field.unique): # Find the index for this field meta_index_names = {index.name for index in model._meta.indexes} # Retrieve only BTREE indexes since this is what's created with # db_index=True. 
index_names = self._constraint_names( model, [old_field.column], index=True, type_=Index.suffix, exclude=meta_index_names, ) for index_name in index_names: # The only way to check if an index was created with # db_index=True or with Index(['field'], name='foo') # is to look at its name (refs #28053). self.execute(self._delete_index_sql(model, index_name)) # Change check constraints? if old_db_params['check'] != new_db_params['check'] and old_db_params['check']: meta_constraint_names = {constraint.name for constraint in model._meta.constraints} constraint_names = self._constraint_names( model, [old_field.column], check=True, exclude=meta_constraint_names, ) if strict and len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % ( len(constraint_names), model._meta.db_table, old_field.column, )) for constraint_name in constraint_names: self.execute(self._delete_check_sql(model, constraint_name)) # Have they renamed the column? if old_field.column != new_field.column: self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type)) # Rename all references to the renamed column. for sql in self.deferred_sql: if isinstance(sql, Statement): sql.rename_column_references(model._meta.db_table, old_field.column, new_field.column) # Next, start accumulating actions to do actions = [] null_actions = [] post_actions = [] # Collation change? old_collation = getattr(old_field, 'db_collation', None) new_collation = getattr(new_field, 'db_collation', None) if old_collation != new_collation: # Collation change handles also a type change. fragment = self._alter_column_collation_sql(model, new_field, new_type, new_collation) actions.append(fragment) # Type change? elif old_type != new_type: fragment, other_actions = self._alter_column_type_sql(model, old_field, new_field, new_type) actions.append(fragment) post_actions.extend(other_actions) # When changing a column NULL constraint to NOT NULL with a given # default value, we need to perform 4 steps: # 1. Add a default for new incoming writes # 2. Update existing NULL rows with new default # 3. Replace NULL constraint with NOT NULL # 4. Drop the default again. # Default change? needs_database_default = False if old_field.null and not new_field.null: old_default = self.effective_default(old_field) new_default = self.effective_default(new_field) if ( not self.skip_default_on_alter(new_field) and old_default != new_default and new_default is not None ): needs_database_default = True actions.append(self._alter_column_default_sql(model, old_field, new_field)) # Nullability change? if old_field.null != new_field.null: fragment = self._alter_column_null_sql(model, old_field, new_field) if fragment: null_actions.append(fragment) # Only if we have a default and there is a change from NULL to NOT NULL four_way_default_alteration = ( new_field.has_default() and (old_field.null and not new_field.null) ) if actions or null_actions: if not four_way_default_alteration: # If we don't have to do a 4-way default alteration we can # directly run a (NOT) NULL alteration actions = actions + null_actions # Combine actions together if we can (e.g. 
postgres) if self.connection.features.supports_combined_alters and actions: sql, params = tuple(zip(*actions)) actions = [(", ".join(sql), sum(params, []))] # Apply those actions for sql, params in actions: self.execute( self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": sql, }, params, ) if four_way_default_alteration: # Update existing rows with default value self.execute( self.sql_update_with_default % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(new_field.column), "default": "%s", }, [new_default], ) # Since we didn't run a NOT NULL change before we need to do it # now for sql, params in null_actions: self.execute( self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": sql, }, params, ) if post_actions: for sql, params in post_actions: self.execute(sql, params) # If primary_key changed to False, delete the primary key constraint. if old_field.primary_key and not new_field.primary_key: self._delete_primary_key(model, strict) # Added a unique? if self._unique_should_be_added(old_field, new_field): self.execute(self._create_unique_sql(model, [new_field.column])) # Added an index? Add an index if db_index switched to True or a unique # constraint will no longer be used in lieu of an index. The following # lines from the truth table show all True cases; the rest are False: # # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique # ------------------------------------------------------------------------------ # False | False | True | False # False | True | True | False # True | True | True | False if (not old_field.db_index or old_field.unique) and new_field.db_index and not new_field.unique: self.execute(self._create_index_sql(model, fields=[new_field])) # Type alteration on primary key? Then we need to alter the column # referring to us. rels_to_update = [] if drop_foreign_keys: rels_to_update.extend(_related_non_m2m_objects(old_field, new_field)) # Changed to become primary key? if self._field_became_primary_key(old_field, new_field): # Make the new one self.execute(self._create_primary_key_sql(model, new_field)) # Update all referencing columns rels_to_update.extend(_related_non_m2m_objects(old_field, new_field)) # Handle our type alters on the other end of rels from the PK stuff above for old_rel, new_rel in rels_to_update: rel_db_params = new_rel.field.db_parameters(connection=self.connection) rel_type = rel_db_params['type'] fragment, other_actions = self._alter_column_type_sql( new_rel.related_model, old_rel.field, new_rel.field, rel_type ) self.execute( self.sql_alter_column % { "table": self.quote_name(new_rel.related_model._meta.db_table), "changes": fragment[0], }, fragment[1], ) for sql, params in other_actions: self.execute(sql, params) # Does it have a foreign key? if (self.connection.features.supports_foreign_keys and new_field.remote_field and (fks_dropped or not old_field.remote_field or not old_field.db_constraint) and new_field.db_constraint): self.execute(self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s")) # Rebuild FKs that pointed to us if we previously had to drop them if drop_foreign_keys: for rel in new_field.model._meta.related_objects: if _is_relevant_relation(rel, new_field) and rel.field.db_constraint: self.execute(self._create_fk_sql(rel.related_model, rel.field, "_fk")) # Does it have check constraints we need to add? 
if old_db_params['check'] != new_db_params['check'] and new_db_params['check']: constraint_name = self._create_index_name(model._meta.db_table, [new_field.column], suffix='_check') self.execute(self._create_check_sql(model, constraint_name, new_db_params['check'])) # Drop the default if we need to # (Django usually does not use in-database defaults) if needs_database_default: changes_sql, params = self._alter_column_default_sql(model, old_field, new_field, drop=True) sql = self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": changes_sql, } self.execute(sql, params) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() def _alter_column_null_sql(self, model, old_field, new_field): """ Hook to specialize column null alteration. Return a (sql, params) fragment to set a column to null or non-null as required by new_field, or None if no changes are required. """ if (self.connection.features.interprets_empty_strings_as_nulls and new_field.get_internal_type() in ("CharField", "TextField")): # The field is nullable in the database anyway, leave it alone. return else: new_db_params = new_field.db_parameters(connection=self.connection) sql = self.sql_alter_column_null if new_field.null else self.sql_alter_column_not_null return ( sql % { 'column': self.quote_name(new_field.column), 'type': new_db_params['type'], }, [], ) def _alter_column_default_sql(self, model, old_field, new_field, drop=False): """ Hook to specialize column default alteration. Return a (sql, params) fragment to add or drop (depending on the drop argument) a default to new_field's column. """ new_default = self.effective_default(new_field) default = self._column_default_sql(new_field) params = [new_default] if drop: params = [] elif self.connection.features.requires_literal_defaults: # Some databases (Oracle) can't take defaults as a parameter # If this is the case, the SchemaEditor for that database should # implement prepare_default(). default = self.prepare_default(new_default) params = [] new_db_params = new_field.db_parameters(connection=self.connection) if drop: if new_field.null: sql = self.sql_alter_column_no_default_null else: sql = self.sql_alter_column_no_default else: sql = self.sql_alter_column_default return ( sql % { 'column': self.quote_name(new_field.column), 'type': new_db_params['type'], 'default': default, }, params, ) def _alter_column_type_sql(self, model, old_field, new_field, new_type): """ Hook to specialize column type alteration for different backends, for cases when a creation type is different to an alteration type (e.g. SERIAL in PostgreSQL, PostGIS fields). Return a two-tuple of: an SQL fragment of (sql, params) to insert into an ALTER TABLE statement and a list of extra (sql, params) tuples to run once the field is altered. 
""" return ( ( self.sql_alter_column_type % { "column": self.quote_name(new_field.column), "type": new_type, }, [], ), [], ) def _alter_column_collation_sql(self, model, new_field, new_type, new_collation): return ( self.sql_alter_column_collate % { 'column': self.quote_name(new_field.column), 'type': new_type, 'collation': self._collate_sql(new_collation) if new_collation else '', }, [], ) def _alter_many_to_many(self, model, old_field, new_field, strict): """Alter M2Ms to repoint their to= endpoints.""" # Rename the through table if old_field.remote_field.through._meta.db_table != new_field.remote_field.through._meta.db_table: self.alter_db_table(old_field.remote_field.through, old_field.remote_field.through._meta.db_table, new_field.remote_field.through._meta.db_table) # Repoint the FK to the other side self.alter_field( new_field.remote_field.through, # We need the field that points to the target model, so we can tell alter_field to change it - # this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model) old_field.remote_field.through._meta.get_field(old_field.m2m_reverse_field_name()), new_field.remote_field.through._meta.get_field(new_field.m2m_reverse_field_name()), ) self.alter_field( new_field.remote_field.through, # for self-referential models we need to alter field from the other end too old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()), new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()), ) def _create_index_name(self, table_name, column_names, suffix=""): """ Generate a unique name for an index/unique constraint. The name is divided into 3 parts: the table name, the column names, and a unique digest and suffix. """ _, table_name = split_identifier(table_name) hash_suffix_part = '%s%s' % (names_digest(table_name, *column_names, length=8), suffix) max_length = self.connection.ops.max_name_length() or 200 # If everything fits into max_length, use that name. index_name = '%s_%s_%s' % (table_name, '_'.join(column_names), hash_suffix_part) if len(index_name) <= max_length: return index_name # Shorten a long suffix. if len(hash_suffix_part) > max_length / 3: hash_suffix_part = hash_suffix_part[:max_length // 3] other_length = (max_length - len(hash_suffix_part)) // 2 - 1 index_name = '%s_%s_%s' % ( table_name[:other_length], '_'.join(column_names)[:other_length], hash_suffix_part, ) # Prepend D if needed to prevent the name from starting with an # underscore or a number (not permitted on Oracle). 
if index_name[0] == "_" or index_name[0].isdigit(): index_name = "D%s" % index_name[:-1] return index_name def _get_index_tablespace_sql(self, model, fields, db_tablespace=None): if db_tablespace is None: if len(fields) == 1 and fields[0].db_tablespace: db_tablespace = fields[0].db_tablespace elif model._meta.db_tablespace: db_tablespace = model._meta.db_tablespace if db_tablespace is not None: return ' ' + self.connection.ops.tablespace_sql(db_tablespace) return '' def _index_condition_sql(self, condition): if condition: return ' WHERE ' + condition return '' def _index_include_sql(self, model, columns): if not columns or not self.connection.features.supports_covering_indexes: return '' return Statement( ' INCLUDE (%(columns)s)', columns=Columns(model._meta.db_table, columns, self.quote_name), ) def _create_index_sql(self, model, *, fields=None, name=None, suffix='', using='', db_tablespace=None, col_suffixes=(), sql=None, opclasses=(), condition=None, include=None, expressions=None): """ Return the SQL statement to create the index for one or several fields or expressions. `sql` can be specified if the syntax differs from the standard (GIS indexes, ...). """ fields = fields or [] expressions = expressions or [] compiler = Query(model, alias_cols=False).get_compiler( connection=self.connection, ) tablespace_sql = self._get_index_tablespace_sql(model, fields, db_tablespace=db_tablespace) columns = [field.column for field in fields] sql_create_index = sql or self.sql_create_index table = model._meta.db_table def create_index_name(*args, **kwargs): nonlocal name if name is None: name = self._create_index_name(*args, **kwargs) return self.quote_name(name) return Statement( sql_create_index, table=Table(table, self.quote_name), name=IndexName(table, columns, suffix, create_index_name), using=using, columns=( self._index_columns(table, columns, col_suffixes, opclasses) if columns else Expressions(table, expressions, compiler, self.quote_value) ), extra=tablespace_sql, condition=self._index_condition_sql(condition), include=self._index_include_sql(model, include), ) def _delete_index_sql(self, model, name, sql=None): return Statement( sql or self.sql_delete_index, table=Table(model._meta.db_table, self.quote_name), name=self.quote_name(name), ) def _index_columns(self, table, columns, col_suffixes, opclasses): return Columns(table, columns, self.quote_name, col_suffixes=col_suffixes) def _model_indexes_sql(self, model): """ Return a list of all index SQL statements (field indexes, index_together, Meta.indexes) for the specified model. """ if not model._meta.managed or model._meta.proxy or model._meta.swapped: return [] output = [] for field in model._meta.local_fields: output.extend(self._field_indexes_sql(model, field)) for field_names in model._meta.index_together: fields = [model._meta.get_field(field) for field in field_names] output.append(self._create_index_sql(model, fields=fields, suffix='_idx')) for index in model._meta.indexes: if ( not index.contains_expressions or self.connection.features.supports_expression_indexes ): output.append(index.create_sql(model, self)) return output def _field_indexes_sql(self, model, field): """ Return a list of all index SQL statements for the specified field. 
""" output = [] if self._field_should_be_indexed(model, field): output.append(self._create_index_sql(model, fields=[field])) return output def _field_should_be_altered(self, old_field, new_field): _, old_path, old_args, old_kwargs = old_field.deconstruct() _, new_path, new_args, new_kwargs = new_field.deconstruct() # Don't alter when: # - changing only a field name # - changing an attribute that doesn't affect the schema # - adding only a db_column and the column name is not changed non_database_attrs = [ 'blank', 'db_column', 'editable', 'error_messages', 'help_text', 'limit_choices_to', # Database-level options are not supported, see #21961. 'on_delete', 'related_name', 'related_query_name', 'validators', 'verbose_name', ] for attr in non_database_attrs: old_kwargs.pop(attr, None) new_kwargs.pop(attr, None) return ( self.quote_name(old_field.column) != self.quote_name(new_field.column) or (old_path, old_args, old_kwargs) != (new_path, new_args, new_kwargs) ) def _field_should_be_indexed(self, model, field): return field.db_index and not field.unique def _field_became_primary_key(self, old_field, new_field): return not old_field.primary_key and new_field.primary_key def _unique_should_be_added(self, old_field, new_field): return (not old_field.unique and new_field.unique) or ( old_field.primary_key and not new_field.primary_key and new_field.unique ) def _rename_field_sql(self, table, old_field, new_field, new_type): return self.sql_rename_column % { "table": self.quote_name(table), "old_column": self.quote_name(old_field.column), "new_column": self.quote_name(new_field.column), "type": new_type, } def _create_fk_sql(self, model, field, suffix): table = Table(model._meta.db_table, self.quote_name) name = self._fk_constraint_name(model, field, suffix) column = Columns(model._meta.db_table, [field.column], self.quote_name) to_table = Table(field.target_field.model._meta.db_table, self.quote_name) to_column = Columns(field.target_field.model._meta.db_table, [field.target_field.column], self.quote_name) deferrable = self.connection.ops.deferrable_sql() return Statement( self.sql_create_fk, table=table, name=name, column=column, to_table=to_table, to_column=to_column, deferrable=deferrable, ) def _fk_constraint_name(self, model, field, suffix): def create_fk_name(*args, **kwargs): return self.quote_name(self._create_index_name(*args, **kwargs)) return ForeignKeyName( model._meta.db_table, [field.column], split_identifier(field.target_field.model._meta.db_table)[1], [field.target_field.column], suffix, create_fk_name, ) def _delete_fk_sql(self, model, name): return self._delete_constraint_sql(self.sql_delete_fk, model, name) def _deferrable_constraint_sql(self, deferrable): if deferrable is None: return '' if deferrable == Deferrable.DEFERRED: return ' DEFERRABLE INITIALLY DEFERRED' if deferrable == Deferrable.IMMEDIATE: return ' DEFERRABLE INITIALLY IMMEDIATE' def _unique_sql( self, model, fields, name, condition=None, deferrable=None, include=None, opclasses=None, expressions=None, ): if ( deferrable and not self.connection.features.supports_deferrable_unique_constraints ): return None if condition or include or opclasses or expressions: # Databases support conditional, covering, and functional unique # constraints via a unique index. 
sql = self._create_unique_sql( model, fields, name=name, condition=condition, include=include, opclasses=opclasses, expressions=expressions, ) if sql: self.deferred_sql.append(sql) return None constraint = self.sql_unique_constraint % { 'columns': ', '.join(map(self.quote_name, fields)), 'deferrable': self._deferrable_constraint_sql(deferrable), } return self.sql_constraint % { 'name': self.quote_name(name), 'constraint': constraint, } def _create_unique_sql( self, model, columns, name=None, condition=None, deferrable=None, include=None, opclasses=None, expressions=None, ): if ( ( deferrable and not self.connection.features.supports_deferrable_unique_constraints ) or (condition and not self.connection.features.supports_partial_indexes) or (include and not self.connection.features.supports_covering_indexes) or (expressions and not self.connection.features.supports_expression_indexes) ): return None def create_unique_name(*args, **kwargs): return self.quote_name(self._create_index_name(*args, **kwargs)) compiler = Query(model, alias_cols=False).get_compiler(connection=self.connection) table = model._meta.db_table if name is None: name = IndexName(table, columns, '_uniq', create_unique_name) else: name = self.quote_name(name) if condition or include or opclasses or expressions: sql = self.sql_create_unique_index else: sql = self.sql_create_unique if columns: columns = self._index_columns(table, columns, col_suffixes=(), opclasses=opclasses) else: columns = Expressions(table, expressions, compiler, self.quote_value) return Statement( sql, table=Table(table, self.quote_name), name=name, columns=columns, condition=self._index_condition_sql(condition), deferrable=self._deferrable_constraint_sql(deferrable), include=self._index_include_sql(model, include), ) def _delete_unique_sql( self, model, name, condition=None, deferrable=None, include=None, opclasses=None, expressions=None, ): if ( ( deferrable and not self.connection.features.supports_deferrable_unique_constraints ) or (condition and not self.connection.features.supports_partial_indexes) or (include and not self.connection.features.supports_covering_indexes) or (expressions and not self.connection.features.supports_expression_indexes) ): return None if condition or include or opclasses or expressions: sql = self.sql_delete_index else: sql = self.sql_delete_unique return self._delete_constraint_sql(sql, model, name) def _check_sql(self, name, check): return self.sql_constraint % { 'name': self.quote_name(name), 'constraint': self.sql_check_constraint % {'check': check}, } def _create_check_sql(self, model, name, check): return Statement( self.sql_create_check, table=Table(model._meta.db_table, self.quote_name), name=self.quote_name(name), check=check, ) def _delete_check_sql(self, model, name): return self._delete_constraint_sql(self.sql_delete_check, model, name) def _delete_constraint_sql(self, template, model, name): return Statement( template, table=Table(model._meta.db_table, self.quote_name), name=self.quote_name(name), ) def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None, foreign_key=None, check=None, type_=None, exclude=None): """Return all constraint names matching the columns and conditions.""" if column_names is not None: column_names = [ self.connection.introspection.identifier_converter(name) for name in column_names ] with self.connection.cursor() as cursor: constraints = self.connection.introspection.get_constraints(cursor, model._meta.db_table) result = [] for name, infodict in 
constraints.items(): if column_names is None or column_names == infodict['columns']: if unique is not None and infodict['unique'] != unique: continue if primary_key is not None and infodict['primary_key'] != primary_key: continue if index is not None and infodict['index'] != index: continue if check is not None and infodict['check'] != check: continue if foreign_key is not None and not infodict['foreign_key']: continue if type_ is not None and infodict['type'] != type_: continue if not exclude or name not in exclude: result.append(name) return result def _delete_primary_key(self, model, strict=False): constraint_names = self._constraint_names(model, primary_key=True) if strict and len(constraint_names) != 1: raise ValueError('Found wrong number (%s) of PK constraints for %s' % ( len(constraint_names), model._meta.db_table, )) for constraint_name in constraint_names: self.execute(self._delete_primary_key_sql(model, constraint_name)) def _create_primary_key_sql(self, model, field): return Statement( self.sql_create_pk, table=Table(model._meta.db_table, self.quote_name), name=self.quote_name( self._create_index_name(model._meta.db_table, [field.column], suffix="_pk") ), columns=Columns(model._meta.db_table, [field.column], self.quote_name), ) def _delete_primary_key_sql(self, model, name): return self._delete_constraint_sql(self.sql_delete_pk, model, name) def _collate_sql(self, collation): return ' COLLATE ' + self.quote_name(collation) def remove_procedure(self, procedure_name, param_types=()): sql = self.sql_delete_procedure % { 'procedure': self.quote_name(procedure_name), 'param_types': ','.join(param_types), } self.execute(sql)
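

# A minimal usage sketch, not part of Django itself: the public entry point to
# the machinery above is a schema editor obtained from the connection.  The
# "Author" model, the "myapp" app and the new max_length are hypothetical;
# connection.schema_editor(), alter_field() and set_attributes_from_name() are
# the real APIs being exercised.
def _example_alter_field_usage():
    from django.db import connection, models
    from myapp.models import Author  # hypothetical app/model

    old_field = Author._meta.get_field('name')
    new_field = models.CharField(max_length=400)
    new_field.set_attributes_from_name('name')
    # Entering the schema editor collects and executes DDL; alter_field()
    # decides between a no-op, an M2M through-table change, and the
    # _alter_field() implementation above.
    with connection.schema_editor() as editor:
        editor.alter_field(Author, old_field, new_field, strict=True)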
3398552b7d196759e9b9f146642952df1dc3cd3eba3faf47358ac4ab0061676d
import operator from django.db.backends.base.features import BaseDatabaseFeatures from django.utils.functional import cached_property class DatabaseFeatures(BaseDatabaseFeatures): empty_fetchmany_value = () allows_group_by_pk = True related_fields_match_type = True # MySQL doesn't support sliced subqueries with IN/ALL/ANY/SOME. allow_sliced_subqueries_with_in = False has_select_for_update = True supports_forward_references = False supports_regex_backreferencing = False supports_date_lookup_using_string = False supports_timezones = False requires_explicit_null_ordering_when_grouping = True can_release_savepoints = True atomic_transactions = False can_clone_databases = True supports_temporal_subtraction = True supports_select_intersection = False supports_select_difference = False supports_slicing_ordering_in_compound = True supports_index_on_text_field = False has_case_insensitive_like = False create_test_procedure_without_params_sql = """ CREATE PROCEDURE test_procedure () BEGIN DECLARE V_I INTEGER; SET V_I = 1; END; """ create_test_procedure_with_int_param_sql = """ CREATE PROCEDURE test_procedure (P_I INTEGER) BEGIN DECLARE V_I INTEGER; SET V_I = P_I; END; """ # Neither MySQL nor MariaDB support partial indexes. supports_partial_indexes = False # COLLATE must be wrapped in parentheses because MySQL treats COLLATE as an # indexed expression. collate_as_index_expression = True supports_order_by_nulls_modifier = False order_by_nulls_first = True test_collations = { 'ci': 'utf8_general_ci', 'non_default': 'utf8_esperanto_ci', 'swedish_ci': 'utf8_swedish_ci', } @cached_property def django_test_skips(self): skips = { "This doesn't work on MySQL.": { 'db_functions.comparison.test_greatest.GreatestTests.test_coalesce_workaround', 'db_functions.comparison.test_least.LeastTests.test_coalesce_workaround', }, 'Running on MySQL requires utf8mb4 encoding (#18392).': { 'model_fields.test_textfield.TextFieldTests.test_emoji', 'model_fields.test_charfield.TestCharField.test_emoji', }, "MySQL doesn't support functional indexes on a function that " "returns JSON": { 'schema.tests.SchemaTests.test_func_index_json_key_transform', }, "MySQL supports multiplying and dividing DurationFields by a " "scalar value but it's not implemented (#25287).": { 'expressions.tests.FTimeDeltaTests.test_durationfield_multiply_divide', }, } if 'ONLY_FULL_GROUP_BY' in self.connection.sql_mode: skips.update({ 'GROUP BY optimization does not work properly when ' 'ONLY_FULL_GROUP_BY mode is enabled on MySQL, see #31331.': { 'aggregation.tests.AggregateTestCase.test_aggregation_subquery_annotation_multivalued', 'annotations.tests.NonAggregateAnnotationTestCase.test_annotation_aggregate_with_m2o', }, }) if ( self.connection.mysql_is_mariadb and (10, 4, 3) < self.connection.mysql_version < (10, 5, 2) ): skips.update({ 'https://jira.mariadb.org/browse/MDEV-19598': { 'schema.tests.SchemaTests.test_alter_not_unique_field_to_primary_key', }, }) if ( self.connection.mysql_is_mariadb and (10, 4, 12) < self.connection.mysql_version < (10, 5) ): skips.update({ 'https://jira.mariadb.org/browse/MDEV-22775': { 'schema.tests.SchemaTests.test_alter_pk_with_self_referential_field', }, }) if not self.supports_explain_analyze: skips.update({ 'MariaDB and MySQL >= 8.0.18 specific.': { 'queries.test_explain.ExplainTests.test_mysql_analyze', }, }) return skips @cached_property def _mysql_storage_engine(self): "Internal method used in Django tests. 
Don't rely on this from your code" return self.connection.mysql_server_data['default_storage_engine'] @cached_property def allows_auto_pk_0(self): """ Autoincrement primary key can be set to 0 if it doesn't generate new autoincrement values. """ return 'NO_AUTO_VALUE_ON_ZERO' in self.connection.sql_mode @cached_property def update_can_self_select(self): return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (10, 3, 2) @cached_property def can_introspect_foreign_keys(self): "Confirm support for introspected foreign keys" return self._mysql_storage_engine != 'MyISAM' @cached_property def introspected_field_types(self): return { **super().introspected_field_types, 'BinaryField': 'TextField', 'BooleanField': 'IntegerField', 'DurationField': 'BigIntegerField', 'GenericIPAddressField': 'CharField', } @cached_property def can_return_columns_from_insert(self): return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (10, 5, 0) can_return_rows_from_bulk_insert = property(operator.attrgetter('can_return_columns_from_insert')) @cached_property def has_zoneinfo_database(self): return self.connection.mysql_server_data['has_zoneinfo_database'] @cached_property def is_sql_auto_is_null_enabled(self): return self.connection.mysql_server_data['sql_auto_is_null'] @cached_property def supports_over_clause(self): if self.connection.mysql_is_mariadb: return True return self.connection.mysql_version >= (8, 0, 2) supports_frame_range_fixed_distance = property(operator.attrgetter('supports_over_clause')) @cached_property def supports_column_check_constraints(self): if self.connection.mysql_is_mariadb: return self.connection.mysql_version >= (10, 2, 1) return self.connection.mysql_version >= (8, 0, 16) supports_table_check_constraints = property(operator.attrgetter('supports_column_check_constraints')) @cached_property def can_introspect_check_constraints(self): if self.connection.mysql_is_mariadb: version = self.connection.mysql_version return (version >= (10, 2, 22) and version < (10, 3)) or version >= (10, 3, 10) return self.connection.mysql_version >= (8, 0, 16) @cached_property def has_select_for_update_skip_locked(self): return not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (8, 0, 1) @cached_property def has_select_for_update_nowait(self): if self.connection.mysql_is_mariadb: return self.connection.mysql_version >= (10, 3, 0) return self.connection.mysql_version >= (8, 0, 1) @cached_property def has_select_for_update_of(self): return not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (8, 0, 1) @cached_property def supports_explain_analyze(self): return self.connection.mysql_is_mariadb or self.connection.mysql_version >= (8, 0, 18) @cached_property def supported_explain_formats(self): # Alias MySQL's TRADITIONAL to TEXT for consistency with other # backends. formats = {'JSON', 'TEXT', 'TRADITIONAL'} if not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (8, 0, 16): formats.add('TREE') return formats @cached_property def supports_transactions(self): """ All storage engines except MyISAM support transactions. """ return self._mysql_storage_engine != 'MyISAM' @cached_property def ignores_table_name_case(self): return self.connection.mysql_server_data['lower_case_table_names'] @cached_property def supports_default_in_lead_lag(self): # To be added in https://jira.mariadb.org/browse/MDEV-12981. 
return not self.connection.mysql_is_mariadb @cached_property def supports_json_field(self): if self.connection.mysql_is_mariadb: return self.connection.mysql_version >= (10, 2, 7) return self.connection.mysql_version >= (5, 7, 8) @cached_property def can_introspect_json_field(self): if self.connection.mysql_is_mariadb: return self.supports_json_field and self.can_introspect_check_constraints return self.supports_json_field @cached_property def supports_index_column_ordering(self): return ( not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (8, 0, 1) ) @cached_property def supports_expression_indexes(self): return ( not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (8, 0, 13) )
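

# A minimal usage sketch, not part of Django itself: the flags above are read
# through ``connection.features`` at runtime, and Django's test suite gates
# tests on them with ``skipUnlessDBFeature``.  The feature names used below are
# real attributes of this class; the surrounding code is illustrative only.
def _example_feature_flag_usage():
    from django.db import connection
    from django.test import TestCase, skipUnlessDBFeature

    if connection.features.supports_column_check_constraints:
        pass  # Safe to emit CHECK constraints in DDL on this server.

    @skipUnlessDBFeature('supports_select_intersection')
    class IntersectionTests(TestCase):
        # Skipped on MySQL, where supports_select_intersection above is False.
        def test_intersection(self):
            pass

    return IntersectionTests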
68f96395b0d59b7b2db53e696379616339864c64336c09f492b81e8a6243dea0
import uuid from django.conf import settings from django.db.backends.base.operations import BaseDatabaseOperations from django.utils import timezone from django.utils.encoding import force_str class DatabaseOperations(BaseDatabaseOperations): compiler_module = "django.db.backends.mysql.compiler" # MySQL stores positive fields as UNSIGNED ints. integer_field_ranges = { **BaseDatabaseOperations.integer_field_ranges, 'PositiveSmallIntegerField': (0, 65535), 'PositiveIntegerField': (0, 4294967295), 'PositiveBigIntegerField': (0, 18446744073709551615), } cast_data_types = { 'AutoField': 'signed integer', 'BigAutoField': 'signed integer', 'SmallAutoField': 'signed integer', 'CharField': 'char(%(max_length)s)', 'DecimalField': 'decimal(%(max_digits)s, %(decimal_places)s)', 'TextField': 'char', 'IntegerField': 'signed integer', 'BigIntegerField': 'signed integer', 'SmallIntegerField': 'signed integer', 'PositiveBigIntegerField': 'unsigned integer', 'PositiveIntegerField': 'unsigned integer', 'PositiveSmallIntegerField': 'unsigned integer', 'DurationField': 'signed integer', } cast_char_field_without_max_length = 'char' explain_prefix = 'EXPLAIN' def date_extract_sql(self, lookup_type, field_name): # https://dev.mysql.com/doc/mysql/en/date-and-time-functions.html if lookup_type == 'week_day': # DAYOFWEEK() returns an integer, 1-7, Sunday=1. return "DAYOFWEEK(%s)" % field_name elif lookup_type == 'iso_week_day': # WEEKDAY() returns an integer, 0-6, Monday=0. return "WEEKDAY(%s) + 1" % field_name elif lookup_type == 'week': # Override the value of default_week_format for consistency with # other database backends. # Mode 3: Monday, 1-53, with 4 or more days this year. return "WEEK(%s, 3)" % field_name elif lookup_type == 'iso_year': # Get the year part from the YEARWEEK function, which returns a # number as year * 100 + week. return "TRUNCATE(YEARWEEK(%s, 3), -2) / 100" % field_name else: # EXTRACT returns 1-53 based on ISO-8601 for the week number. return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name) def date_trunc_sql(self, lookup_type, field_name, tzname=None): field_name = self._convert_field_to_tz(field_name, tzname) fields = { 'year': '%%Y-01-01', 'month': '%%Y-%%m-01', } # Use double percents to escape. 
if lookup_type in fields: format_str = fields[lookup_type] return "CAST(DATE_FORMAT(%s, '%s') AS DATE)" % (field_name, format_str) elif lookup_type == 'quarter': return "MAKEDATE(YEAR(%s), 1) + INTERVAL QUARTER(%s) QUARTER - INTERVAL 1 QUARTER" % ( field_name, field_name ) elif lookup_type == 'week': return "DATE_SUB(%s, INTERVAL WEEKDAY(%s) DAY)" % ( field_name, field_name ) else: return "DATE(%s)" % (field_name) def _prepare_tzname_delta(self, tzname): if '+' in tzname: return tzname[tzname.find('+'):] elif '-' in tzname: return tzname[tzname.find('-'):] return tzname def _convert_field_to_tz(self, field_name, tzname): if tzname and settings.USE_TZ and self.connection.timezone_name != tzname: field_name = "CONVERT_TZ(%s, '%s', '%s')" % ( field_name, self.connection.timezone_name, self._prepare_tzname_delta(tzname), ) return field_name def datetime_cast_date_sql(self, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) return "DATE(%s)" % field_name def datetime_cast_time_sql(self, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) return "TIME(%s)" % field_name def datetime_extract_sql(self, lookup_type, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) return self.date_extract_sql(lookup_type, field_name) def datetime_trunc_sql(self, lookup_type, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) fields = ['year', 'month', 'day', 'hour', 'minute', 'second'] format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape. format_def = ('0000-', '01', '-01', ' 00:', '00', ':00') if lookup_type == 'quarter': return ( "CAST(DATE_FORMAT(MAKEDATE(YEAR({field_name}), 1) + " "INTERVAL QUARTER({field_name}) QUARTER - " + "INTERVAL 1 QUARTER, '%%Y-%%m-01 00:00:00') AS DATETIME)" ).format(field_name=field_name) if lookup_type == 'week': return ( "CAST(DATE_FORMAT(DATE_SUB({field_name}, " "INTERVAL WEEKDAY({field_name}) DAY), " "'%%Y-%%m-%%d 00:00:00') AS DATETIME)" ).format(field_name=field_name) try: i = fields.index(lookup_type) + 1 except ValueError: sql = field_name else: format_str = ''.join(format[:i] + format_def[i:]) sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str) return sql def time_trunc_sql(self, lookup_type, field_name, tzname=None): field_name = self._convert_field_to_tz(field_name, tzname) fields = { 'hour': '%%H:00:00', 'minute': '%%H:%%i:00', 'second': '%%H:%%i:%%s', } # Use double percents to escape. if lookup_type in fields: format_str = fields[lookup_type] return "CAST(DATE_FORMAT(%s, '%s') AS TIME)" % (field_name, format_str) else: return "TIME(%s)" % (field_name) def fetch_returned_insert_rows(self, cursor): """ Given a cursor object that has just performed an INSERT...RETURNING statement into a table, return the tuple of returned data. """ return cursor.fetchall() def format_for_duration_arithmetic(self, sql): return 'INTERVAL %s MICROSECOND' % sql def force_no_ordering(self): """ "ORDER BY NULL" prevents MySQL from implicitly ordering by grouped columns. If no ordering would otherwise be applied, we don't want any implicit sorting going on. """ return [(None, ("NULL", [], False))] def last_executed_query(self, cursor, sql, params): # With MySQLdb, cursor objects have an (undocumented) "_executed" # attribute where the exact query sent to the database is saved. # See MySQLdb/cursors.py in the source distribution. # MySQLdb returns string, PyMySQL bytes. 
return force_str(getattr(cursor, '_executed', None), errors='replace') def no_limit_value(self): # 2**64 - 1, as recommended by the MySQL documentation return 18446744073709551615 def quote_name(self, name): if name.startswith("`") and name.endswith("`"): return name # Quoting once is enough. return "`%s`" % name def return_insert_columns(self, fields): # MySQL and MariaDB < 10.5.0 don't support an INSERT...RETURNING # statement. if not fields: return '', () columns = [ '%s.%s' % ( self.quote_name(field.model._meta.db_table), self.quote_name(field.column), ) for field in fields ] return 'RETURNING %s' % ', '.join(columns), () def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False): if not tables: return [] sql = ['SET FOREIGN_KEY_CHECKS = 0;'] if reset_sequences: # It's faster to TRUNCATE tables that require a sequence reset # since ALTER TABLE AUTO_INCREMENT is slower than TRUNCATE. sql.extend( '%s %s;' % ( style.SQL_KEYWORD('TRUNCATE'), style.SQL_FIELD(self.quote_name(table_name)), ) for table_name in tables ) else: # Otherwise issue a simple DELETE since it's faster than TRUNCATE # and preserves sequences. sql.extend( '%s %s %s;' % ( style.SQL_KEYWORD('DELETE'), style.SQL_KEYWORD('FROM'), style.SQL_FIELD(self.quote_name(table_name)), ) for table_name in tables ) sql.append('SET FOREIGN_KEY_CHECKS = 1;') return sql def sequence_reset_by_name_sql(self, style, sequences): return [ '%s %s %s %s = 1;' % ( style.SQL_KEYWORD('ALTER'), style.SQL_KEYWORD('TABLE'), style.SQL_FIELD(self.quote_name(sequence_info['table'])), style.SQL_FIELD('AUTO_INCREMENT'), ) for sequence_info in sequences ] def validate_autopk_value(self, value): # Zero in AUTO_INCREMENT field does not work without the # NO_AUTO_VALUE_ON_ZERO SQL mode. if value == 0 and not self.connection.features.allows_auto_pk_0: raise ValueError('The database backend does not accept 0 as a ' 'value for AutoField.') return value def adapt_datetimefield_value(self, value): if value is None: return None # Expression values are adapted by the database. if hasattr(value, 'resolve_expression'): return value # MySQL doesn't support tz-aware datetimes if timezone.is_aware(value): if settings.USE_TZ: value = timezone.make_naive(value, self.connection.timezone) else: raise ValueError("MySQL backend does not support timezone-aware datetimes when USE_TZ is False.") return str(value) def adapt_timefield_value(self, value): if value is None: return None # Expression values are adapted by the database. if hasattr(value, 'resolve_expression'): return value # MySQL doesn't support tz-aware times if timezone.is_aware(value): raise ValueError("MySQL backend does not support timezone-aware times.") return value.isoformat(timespec='microseconds') def max_name_length(self): return 64 def pk_default_value(self): return 'NULL' def bulk_insert_sql(self, fields, placeholder_rows): placeholder_rows_sql = (", ".join(row) for row in placeholder_rows) values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql) return "VALUES " + values_sql def combine_expression(self, connector, sub_expressions): if connector == '^': return 'POW(%s)' % ','.join(sub_expressions) # Convert the result to a signed integer since MySQL's binary operators # return an unsigned integer. 
elif connector in ('&', '|', '<<', '#'): connector = '^' if connector == '#' else connector return 'CONVERT(%s, SIGNED)' % connector.join(sub_expressions) elif connector == '>>': lhs, rhs = sub_expressions return 'FLOOR(%(lhs)s / POW(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs} return super().combine_expression(connector, sub_expressions) def get_db_converters(self, expression): converters = super().get_db_converters(expression) internal_type = expression.output_field.get_internal_type() if internal_type == 'BooleanField': converters.append(self.convert_booleanfield_value) elif internal_type == 'DateTimeField': if settings.USE_TZ: converters.append(self.convert_datetimefield_value) elif internal_type == 'UUIDField': converters.append(self.convert_uuidfield_value) return converters def convert_booleanfield_value(self, value, expression, connection): if value in (0, 1): value = bool(value) return value def convert_datetimefield_value(self, value, expression, connection): if value is not None: value = timezone.make_aware(value, self.connection.timezone) return value def convert_uuidfield_value(self, value, expression, connection): if value is not None: value = uuid.UUID(value) return value def binary_placeholder_sql(self, value): return '_binary %s' if value is not None and not hasattr(value, 'as_sql') else '%s' def subtract_temporals(self, internal_type, lhs, rhs): lhs_sql, lhs_params = lhs rhs_sql, rhs_params = rhs if internal_type == 'TimeField': if self.connection.mysql_is_mariadb: # MariaDB includes the microsecond component in TIME_TO_SEC as # a decimal. MySQL returns an integer without microseconds. return 'CAST((TIME_TO_SEC(%(lhs)s) - TIME_TO_SEC(%(rhs)s)) * 1000000 AS SIGNED)' % { 'lhs': lhs_sql, 'rhs': rhs_sql }, (*lhs_params, *rhs_params) return ( "((TIME_TO_SEC(%(lhs)s) * 1000000 + MICROSECOND(%(lhs)s)) -" " (TIME_TO_SEC(%(rhs)s) * 1000000 + MICROSECOND(%(rhs)s)))" ) % {'lhs': lhs_sql, 'rhs': rhs_sql}, tuple(lhs_params) * 2 + tuple(rhs_params) * 2 params = (*rhs_params, *lhs_params) return "TIMESTAMPDIFF(MICROSECOND, %s, %s)" % (rhs_sql, lhs_sql), params def explain_query_prefix(self, format=None, **options): # Alias MySQL's TRADITIONAL to TEXT for consistency with other backends. if format and format.upper() == 'TEXT': format = 'TRADITIONAL' elif not format and 'TREE' in self.connection.features.supported_explain_formats: # Use TREE by default (if supported) as it's more informative. format = 'TREE' analyze = options.pop('analyze', False) prefix = super().explain_query_prefix(format, **options) if analyze and self.connection.features.supports_explain_analyze: # MariaDB uses ANALYZE instead of EXPLAIN ANALYZE. prefix = 'ANALYZE' if self.connection.mysql_is_mariadb else prefix + ' ANALYZE' if format and not (analyze and not self.connection.mysql_is_mariadb): # Only MariaDB supports the analyze option with formats. prefix += ' FORMAT=%s' % format return prefix def regex_lookup(self, lookup_type): # REGEXP BINARY doesn't work correctly in MySQL 8+ and REGEXP_LIKE # doesn't exist in MySQL 5.x or in MariaDB. 
if self.connection.mysql_version < (8, 0, 0) or self.connection.mysql_is_mariadb: if lookup_type == 'regex': return '%s REGEXP BINARY %s' return '%s REGEXP %s' match_option = 'c' if lookup_type == 'regex' else 'i' return "REGEXP_LIKE(%%s, %%s, '%s')" % match_option def insert_statement(self, ignore_conflicts=False): return 'INSERT IGNORE INTO' if ignore_conflicts else super().insert_statement(ignore_conflicts) def lookup_cast(self, lookup_type, internal_type=None): lookup = '%s' if internal_type == 'JSONField': if self.connection.mysql_is_mariadb or lookup_type in ( 'iexact', 'contains', 'icontains', 'startswith', 'istartswith', 'endswith', 'iendswith', 'regex', 'iregex', ): lookup = 'JSON_UNQUOTE(%s)' return lookup
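

# A minimal usage sketch, not part of Django itself: what the truncation
# helpers above return for a hypothetical column named "created", assuming no
# timezone conversion is needed (tzname is None).  The doubled percent signs
# survive at this stage on purpose ("Use double percents to escape"); they are
# collapsed to single ones only when the final query is interpolated with its
# parameters by the DB-API cursor.
def _example_trunc_sql():
    from django.db import connection
    ops = connection.ops
    month = ops.date_trunc_sql('month', 'created')
    # -> "CAST(DATE_FORMAT(created, '%%Y-%%m-01') AS DATE)"
    minute = ops.time_trunc_sql('minute', 'created')
    # -> "CAST(DATE_FORMAT(created, '%%H:%%i:00') AS TIME)"
    return month, minute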
391804f8cc6ab991d5cfe823935b48799c20abeba08fd35b152b0e9bc2325867
import copy from decimal import Decimal from django.apps.registry import Apps from django.db import NotSupportedError from django.db.backends.base.schema import BaseDatabaseSchemaEditor from django.db.backends.ddl_references import Statement from django.db.backends.utils import strip_quotes from django.db.models import UniqueConstraint from django.db.transaction import atomic class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): sql_delete_table = "DROP TABLE %(table)s" sql_create_fk = None sql_create_inline_fk = "REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED" sql_create_unique = "CREATE UNIQUE INDEX %(name)s ON %(table)s (%(columns)s)" sql_delete_unique = "DROP INDEX %(name)s" def __enter__(self): # Some SQLite schema alterations need foreign key constraints to be # disabled. Enforce it here for the duration of the schema edition. if not self.connection.disable_constraint_checking(): raise NotSupportedError( 'SQLite schema editor cannot be used while foreign key ' 'constraint checks are enabled. Make sure to disable them ' 'before entering a transaction.atomic() context because ' 'SQLite does not support disabling them in the middle of ' 'a multi-statement transaction.' ) return super().__enter__() def __exit__(self, exc_type, exc_value, traceback): self.connection.check_constraints() super().__exit__(exc_type, exc_value, traceback) self.connection.enable_constraint_checking() def quote_value(self, value): # The backend "mostly works" without this function and there are use # cases for compiling Python without the sqlite3 libraries (e.g. # security hardening). try: import sqlite3 value = sqlite3.adapt(value) except ImportError: pass except sqlite3.ProgrammingError: pass # Manual emulation of SQLite parameter quoting if isinstance(value, bool): return str(int(value)) elif isinstance(value, (Decimal, float, int)): return str(value) elif isinstance(value, str): return "'%s'" % value.replace("\'", "\'\'") elif value is None: return "NULL" elif isinstance(value, (bytes, bytearray, memoryview)): # Bytes are only allowed for BLOB fields, encoded as string # literals containing hexadecimal data and preceded by a single "X" # character. return "X'%s'" % value.hex() else: raise ValueError("Cannot quote parameter value %r of type %s" % (value, type(value))) def _is_referenced_by_fk_constraint(self, table_name, column_name=None, ignore_self=False): """ Return whether or not the provided table name is referenced by another one. If `column_name` is specified, only references pointing to that column are considered. If `ignore_self` is True, self-referential constraints are ignored. 
""" with self.connection.cursor() as cursor: for other_table in self.connection.introspection.get_table_list(cursor): if ignore_self and other_table.name == table_name: continue constraints = self.connection.introspection._get_foreign_key_constraints(cursor, other_table.name) for constraint in constraints.values(): constraint_table, constraint_column = constraint['foreign_key'] if (constraint_table == table_name and (column_name is None or constraint_column == column_name)): return True return False def alter_db_table(self, model, old_db_table, new_db_table, disable_constraints=True): if (not self.connection.features.supports_atomic_references_rename and disable_constraints and self._is_referenced_by_fk_constraint(old_db_table)): if self.connection.in_atomic_block: raise NotSupportedError(( 'Renaming the %r table while in a transaction is not ' 'supported on SQLite < 3.26 because it would break referential ' 'integrity. Try adding `atomic = False` to the Migration class.' ) % old_db_table) self.connection.enable_constraint_checking() super().alter_db_table(model, old_db_table, new_db_table) self.connection.disable_constraint_checking() else: super().alter_db_table(model, old_db_table, new_db_table) def alter_field(self, model, old_field, new_field, strict=False): if not self._field_should_be_altered(old_field, new_field): return old_field_name = old_field.name table_name = model._meta.db_table _, old_column_name = old_field.get_attname_column() if (new_field.name != old_field_name and not self.connection.features.supports_atomic_references_rename and self._is_referenced_by_fk_constraint(table_name, old_column_name, ignore_self=True)): if self.connection.in_atomic_block: raise NotSupportedError(( 'Renaming the %r.%r column while in a transaction is not ' 'supported on SQLite < 3.26 because it would break referential ' 'integrity. Try adding `atomic = False` to the Migration class.' ) % (model._meta.db_table, old_field_name)) with atomic(self.connection.alias): super().alter_field(model, old_field, new_field, strict=strict) # Follow SQLite's documented procedure for performing changes # that don't affect the on-disk content. # https://sqlite.org/lang_altertable.html#otheralter with self.connection.cursor() as cursor: schema_version = cursor.execute('PRAGMA schema_version').fetchone()[0] cursor.execute('PRAGMA writable_schema = 1') references_template = ' REFERENCES "%s" ("%%s") ' % table_name new_column_name = new_field.get_attname_column()[1] search = references_template % old_column_name replacement = references_template % new_column_name cursor.execute('UPDATE sqlite_master SET sql = replace(sql, %s, %s)', (search, replacement)) cursor.execute('PRAGMA schema_version = %d' % (schema_version + 1)) cursor.execute('PRAGMA writable_schema = 0') # The integrity check will raise an exception and rollback # the transaction if the sqlite_master updates corrupt the # database. cursor.execute('PRAGMA integrity_check') # Perform a VACUUM to refresh the database representation from # the sqlite_master table. with self.connection.cursor() as cursor: cursor.execute('VACUUM') else: super().alter_field(model, old_field, new_field, strict=strict) def _remake_table(self, model, create_field=None, delete_field=None, alter_field=None): """ Shortcut to transform a model from old_model into new_model This follows the correct procedure to perform non-rename or column addition operations based on SQLite's documentation https://www.sqlite.org/lang_altertable.html#caution The essential steps are: 1. 
Create a table with the updated definition called "new__app_model" 2. Copy the data from the existing "app_model" table to the new table 3. Drop the "app_model" table 4. Rename the "new__app_model" table to "app_model" 5. Restore any index of the previous "app_model" table. """ # Self-referential fields must be recreated rather than copied from # the old model to ensure their remote_field.field_name doesn't refer # to an altered field. def is_self_referential(f): return f.is_relation and f.remote_field.model is model # Work out the new fields dict / mapping body = { f.name: f.clone() if is_self_referential(f) else f for f in model._meta.local_concrete_fields } # Since mapping might mix column names and default values, # its values must be already quoted. mapping = {f.column: self.quote_name(f.column) for f in model._meta.local_concrete_fields} # This maps field names (not columns) for things like unique_together rename_mapping = {} # If any of the new or altered fields is introducing a new PK, # remove the old one restore_pk_field = None if getattr(create_field, 'primary_key', False) or ( alter_field and getattr(alter_field[1], 'primary_key', False)): for name, field in list(body.items()): if field.primary_key: field.primary_key = False restore_pk_field = field if field.auto_created: del body[name] del mapping[field.column] # Add in any created fields if create_field: body[create_field.name] = create_field # Choose a default and insert it into the copy map if not create_field.many_to_many and create_field.concrete: mapping[create_field.column] = self.quote_value( self.effective_default(create_field) ) # Add in any altered fields if alter_field: old_field, new_field = alter_field body.pop(old_field.name, None) mapping.pop(old_field.column, None) body[new_field.name] = new_field if old_field.null and not new_field.null: case_sql = "coalesce(%(col)s, %(default)s)" % { 'col': self.quote_name(old_field.column), 'default': self.quote_value(self.effective_default(new_field)) } mapping[new_field.column] = case_sql else: mapping[new_field.column] = self.quote_name(old_field.column) rename_mapping[old_field.name] = new_field.name # Remove any deleted fields if delete_field: del body[delete_field.name] del mapping[delete_field.column] # Remove any implicit M2M tables if delete_field.many_to_many and delete_field.remote_field.through._meta.auto_created: return self.delete_model(delete_field.remote_field.through) # Work inside a new app registry apps = Apps() # Work out the new value of unique_together, taking renames into # account unique_together = [ [rename_mapping.get(n, n) for n in unique] for unique in model._meta.unique_together ] # Work out the new value for index_together, taking renames into # account index_together = [ [rename_mapping.get(n, n) for n in index] for index in model._meta.index_together ] indexes = model._meta.indexes if delete_field: indexes = [ index for index in indexes if delete_field.name not in index.fields ] constraints = list(model._meta.constraints) # Provide isolated instances of the fields to the new model body so # that the existing model's internals aren't interfered with when # the dummy model is constructed. body_copy = copy.deepcopy(body) # Construct a new model with the new fields to allow self referential # primary key to resolve to. This model won't ever be materialized as a # table and solely exists for foreign key reference resolution purposes. # This wouldn't be required if the schema editor was operating on model # states instead of rendered models. 
meta_contents = { 'app_label': model._meta.app_label, 'db_table': model._meta.db_table, 'unique_together': unique_together, 'index_together': index_together, 'indexes': indexes, 'constraints': constraints, 'apps': apps, } meta = type("Meta", (), meta_contents) body_copy['Meta'] = meta body_copy['__module__'] = model.__module__ type(model._meta.object_name, model.__bases__, body_copy) # Construct a model with a renamed table name. body_copy = copy.deepcopy(body) meta_contents = { 'app_label': model._meta.app_label, 'db_table': 'new__%s' % strip_quotes(model._meta.db_table), 'unique_together': unique_together, 'index_together': index_together, 'indexes': indexes, 'constraints': constraints, 'apps': apps, } meta = type("Meta", (), meta_contents) body_copy['Meta'] = meta body_copy['__module__'] = model.__module__ new_model = type('New%s' % model._meta.object_name, model.__bases__, body_copy) # Create a new table with the updated schema. self.create_model(new_model) # Copy data from the old table into the new table self.execute("INSERT INTO %s (%s) SELECT %s FROM %s" % ( self.quote_name(new_model._meta.db_table), ', '.join(self.quote_name(x) for x in mapping), ', '.join(mapping.values()), self.quote_name(model._meta.db_table), )) # Delete the old table to make way for the new self.delete_model(model, handle_autom2m=False) # Rename the new table to take way for the old self.alter_db_table( new_model, new_model._meta.db_table, model._meta.db_table, disable_constraints=False, ) # Run deferred SQL on correct table for sql in self.deferred_sql: self.execute(sql) self.deferred_sql = [] # Fix any PK-removed field if restore_pk_field: restore_pk_field.primary_key = True def delete_model(self, model, handle_autom2m=True): if handle_autom2m: super().delete_model(model) else: # Delete the table (and only that) self.execute(self.sql_delete_table % { "table": self.quote_name(model._meta.db_table), }) # Remove all deferred statements referencing the deleted table. for sql in list(self.deferred_sql): if isinstance(sql, Statement) and sql.references_table(model._meta.db_table): self.deferred_sql.remove(sql) def add_field(self, model, field): """ Create a field on a model. Usually involves adding a column, but may involve adding a table instead (for M2M fields). """ # Special-case implicit M2M tables if field.many_to_many and field.remote_field.through._meta.auto_created: return self.create_model(field.remote_field.through) self._remake_table(model, create_field=field) def remove_field(self, model, field): """ Remove a field from a model. Usually involves deleting a column, but for M2Ms may involve deleting a table. """ # M2M fields are a special case if field.many_to_many: # For implicit M2M tables, delete the auto-created table if field.remote_field.through._meta.auto_created: self.delete_model(field.remote_field.through) # For explicit "through" M2M fields, do nothing # For everything else, remake. else: # It might not actually have a column behind it if field.db_parameters(connection=self.connection)['type'] is None: return self._remake_table(model, delete_field=field) def _alter_field(self, model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict=False): """Perform a "physical" (non-ManyToMany) field update.""" # Use "ALTER TABLE ... RENAME COLUMN" if only the column name # changed and there aren't any constraints. 
if (self.connection.features.can_alter_table_rename_column and old_field.column != new_field.column and self.column_sql(model, old_field) == self.column_sql(model, new_field) and not (old_field.remote_field and old_field.db_constraint or new_field.remote_field and new_field.db_constraint)): return self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type)) # Alter by remaking table self._remake_table(model, alter_field=(old_field, new_field)) # Rebuild tables with FKs pointing to this field. if new_field.unique and old_type != new_type: related_models = set() opts = new_field.model._meta for remote_field in opts.related_objects: # Ignore self-relationship since the table was already rebuilt. if remote_field.related_model == model: continue if not remote_field.many_to_many: if remote_field.field_name == new_field.name: related_models.add(remote_field.related_model) elif new_field.primary_key and remote_field.through._meta.auto_created: related_models.add(remote_field.through) if new_field.primary_key: for many_to_many in opts.many_to_many: # Ignore self-relationship since the table was already rebuilt. if many_to_many.related_model == model: continue if many_to_many.remote_field.through._meta.auto_created: related_models.add(many_to_many.remote_field.through) for related_model in related_models: self._remake_table(related_model) def _alter_many_to_many(self, model, old_field, new_field, strict): """Alter M2Ms to repoint their to= endpoints.""" if old_field.remote_field.through._meta.db_table == new_field.remote_field.through._meta.db_table: # The field name didn't change, but some options did; we have to propagate this altering. self._remake_table( old_field.remote_field.through, alter_field=( # We need the field that points to the target model, so we can tell alter_field to change it - # this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model) old_field.remote_field.through._meta.get_field(old_field.m2m_reverse_field_name()), new_field.remote_field.through._meta.get_field(new_field.m2m_reverse_field_name()), ), ) return # Make a new through table self.create_model(new_field.remote_field.through) # Copy the data across self.execute("INSERT INTO %s (%s) SELECT %s FROM %s" % ( self.quote_name(new_field.remote_field.through._meta.db_table), ', '.join([ "id", new_field.m2m_column_name(), new_field.m2m_reverse_name(), ]), ', '.join([ "id", old_field.m2m_column_name(), old_field.m2m_reverse_name(), ]), self.quote_name(old_field.remote_field.through._meta.db_table), )) # Delete the old through table self.delete_model(old_field.remote_field.through) def add_constraint(self, model, constraint): if isinstance(constraint, UniqueConstraint) and ( constraint.condition or constraint.contains_expressions or constraint.include or constraint.deferrable ): super().add_constraint(model, constraint) else: self._remake_table(model) def remove_constraint(self, model, constraint): if isinstance(constraint, UniqueConstraint) and ( constraint.condition or constraint.contains_expressions or constraint.include or constraint.deferrable ): super().remove_constraint(model, constraint) else: self._remake_table(model) def _collate_sql(self, collation): return ' COLLATE ' + collation
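

# A minimal usage sketch, not part of Django itself: on SQLite nearly every
# column change goes through _remake_table() above, i.e. the table is copied
# rather than altered in place.  The "Author" model, "myapp" app and field
# name are hypothetical; add_field() and set_attributes_from_name() are real
# APIs.
def _example_add_field_on_sqlite():
    from django.db import connection, models
    from myapp.models import Author  # hypothetical app/model

    new_field = models.IntegerField(null=True)
    new_field.set_attributes_from_name('num_books')
    with connection.schema_editor() as editor:
        # Roughly executes, per the procedure documented in _remake_table():
        #   CREATE TABLE "new__myapp_author" (...);
        #   INSERT INTO "new__myapp_author" (...) SELECT ... FROM "myapp_author";
        #   DROP TABLE "myapp_author";
        #   ALTER TABLE "new__myapp_author" RENAME TO "myapp_author";
        editor.add_field(Author, new_field)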
# --- django/core/files/storage.py ---
import os import pathlib from datetime import datetime from urllib.parse import urljoin from django.conf import settings from django.core.exceptions import SuspiciousFileOperation from django.core.files import File, locks from django.core.files.move import file_move_safe from django.core.files.utils import validate_file_name from django.core.signals import setting_changed from django.utils import timezone from django.utils._os import safe_join from django.utils.crypto import get_random_string from django.utils.deconstruct import deconstructible from django.utils.encoding import filepath_to_uri from django.utils.functional import LazyObject, cached_property from django.utils.module_loading import import_string from django.utils.text import get_valid_filename __all__ = ( 'Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage', 'get_storage_class', ) class Storage: """ A base storage class, providing some default behaviors that all other storage systems can inherit or override, as necessary. """ # The following methods represent a public interface to private methods. # These shouldn't be overridden by subclasses unless absolutely necessary. def open(self, name, mode='rb'): """Retrieve the specified file from storage.""" return self._open(name, mode) def save(self, name, content, max_length=None): """ Save new content to the file specified by name. The content should be a proper File object or any Python file-like object, ready to be read from the beginning. """ # Get the proper name for the file, as it will actually be saved. if name is None: name = content.name if not hasattr(content, 'chunks'): content = File(content, name) name = self.get_available_name(name, max_length=max_length) return self._save(name, content) # These methods are part of the public API, with default implementations. def get_valid_name(self, name): """ Return a filename, based on the provided filename, that's suitable for use in the target storage system. """ return get_valid_filename(name) def get_alternative_name(self, file_root, file_ext): """ Return an alternative filename, by adding an underscore and a random 7 character alphanumeric string (before the file extension, if one exists) to the filename. """ return '%s_%s%s' % (file_root, get_random_string(7), file_ext) def get_available_name(self, name, max_length=None): """ Return a filename that's free on the target storage system and available for new content to be written to. """ dir_name, file_name = os.path.split(name) if '..' in pathlib.PurePath(dir_name).parts: raise SuspiciousFileOperation("Detected path traversal attempt in '%s'" % dir_name) validate_file_name(file_name) file_root, file_ext = os.path.splitext(file_name) # If the filename already exists, generate an alternative filename # until it doesn't exist. # Truncate original name if required, so the new filename does not # exceed the max_length. while self.exists(name) or (max_length and len(name) > max_length): # file_ext includes the dot. name = os.path.join(dir_name, self.get_alternative_name(file_root, file_ext)) if max_length is None: continue # Truncate file_root if max_length exceeded. truncation = len(name) - max_length if truncation > 0: file_root = file_root[:-truncation] # Entire file_root was truncated in attempt to find an available filename. if not file_root: raise SuspiciousFileOperation( 'Storage can not find an available filename for "%s". ' 'Please make sure that the corresponding file field ' 'allows sufficient "max_length".' 
% name ) name = os.path.join(dir_name, self.get_alternative_name(file_root, file_ext)) return name def generate_filename(self, filename): """ Validate the filename by calling get_valid_name() and return a filename to be passed to the save() method. """ # `filename` may include a path as returned by FileField.upload_to. dirname, filename = os.path.split(filename) if '..' in pathlib.PurePath(dirname).parts: raise SuspiciousFileOperation("Detected path traversal attempt in '%s'" % dirname) return os.path.normpath(os.path.join(dirname, self.get_valid_name(filename))) def path(self, name): """ Return a local filesystem path where the file can be retrieved using Python's built-in open() function. Storage systems that can't be accessed using open() should *not* implement this method. """ raise NotImplementedError("This backend doesn't support absolute paths.") # The following methods form the public API for storage systems, but with # no default implementations. Subclasses must implement *all* of these. def delete(self, name): """ Delete the specified file from the storage system. """ raise NotImplementedError('subclasses of Storage must provide a delete() method') def exists(self, name): """ Return True if a file referenced by the given name already exists in the storage system, or False if the name is available for a new file. """ raise NotImplementedError('subclasses of Storage must provide an exists() method') def listdir(self, path): """ List the contents of the specified path. Return a 2-tuple of lists: the first item being directories, the second item being files. """ raise NotImplementedError('subclasses of Storage must provide a listdir() method') def size(self, name): """ Return the total size, in bytes, of the file specified by name. """ raise NotImplementedError('subclasses of Storage must provide a size() method') def url(self, name): """ Return an absolute URL where the file's contents can be accessed directly by a Web browser. """ raise NotImplementedError('subclasses of Storage must provide a url() method') def get_accessed_time(self, name): """ Return the last accessed time (as a datetime) of the file specified by name. The datetime will be timezone-aware if USE_TZ=True. """ raise NotImplementedError('subclasses of Storage must provide a get_accessed_time() method') def get_created_time(self, name): """ Return the creation time (as a datetime) of the file specified by name. The datetime will be timezone-aware if USE_TZ=True. """ raise NotImplementedError('subclasses of Storage must provide a get_created_time() method') def get_modified_time(self, name): """ Return the last modified time (as a datetime) of the file specified by name. The datetime will be timezone-aware if USE_TZ=True. """ raise NotImplementedError('subclasses of Storage must provide a get_modified_time() method') @deconstructible class FileSystemStorage(Storage): """ Standard filesystem storage """ # The combination of O_CREAT and O_EXCL makes os.open() raise OSError if # the file already exists before it's opened. 
OS_OPEN_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, 'O_BINARY', 0) def __init__(self, location=None, base_url=None, file_permissions_mode=None, directory_permissions_mode=None): self._location = location self._base_url = base_url self._file_permissions_mode = file_permissions_mode self._directory_permissions_mode = directory_permissions_mode setting_changed.connect(self._clear_cached_properties) def _clear_cached_properties(self, setting, **kwargs): """Reset setting based property values.""" if setting == 'MEDIA_ROOT': self.__dict__.pop('base_location', None) self.__dict__.pop('location', None) elif setting == 'MEDIA_URL': self.__dict__.pop('base_url', None) elif setting == 'FILE_UPLOAD_PERMISSIONS': self.__dict__.pop('file_permissions_mode', None) elif setting == 'FILE_UPLOAD_DIRECTORY_PERMISSIONS': self.__dict__.pop('directory_permissions_mode', None) def _value_or_setting(self, value, setting): return setting if value is None else value @cached_property def base_location(self): return self._value_or_setting(self._location, settings.MEDIA_ROOT) @cached_property def location(self): return os.path.abspath(self.base_location) @cached_property def base_url(self): if self._base_url is not None and not self._base_url.endswith('/'): self._base_url += '/' return self._value_or_setting(self._base_url, settings.MEDIA_URL) @cached_property def file_permissions_mode(self): return self._value_or_setting(self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS) @cached_property def directory_permissions_mode(self): return self._value_or_setting(self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS) def _open(self, name, mode='rb'): return File(open(self.path(name), mode)) def _save(self, name, content): full_path = self.path(name) # Create any intermediate directories that do not exist. directory = os.path.dirname(full_path) try: if self.directory_permissions_mode is not None: # Set the umask because os.makedirs() doesn't apply the "mode" # argument to intermediate-level directories. old_umask = os.umask(0o777 & ~self.directory_permissions_mode) try: os.makedirs(directory, self.directory_permissions_mode, exist_ok=True) finally: os.umask(old_umask) else: os.makedirs(directory, exist_ok=True) except FileExistsError: raise FileExistsError('%s exists and is not a directory.' % directory) # There's a potential race condition between get_available_name and # saving the file; it's possible that two threads might return the # same name, at which point all sorts of fun happens. So we need to # try to create the file, but if it already exists we have to go back # to get_available_name() and try again. while True: try: # This file has a file path that we can move. if hasattr(content, 'temporary_file_path'): file_move_safe(content.temporary_file_path(), full_path) # This is a normal uploadedfile that we can stream. else: # The current umask value is masked out by os.open! fd = os.open(full_path, self.OS_OPEN_FLAGS, 0o666) _file = None try: locks.lock(fd, locks.LOCK_EX) for chunk in content.chunks(): if _file is None: mode = 'wb' if isinstance(chunk, bytes) else 'wt' _file = os.fdopen(fd, mode) _file.write(chunk) finally: locks.unlock(fd) if _file is not None: _file.close() else: os.close(fd) except FileExistsError: # A new name is needed if the file exists. name = self.get_available_name(name) full_path = self.path(name) else: # OK, the file save worked. Break out of the loop. 
break if self.file_permissions_mode is not None: os.chmod(full_path, self.file_permissions_mode) # Store filenames with forward slashes, even on Windows. return str(name).replace('\\', '/') def delete(self, name): if not name: raise ValueError('The name must be given to delete().') name = self.path(name) # If the file or directory exists, delete it from the filesystem. try: if os.path.isdir(name): os.rmdir(name) else: os.remove(name) except FileNotFoundError: # FileNotFoundError is raised if the file or directory was removed # concurrently. pass def exists(self, name): return os.path.exists(self.path(name)) def listdir(self, path): path = self.path(path) directories, files = [], [] for entry in os.scandir(path): if entry.is_dir(): directories.append(entry.name) else: files.append(entry.name) return directories, files def path(self, name): return safe_join(self.location, name) def size(self, name): return os.path.getsize(self.path(name)) def url(self, name): if self.base_url is None: raise ValueError("This file is not accessible via a URL.") url = filepath_to_uri(name) if url is not None: url = url.lstrip('/') return urljoin(self.base_url, url) def _datetime_from_timestamp(self, ts): """ If timezone support is enabled, make an aware datetime object in UTC; otherwise make a naive one in the local timezone. """ tz = timezone.utc if settings.USE_TZ else None return datetime.fromtimestamp(ts, tz=tz) def get_accessed_time(self, name): return self._datetime_from_timestamp(os.path.getatime(self.path(name))) def get_created_time(self, name): return self._datetime_from_timestamp(os.path.getctime(self.path(name))) def get_modified_time(self, name): return self._datetime_from_timestamp(os.path.getmtime(self.path(name))) def get_storage_class(import_path=None): return import_string(import_path or settings.DEFAULT_FILE_STORAGE) class DefaultStorage(LazyObject): def _setup(self): self._wrapped = get_storage_class()() default_storage = DefaultStorage()
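
# Illustrative usage sketch (not part of Django itself): a typical round trip
# through the FileSystemStorage API defined above, assuming Django settings
# are already configured. The location and base_url values are hypothetical;
# projects normally use ``default_storage`` instead of instantiating the
# class directly.
def _example_filesystem_storage_roundtrip():
    from django.core.files.base import ContentFile

    storage = FileSystemStorage(location='/tmp/demo-media', base_url='/media/')
    # save() goes through get_available_name(), so a clashing name comes back
    # with a random 7-character suffix instead of overwriting an existing file.
    name = storage.save('notes/hello.txt', ContentFile(b'hello world'))
    assert storage.exists(name)
    url = storage.url(name)    # '/media/notes/hello.txt' for the values above
    size = storage.size(name)  # 11, the length of b'hello world'
    storage.delete(name)
    return url, size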
# --- django/contrib/admin/options.py ---
import copy import json import operator import re from functools import partial, reduce, update_wrapper from urllib.parse import quote as urlquote from django import forms from django.conf import settings from django.contrib import messages from django.contrib.admin import helpers, widgets from django.contrib.admin.checks import ( BaseModelAdminChecks, InlineModelAdminChecks, ModelAdminChecks, ) from django.contrib.admin.decorators import display from django.contrib.admin.exceptions import DisallowedModelAdminToField from django.contrib.admin.templatetags.admin_urls import add_preserved_filters from django.contrib.admin.utils import ( NestedObjects, construct_change_message, flatten_fieldsets, get_deleted_objects, lookup_spawns_duplicates, model_format_dict, model_ngettext, quote, unquote, ) from django.contrib.admin.widgets import ( AutocompleteSelect, AutocompleteSelectMultiple, ) from django.contrib.auth import get_permission_codename from django.core.exceptions import ( FieldDoesNotExist, FieldError, PermissionDenied, ValidationError, ) from django.core.paginator import Paginator from django.db import models, router, transaction from django.db.models.constants import LOOKUP_SEP from django.forms.formsets import DELETION_FIELD_NAME, all_valid from django.forms.models import ( BaseInlineFormSet, inlineformset_factory, modelform_defines_fields, modelform_factory, modelformset_factory, ) from django.forms.widgets import CheckboxSelectMultiple, SelectMultiple from django.http import HttpResponseRedirect from django.http.response import HttpResponseBase from django.template.response import SimpleTemplateResponse, TemplateResponse from django.urls import reverse from django.utils.decorators import method_decorator from django.utils.html import format_html from django.utils.http import urlencode from django.utils.safestring import mark_safe from django.utils.text import ( capfirst, format_lazy, get_text_list, smart_split, unescape_string_literal, ) from django.utils.translation import gettext as _, ngettext from django.views.decorators.csrf import csrf_protect from django.views.generic import RedirectView IS_POPUP_VAR = '_popup' TO_FIELD_VAR = '_to_field' HORIZONTAL, VERTICAL = 1, 2 def get_content_type_for_model(obj): # Since this module gets imported in the application's root package, # it cannot import models from other applications at the module level. from django.contrib.contenttypes.models import ContentType return ContentType.objects.get_for_model(obj, for_concrete_model=False) def get_ul_class(radio_style): return 'radiolist' if radio_style == VERTICAL else 'radiolist inline' class IncorrectLookupParameters(Exception): pass # Defaults for formfield_overrides. ModelAdmin subclasses can change this # by adding to ModelAdmin.formfield_overrides. 
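# For example, a project-level ModelAdmin can opt every TextField into a
# custom widget (sketch; the widget class is hypothetical):
#
#     class MyModelAdmin(admin.ModelAdmin):
#         formfield_overrides = {
#             models.TextField: {'widget': RichTextEditorWidget},
#         }
#
# BaseModelAdmin.__init__() below merges such per-class overrides on top of
# the defaults that follow.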
FORMFIELD_FOR_DBFIELD_DEFAULTS = { models.DateTimeField: { 'form_class': forms.SplitDateTimeField, 'widget': widgets.AdminSplitDateTime }, models.DateField: {'widget': widgets.AdminDateWidget}, models.TimeField: {'widget': widgets.AdminTimeWidget}, models.TextField: {'widget': widgets.AdminTextareaWidget}, models.URLField: {'widget': widgets.AdminURLFieldWidget}, models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget}, models.BigIntegerField: {'widget': widgets.AdminBigIntegerFieldWidget}, models.CharField: {'widget': widgets.AdminTextInputWidget}, models.ImageField: {'widget': widgets.AdminFileWidget}, models.FileField: {'widget': widgets.AdminFileWidget}, models.EmailField: {'widget': widgets.AdminEmailInputWidget}, models.UUIDField: {'widget': widgets.AdminUUIDInputWidget}, } csrf_protect_m = method_decorator(csrf_protect) class BaseModelAdmin(metaclass=forms.MediaDefiningClass): """Functionality common to both ModelAdmin and InlineAdmin.""" autocomplete_fields = () raw_id_fields = () fields = None exclude = None fieldsets = None form = forms.ModelForm filter_vertical = () filter_horizontal = () radio_fields = {} prepopulated_fields = {} formfield_overrides = {} readonly_fields = () ordering = None sortable_by = None view_on_site = True show_full_result_count = True checks_class = BaseModelAdminChecks def check(self, **kwargs): return self.checks_class().check(self, **kwargs) def __init__(self): # Merge FORMFIELD_FOR_DBFIELD_DEFAULTS with the formfield_overrides # rather than simply overwriting. overrides = copy.deepcopy(FORMFIELD_FOR_DBFIELD_DEFAULTS) for k, v in self.formfield_overrides.items(): overrides.setdefault(k, {}).update(v) self.formfield_overrides = overrides def formfield_for_dbfield(self, db_field, request, **kwargs): """ Hook for specifying the form Field instance for a given database Field instance. If kwargs are given, they're passed to the form Field's constructor. """ # If the field specifies choices, we don't need to look for special # admin widgets - we just need to use a select widget of some kind. if db_field.choices: return self.formfield_for_choice_field(db_field, request, **kwargs) # ForeignKey or ManyToManyFields if isinstance(db_field, (models.ForeignKey, models.ManyToManyField)): # Combine the field kwargs with any options for formfield_overrides. # Make sure the passed in **kwargs override anything in # formfield_overrides because **kwargs is more specific, and should # always win. if db_field.__class__ in self.formfield_overrides: kwargs = {**self.formfield_overrides[db_field.__class__], **kwargs} # Get the correct formfield. if isinstance(db_field, models.ForeignKey): formfield = self.formfield_for_foreignkey(db_field, request, **kwargs) elif isinstance(db_field, models.ManyToManyField): formfield = self.formfield_for_manytomany(db_field, request, **kwargs) # For non-raw_id fields, wrap the widget with a wrapper that adds # extra HTML -- the "add other" interface -- to the end of the # rendered output. formfield can be None if it came from a # OneToOneField with parent_link=True or a M2M intermediary. 
if formfield and db_field.name not in self.raw_id_fields: related_modeladmin = self.admin_site._registry.get(db_field.remote_field.model) wrapper_kwargs = {} if related_modeladmin: wrapper_kwargs.update( can_add_related=related_modeladmin.has_add_permission(request), can_change_related=related_modeladmin.has_change_permission(request), can_delete_related=related_modeladmin.has_delete_permission(request), can_view_related=related_modeladmin.has_view_permission(request), ) formfield.widget = widgets.RelatedFieldWidgetWrapper( formfield.widget, db_field.remote_field, self.admin_site, **wrapper_kwargs ) return formfield # If we've got overrides for the formfield defined, use 'em. **kwargs # passed to formfield_for_dbfield override the defaults. for klass in db_field.__class__.mro(): if klass in self.formfield_overrides: kwargs = {**copy.deepcopy(self.formfield_overrides[klass]), **kwargs} return db_field.formfield(**kwargs) # For any other type of field, just call its formfield() method. return db_field.formfield(**kwargs) def formfield_for_choice_field(self, db_field, request, **kwargs): """ Get a form Field for a database Field that has declared choices. """ # If the field is named as a radio_field, use a RadioSelect if db_field.name in self.radio_fields: # Avoid stomping on custom widget/choices arguments. if 'widget' not in kwargs: kwargs['widget'] = widgets.AdminRadioSelect(attrs={ 'class': get_ul_class(self.radio_fields[db_field.name]), }) if 'choices' not in kwargs: kwargs['choices'] = db_field.get_choices( include_blank=db_field.blank, blank_choice=[('', _('None'))] ) return db_field.formfield(**kwargs) def get_field_queryset(self, db, db_field, request): """ If the ModelAdmin specifies ordering, the queryset should respect that ordering. Otherwise don't specify the queryset, let the field decide (return None in that case). """ related_admin = self.admin_site._registry.get(db_field.remote_field.model) if related_admin is not None: ordering = related_admin.get_ordering(request) if ordering is not None and ordering != (): return db_field.remote_field.model._default_manager.using(db).order_by(*ordering) return None def formfield_for_foreignkey(self, db_field, request, **kwargs): """ Get a form Field for a ForeignKey. """ db = kwargs.get('using') if 'widget' not in kwargs: if db_field.name in self.get_autocomplete_fields(request): kwargs['widget'] = AutocompleteSelect(db_field, self.admin_site, using=db) elif db_field.name in self.raw_id_fields: kwargs['widget'] = widgets.ForeignKeyRawIdWidget(db_field.remote_field, self.admin_site, using=db) elif db_field.name in self.radio_fields: kwargs['widget'] = widgets.AdminRadioSelect(attrs={ 'class': get_ul_class(self.radio_fields[db_field.name]), }) kwargs['empty_label'] = _('None') if db_field.blank else None if 'queryset' not in kwargs: queryset = self.get_field_queryset(db, db_field, request) if queryset is not None: kwargs['queryset'] = queryset return db_field.formfield(**kwargs) def formfield_for_manytomany(self, db_field, request, **kwargs): """ Get a form Field for a ManyToManyField. """ # If it uses an intermediary model that isn't auto created, don't show # a field in admin. 
if not db_field.remote_field.through._meta.auto_created: return None db = kwargs.get('using') if 'widget' not in kwargs: autocomplete_fields = self.get_autocomplete_fields(request) if db_field.name in autocomplete_fields: kwargs['widget'] = AutocompleteSelectMultiple( db_field, self.admin_site, using=db, ) elif db_field.name in self.raw_id_fields: kwargs['widget'] = widgets.ManyToManyRawIdWidget( db_field.remote_field, self.admin_site, using=db, ) elif db_field.name in [*self.filter_vertical, *self.filter_horizontal]: kwargs['widget'] = widgets.FilteredSelectMultiple( db_field.verbose_name, db_field.name in self.filter_vertical ) if 'queryset' not in kwargs: queryset = self.get_field_queryset(db, db_field, request) if queryset is not None: kwargs['queryset'] = queryset form_field = db_field.formfield(**kwargs) if (isinstance(form_field.widget, SelectMultiple) and not isinstance(form_field.widget, (CheckboxSelectMultiple, AutocompleteSelectMultiple))): msg = _('Hold down “Control”, or “Command” on a Mac, to select more than one.') help_text = form_field.help_text form_field.help_text = format_lazy('{} {}', help_text, msg) if help_text else msg return form_field def get_autocomplete_fields(self, request): """ Return a list of ForeignKey and/or ManyToMany fields which should use an autocomplete widget. """ return self.autocomplete_fields def get_view_on_site_url(self, obj=None): if obj is None or not self.view_on_site: return None if callable(self.view_on_site): return self.view_on_site(obj) elif hasattr(obj, 'get_absolute_url'): # use the ContentType lookup if view_on_site is True return reverse('admin:view_on_site', kwargs={ 'content_type_id': get_content_type_for_model(obj).pk, 'object_id': obj.pk }) def get_empty_value_display(self): """ Return the empty_value_display set on ModelAdmin or AdminSite. """ try: return mark_safe(self.empty_value_display) except AttributeError: return mark_safe(self.admin_site.empty_value_display) def get_exclude(self, request, obj=None): """ Hook for specifying exclude. """ return self.exclude def get_fields(self, request, obj=None): """ Hook for specifying fields. """ if self.fields: return self.fields # _get_form_for_get_fields() is implemented in subclasses. form = self._get_form_for_get_fields(request, obj) return [*form.base_fields, *self.get_readonly_fields(request, obj)] def get_fieldsets(self, request, obj=None): """ Hook for specifying fieldsets. """ if self.fieldsets: return self.fieldsets return [(None, {'fields': self.get_fields(request, obj)})] def get_inlines(self, request, obj): """Hook for specifying custom inlines.""" return self.inlines def get_ordering(self, request): """ Hook for specifying field ordering. """ return self.ordering or () # otherwise we might try to *None, which is bad ;) def get_readonly_fields(self, request, obj=None): """ Hook for specifying custom readonly fields. """ return self.readonly_fields def get_prepopulated_fields(self, request, obj=None): """ Hook for specifying custom prepopulated fields. """ return self.prepopulated_fields def get_queryset(self, request): """ Return a QuerySet of all model instances that can be edited by the admin site. This is used by changelist_view. """ qs = self.model._default_manager.get_queryset() # TODO: this should be handled by some parameter to the ChangeList. 
ordering = self.get_ordering(request) if ordering: qs = qs.order_by(*ordering) return qs def get_sortable_by(self, request): """Hook for specifying which fields can be sorted in the changelist.""" return self.sortable_by if self.sortable_by is not None else self.get_list_display(request) def lookup_allowed(self, lookup, value): from django.contrib.admin.filters import SimpleListFilter model = self.model # Check FKey lookups that are allowed, so that popups produced by # ForeignKeyRawIdWidget, on the basis of ForeignKey.limit_choices_to, # are allowed to work. for fk_lookup in model._meta.related_fkey_lookups: # As ``limit_choices_to`` can be a callable, invoke it here. if callable(fk_lookup): fk_lookup = fk_lookup() if (lookup, value) in widgets.url_params_from_lookup_dict(fk_lookup).items(): return True relation_parts = [] prev_field = None for part in lookup.split(LOOKUP_SEP): try: field = model._meta.get_field(part) except FieldDoesNotExist: # Lookups on nonexistent fields are ok, since they're ignored # later. break # It is allowed to filter on values that would be found from local # model anyways. For example, if you filter on employee__department__id, # then the id value would be found already from employee__department_id. if not prev_field or (prev_field.is_relation and field not in prev_field.get_path_info()[-1].target_fields): relation_parts.append(part) if not getattr(field, 'get_path_info', None): # This is not a relational field, so further parts # must be transforms. break prev_field = field model = field.get_path_info()[-1].to_opts.model if len(relation_parts) <= 1: # Either a local field filter, or no fields at all. return True valid_lookups = {self.date_hierarchy} for filter_item in self.list_filter: if isinstance(filter_item, type) and issubclass(filter_item, SimpleListFilter): valid_lookups.add(filter_item.parameter_name) elif isinstance(filter_item, (list, tuple)): valid_lookups.add(filter_item[0]) else: valid_lookups.add(filter_item) # Is it a valid relational lookup? return not { LOOKUP_SEP.join(relation_parts), LOOKUP_SEP.join(relation_parts + [part]) }.isdisjoint(valid_lookups) def to_field_allowed(self, request, to_field): """ Return True if the model associated with this admin should be allowed to be referenced by the specified field. """ opts = self.model._meta try: field = opts.get_field(to_field) except FieldDoesNotExist: return False # Always allow referencing the primary key since it's already possible # to get this information from the change view URL. if field.primary_key: return True # Allow reverse relationships to models defining m2m fields if they # target the specified field. for many_to_many in opts.many_to_many: if many_to_many.m2m_target_field_name() == to_field: return True # Make sure at least one of the models registered for this site # references this field through a FK or a M2M relationship. 
registered_models = set() for model, admin in self.admin_site._registry.items(): registered_models.add(model) for inline in admin.inlines: registered_models.add(inline.model) related_objects = ( f for f in opts.get_fields(include_hidden=True) if (f.auto_created and not f.concrete) ) for related_object in related_objects: related_model = related_object.related_model remote_field = related_object.field.remote_field if (any(issubclass(model, related_model) for model in registered_models) and hasattr(remote_field, 'get_related_field') and remote_field.get_related_field() == field): return True return False def has_add_permission(self, request): """ Return True if the given request has permission to add an object. Can be overridden by the user in subclasses. """ opts = self.opts codename = get_permission_codename('add', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_change_permission(self, request, obj=None): """ Return True if the given request has permission to change the given Django model instance, the default implementation doesn't examine the `obj` parameter. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to change the `obj` model instance. If `obj` is None, this should return True if the given request has permission to change *any* object of the given type. """ opts = self.opts codename = get_permission_codename('change', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_delete_permission(self, request, obj=None): """ Return True if the given request has permission to change the given Django model instance, the default implementation doesn't examine the `obj` parameter. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to delete the `obj` model instance. If `obj` is None, this should return True if the given request has permission to delete *any* object of the given type. """ opts = self.opts codename = get_permission_codename('delete', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_view_permission(self, request, obj=None): """ Return True if the given request has permission to view the given Django model instance. The default implementation doesn't examine the `obj` parameter. If overridden by the user in subclasses, it should return True if the given request has permission to view the `obj` model instance. If `obj` is None, it should return True if the request has permission to view any object of the given type. """ opts = self.opts codename_view = get_permission_codename('view', opts) codename_change = get_permission_codename('change', opts) return ( request.user.has_perm('%s.%s' % (opts.app_label, codename_view)) or request.user.has_perm('%s.%s' % (opts.app_label, codename_change)) ) def has_view_or_change_permission(self, request, obj=None): return self.has_view_permission(request, obj) or self.has_change_permission(request, obj) def has_module_permission(self, request): """ Return True if the given request has any permission in the given app label. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to view the module on the admin index page and access the module's index page. Overriding it does not restrict access to the add, change or delete views. Use `ModelAdmin.has_(add|change|delete)_permission` for that. 
""" return request.user.has_module_perms(self.opts.app_label) class ModelAdmin(BaseModelAdmin): """Encapsulate all admin options and functionality for a given model.""" list_display = ('__str__',) list_display_links = () list_filter = () list_select_related = False list_per_page = 100 list_max_show_all = 200 list_editable = () search_fields = () search_help_text = None date_hierarchy = None save_as = False save_as_continue = True save_on_top = False paginator = Paginator preserve_filters = True inlines = [] # Custom templates (designed to be over-ridden in subclasses) add_form_template = None change_form_template = None change_list_template = None delete_confirmation_template = None delete_selected_confirmation_template = None object_history_template = None popup_response_template = None # Actions actions = [] action_form = helpers.ActionForm actions_on_top = True actions_on_bottom = False actions_selection_counter = True checks_class = ModelAdminChecks def __init__(self, model, admin_site): self.model = model self.opts = model._meta self.admin_site = admin_site super().__init__() def __str__(self): return "%s.%s" % (self.model._meta.app_label, self.__class__.__name__) def get_inline_instances(self, request, obj=None): inline_instances = [] for inline_class in self.get_inlines(request, obj): inline = inline_class(self.model, self.admin_site) if request: if not (inline.has_view_or_change_permission(request, obj) or inline.has_add_permission(request, obj) or inline.has_delete_permission(request, obj)): continue if not inline.has_add_permission(request, obj): inline.max_num = 0 inline_instances.append(inline) return inline_instances def get_urls(self): from django.urls import path def wrap(view): def wrapper(*args, **kwargs): return self.admin_site.admin_view(view)(*args, **kwargs) wrapper.model_admin = self return update_wrapper(wrapper, view) info = self.model._meta.app_label, self.model._meta.model_name return [ path('', wrap(self.changelist_view), name='%s_%s_changelist' % info), path('add/', wrap(self.add_view), name='%s_%s_add' % info), path('<path:object_id>/history/', wrap(self.history_view), name='%s_%s_history' % info), path('<path:object_id>/delete/', wrap(self.delete_view), name='%s_%s_delete' % info), path('<path:object_id>/change/', wrap(self.change_view), name='%s_%s_change' % info), # For backwards compatibility (was the change url before 1.9) path('<path:object_id>/', wrap(RedirectView.as_view( pattern_name='%s:%s_%s_change' % ((self.admin_site.name,) + info) ))), ] @property def urls(self): return self.get_urls() @property def media(self): extra = '' if settings.DEBUG else '.min' js = [ 'vendor/jquery/jquery%s.js' % extra, 'jquery.init.js', 'core.js', 'admin/RelatedObjectLookups.js', 'actions.js', 'urlify.js', 'prepopulate.js', 'vendor/xregexp/xregexp%s.js' % extra, ] return forms.Media(js=['admin/js/%s' % url for url in js]) def get_model_perms(self, request): """ Return a dict of all perms for this model. This dict has the keys ``add``, ``change``, ``delete``, and ``view`` mapping to the True/False for each of those actions. """ return { 'add': self.has_add_permission(request), 'change': self.has_change_permission(request), 'delete': self.has_delete_permission(request), 'view': self.has_view_permission(request), } def _get_form_for_get_fields(self, request, obj): return self.get_form(request, obj, fields=None) def get_form(self, request, obj=None, change=False, **kwargs): """ Return a Form class for use in the admin add view. This is used by add_view and change_view. 
""" if 'fields' in kwargs: fields = kwargs.pop('fields') else: fields = flatten_fieldsets(self.get_fieldsets(request, obj)) excluded = self.get_exclude(request, obj) exclude = [] if excluded is None else list(excluded) readonly_fields = self.get_readonly_fields(request, obj) exclude.extend(readonly_fields) # Exclude all fields if it's a change form and the user doesn't have # the change permission. if change and hasattr(request, 'user') and not self.has_change_permission(request, obj): exclude.extend(fields) if excluded is None and hasattr(self.form, '_meta') and self.form._meta.exclude: # Take the custom ModelForm's Meta.exclude into account only if the # ModelAdmin doesn't define its own. exclude.extend(self.form._meta.exclude) # if exclude is an empty list we pass None to be consistent with the # default on modelform_factory exclude = exclude or None # Remove declared form fields which are in readonly_fields. new_attrs = dict.fromkeys(f for f in readonly_fields if f in self.form.declared_fields) form = type(self.form.__name__, (self.form,), new_attrs) defaults = { 'form': form, 'fields': fields, 'exclude': exclude, 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } if defaults['fields'] is None and not modelform_defines_fields(defaults['form']): defaults['fields'] = forms.ALL_FIELDS try: return modelform_factory(self.model, **defaults) except FieldError as e: raise FieldError( '%s. Check fields/fieldsets/exclude attributes of class %s.' % (e, self.__class__.__name__) ) def get_changelist(self, request, **kwargs): """ Return the ChangeList class for use on the changelist page. """ from django.contrib.admin.views.main import ChangeList return ChangeList def get_changelist_instance(self, request): """ Return a `ChangeList` instance based on `request`. May raise `IncorrectLookupParameters`. """ list_display = self.get_list_display(request) list_display_links = self.get_list_display_links(request, list_display) # Add the action checkboxes if any actions are available. if self.get_actions(request): list_display = ['action_checkbox', *list_display] sortable_by = self.get_sortable_by(request) ChangeList = self.get_changelist(request) return ChangeList( request, self.model, list_display, list_display_links, self.get_list_filter(request), self.date_hierarchy, self.get_search_fields(request), self.get_list_select_related(request), self.list_per_page, self.list_max_show_all, self.list_editable, self, sortable_by, self.search_help_text, ) def get_object(self, request, object_id, from_field=None): """ Return an instance matching the field and value provided, the primary key is used if no field is provided. Return ``None`` if no match is found or the object_id fails validation. """ queryset = self.get_queryset(request) model = queryset.model field = model._meta.pk if from_field is None else model._meta.get_field(from_field) try: object_id = field.to_python(object_id) return queryset.get(**{field.name: object_id}) except (model.DoesNotExist, ValidationError, ValueError): return None def get_changelist_form(self, request, **kwargs): """ Return a Form class for use in the Formset on the changelist page. 
""" defaults = { 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } if defaults.get('fields') is None and not modelform_defines_fields(defaults.get('form')): defaults['fields'] = forms.ALL_FIELDS return modelform_factory(self.model, **defaults) def get_changelist_formset(self, request, **kwargs): """ Return a FormSet class for use on the changelist page if list_editable is used. """ defaults = { 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } return modelformset_factory( self.model, self.get_changelist_form(request), extra=0, fields=self.list_editable, **defaults ) def get_formsets_with_inlines(self, request, obj=None): """ Yield formsets and the corresponding inlines. """ for inline in self.get_inline_instances(request, obj): yield inline.get_formset(request, obj), inline def get_paginator(self, request, queryset, per_page, orphans=0, allow_empty_first_page=True): return self.paginator(queryset, per_page, orphans, allow_empty_first_page) def log_addition(self, request, obj, message): """ Log that an object has been successfully added. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import ADDITION, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(obj).pk, object_id=obj.pk, object_repr=str(obj), action_flag=ADDITION, change_message=message, ) def log_change(self, request, obj, message): """ Log that an object has been successfully changed. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import CHANGE, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(obj).pk, object_id=obj.pk, object_repr=str(obj), action_flag=CHANGE, change_message=message, ) def log_deletion(self, request, obj, object_repr): """ Log that an object will be deleted. Note that this method must be called before the deletion. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import DELETION, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(obj).pk, object_id=obj.pk, object_repr=object_repr, action_flag=DELETION, ) @display(description=mark_safe('<input type="checkbox" id="action-toggle">')) def action_checkbox(self, obj): """ A list_display column containing a checkbox widget. """ return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME, str(obj.pk)) @staticmethod def _get_action_description(func, name): return getattr(func, 'short_description', capfirst(name.replace('_', ' '))) def _get_base_actions(self): """Return the list of actions, prior to any request-based filtering.""" actions = [] base_actions = (self.get_action(action) for action in self.actions or []) # get_action might have returned None, so filter any of those out. base_actions = [action for action in base_actions if action] base_action_names = {name for _, name, _ in base_actions} # Gather actions from the admin site first for (name, func) in self.admin_site.actions: if name in base_action_names: continue description = self._get_action_description(func, name) actions.append((func, name, description)) # Add actions from this ModelAdmin. 
actions.extend(base_actions) return actions def _filter_actions_by_permissions(self, request, actions): """Filter out any actions that the user doesn't have access to.""" filtered_actions = [] for action in actions: callable = action[0] if not hasattr(callable, 'allowed_permissions'): filtered_actions.append(action) continue permission_checks = ( getattr(self, 'has_%s_permission' % permission) for permission in callable.allowed_permissions ) if any(has_permission(request) for has_permission in permission_checks): filtered_actions.append(action) return filtered_actions def get_actions(self, request): """ Return a dictionary mapping the names of all actions for this ModelAdmin to a tuple of (callable, name, description) for each action. """ # If self.actions is set to None that means actions are disabled on # this page. if self.actions is None or IS_POPUP_VAR in request.GET: return {} actions = self._filter_actions_by_permissions(request, self._get_base_actions()) return {name: (func, name, desc) for func, name, desc in actions} def get_action_choices(self, request, default_choices=models.BLANK_CHOICE_DASH): """ Return a list of choices for use in a form object. Each choice is a tuple (name, description). """ choices = [] + default_choices for func, name, description in self.get_actions(request).values(): choice = (name, description % model_format_dict(self.opts)) choices.append(choice) return choices def get_action(self, action): """ Return a given action from a parameter, which can either be a callable, or the name of a method on the ModelAdmin. Return is a tuple of (callable, name, description). """ # If the action is a callable, just use it. if callable(action): func = action action = action.__name__ # Next, look for a method. Grab it off self.__class__ to get an unbound # method instead of a bound one; this ensures that the calling # conventions are the same for functions and methods. elif hasattr(self.__class__, action): func = getattr(self.__class__, action) # Finally, look for a named method on the admin site else: try: func = self.admin_site.get_action(action) except KeyError: return None description = self._get_action_description(func, action) return func, action, description def get_list_display(self, request): """ Return a sequence containing the fields to be displayed on the changelist. """ return self.list_display def get_list_display_links(self, request, list_display): """ Return a sequence containing the fields to be displayed as links on the changelist. The list_display parameter is the list of fields returned by get_list_display(). """ if self.list_display_links or self.list_display_links is None or not list_display: return self.list_display_links else: # Use only the first item in list_display as link return list(list_display)[:1] def get_list_filter(self, request): """ Return a sequence containing the fields to be displayed as filters in the right sidebar of the changelist page. """ return self.list_filter def get_list_select_related(self, request): """ Return a list of fields to add to the select_related() part of the changelist items query. """ return self.list_select_related def get_search_fields(self, request): """ Return a sequence containing the fields to be searched whenever somebody submits a search query. """ return self.search_fields def get_search_results(self, request, queryset, search_term): """ Return a tuple containing a queryset to implement the search and a boolean indicating if the results may contain duplicates. """ # Apply keyword searches. 
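        # A prefix on each entry in search_fields selects the ORM lookup built
        # below: '^name' -> name__istartswith, '=email' -> email__iexact,
        # '@body' -> body__search, and a plain or lookup-qualified name falls
        # back to __icontains. For example (hypothetical field names):
        #     search_fields = ('=user__email', '^title', 'body')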
def construct_search(field_name): if field_name.startswith('^'): return "%s__istartswith" % field_name[1:] elif field_name.startswith('='): return "%s__iexact" % field_name[1:] elif field_name.startswith('@'): return "%s__search" % field_name[1:] # Use field_name if it includes a lookup. opts = queryset.model._meta lookup_fields = field_name.split(LOOKUP_SEP) # Go through the fields, following all relations. prev_field = None for path_part in lookup_fields: if path_part == 'pk': path_part = opts.pk.name try: field = opts.get_field(path_part) except FieldDoesNotExist: # Use valid query lookups. if prev_field and prev_field.get_lookup(path_part): return field_name else: prev_field = field if hasattr(field, 'get_path_info'): # Update opts to follow the relation. opts = field.get_path_info()[-1].to_opts # Otherwise, use the field with icontains. return "%s__icontains" % field_name may_have_duplicates = False search_fields = self.get_search_fields(request) if search_fields and search_term: orm_lookups = [construct_search(str(search_field)) for search_field in search_fields] for bit in smart_split(search_term): if bit.startswith(('"', "'")) and bit[0] == bit[-1]: bit = unescape_string_literal(bit) or_queries = [models.Q(**{orm_lookup: bit}) for orm_lookup in orm_lookups] queryset = queryset.filter(reduce(operator.or_, or_queries)) may_have_duplicates |= any( lookup_spawns_duplicates(self.opts, search_spec) for search_spec in orm_lookups ) return queryset, may_have_duplicates def get_preserved_filters(self, request): """ Return the preserved filters querystring. """ match = request.resolver_match if self.preserve_filters and match: opts = self.model._meta current_url = '%s:%s' % (match.app_name, match.url_name) changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name) if current_url == changelist_url: preserved_filters = request.GET.urlencode() else: preserved_filters = request.GET.get('_changelist_filters') if preserved_filters: return urlencode({'_changelist_filters': preserved_filters}) return '' def construct_change_message(self, request, form, formsets, add=False): """ Construct a JSON structure describing changes from a changed object. """ return construct_change_message(form, formsets, add) def message_user(self, request, message, level=messages.INFO, extra_tags='', fail_silently=False): """ Send a message to the user. The default implementation posts a message using the django.contrib.messages backend. Exposes almost the same API as messages.add_message(), but accepts the positional arguments in a different order to maintain backwards compatibility. For convenience, it accepts the `level` argument as a string rather than the usual level number. """ if not isinstance(level, int): # attempt to get the level if passed a string try: level = getattr(messages.constants, level.upper()) except AttributeError: levels = messages.constants.DEFAULT_TAGS.values() levels_repr = ', '.join('`%s`' % level for level in levels) raise ValueError( 'Bad message level string: `%s`. Possible values are: %s' % (level, levels_repr) ) messages.add_message(request, level, message, extra_tags=extra_tags, fail_silently=fail_silently) def save_form(self, request, form, change): """ Given a ModelForm return an unsaved instance. ``change`` is True if the object is being changed, and False if it's being added. """ return form.save(commit=False) def save_model(self, request, obj, form, change): """ Given a model instance save it to the database. 
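
        A common override sketch (assumes the model has an ``added_by`` field):

            def save_model(self, request, obj, form, change):
                obj.added_by = request.user
                super().save_model(request, obj, form, change)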
""" obj.save() def delete_model(self, request, obj): """ Given a model instance delete it from the database. """ obj.delete() def delete_queryset(self, request, queryset): """Given a queryset, delete it from the database.""" queryset.delete() def save_formset(self, request, form, formset, change): """ Given an inline formset save it to the database. """ formset.save() def save_related(self, request, form, formsets, change): """ Given the ``HttpRequest``, the parent ``ModelForm`` instance, the list of inline formsets and a boolean value based on whether the parent is being added or changed, save the related objects to the database. Note that at this point save_form() and save_model() have already been called. """ form.save_m2m() for formset in formsets: self.save_formset(request, form, formset, change=change) def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None): opts = self.model._meta app_label = opts.app_label preserved_filters = self.get_preserved_filters(request) form_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, form_url) view_on_site_url = self.get_view_on_site_url(obj) has_editable_inline_admin_formsets = False for inline in context['inline_admin_formsets']: if inline.has_add_permission or inline.has_change_permission or inline.has_delete_permission: has_editable_inline_admin_formsets = True break context.update({ 'add': add, 'change': change, 'has_view_permission': self.has_view_permission(request, obj), 'has_add_permission': self.has_add_permission(request), 'has_change_permission': self.has_change_permission(request, obj), 'has_delete_permission': self.has_delete_permission(request, obj), 'has_editable_inline_admin_formsets': has_editable_inline_admin_formsets, 'has_file_field': context['adminform'].form.is_multipart() or any( admin_formset.formset.is_multipart() for admin_formset in context['inline_admin_formsets'] ), 'has_absolute_url': view_on_site_url is not None, 'absolute_url': view_on_site_url, 'form_url': form_url, 'opts': opts, 'content_type_id': get_content_type_for_model(self.model).pk, 'save_as': self.save_as, 'save_on_top': self.save_on_top, 'to_field_var': TO_FIELD_VAR, 'is_popup_var': IS_POPUP_VAR, 'app_label': app_label, }) if add and self.add_form_template is not None: form_template = self.add_form_template else: form_template = self.change_form_template request.current_app = self.admin_site.name return TemplateResponse(request, form_template or [ "admin/%s/%s/change_form.html" % (app_label, opts.model_name), "admin/%s/change_form.html" % app_label, "admin/change_form.html" ], context) def response_add(self, request, obj, post_url_continue=None): """ Determine the HttpResponse for the add_view stage. """ opts = obj._meta preserved_filters = self.get_preserved_filters(request) obj_url = reverse( 'admin:%s_%s_change' % (opts.app_label, opts.model_name), args=(quote(obj.pk),), current_app=self.admin_site.name, ) # Add a link to the object's change form if the user can edit the obj. if self.has_change_permission(request, obj): obj_repr = format_html('<a href="{}">{}</a>', urlquote(obj_url), obj) else: obj_repr = str(obj) msg_dict = { 'name': opts.verbose_name, 'obj': obj_repr, } # Here, we distinguish between different save types by checking for # the presence of keys in request.POST. 
if IS_POPUP_VAR in request.POST: to_field = request.POST.get(TO_FIELD_VAR) if to_field: attr = str(to_field) else: attr = obj._meta.pk.attname value = obj.serializable_value(attr) popup_response_data = json.dumps({ 'value': str(value), 'obj': str(obj), }) return TemplateResponse(request, self.popup_response_template or [ 'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name), 'admin/%s/popup_response.html' % opts.app_label, 'admin/popup_response.html', ], { 'popup_response_data': popup_response_data, }) elif "_continue" in request.POST or ( # Redirecting after "Save as new". "_saveasnew" in request.POST and self.save_as_continue and self.has_change_permission(request, obj) ): msg = _('The {name} “{obj}” was added successfully.') if self.has_change_permission(request, obj): msg += ' ' + _('You may edit it again below.') self.message_user(request, format_html(msg, **msg_dict), messages.SUCCESS) if post_url_continue is None: post_url_continue = obj_url post_url_continue = add_preserved_filters( {'preserved_filters': preserved_filters, 'opts': opts}, post_url_continue ) return HttpResponseRedirect(post_url_continue) elif "_addanother" in request.POST: msg = format_html( _('The {name} “{obj}” was added successfully. You may add another {name} below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = request.path redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) else: msg = format_html( _('The {name} “{obj}” was added successfully.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) return self.response_post_save_add(request, obj) def response_change(self, request, obj): """ Determine the HttpResponse for the change_view stage. """ if IS_POPUP_VAR in request.POST: opts = obj._meta to_field = request.POST.get(TO_FIELD_VAR) attr = str(to_field) if to_field else opts.pk.attname value = request.resolver_match.kwargs['object_id'] new_value = obj.serializable_value(attr) popup_response_data = json.dumps({ 'action': 'change', 'value': str(value), 'obj': str(obj), 'new_value': str(new_value), }) return TemplateResponse(request, self.popup_response_template or [ 'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name), 'admin/%s/popup_response.html' % opts.app_label, 'admin/popup_response.html', ], { 'popup_response_data': popup_response_data, }) opts = self.model._meta preserved_filters = self.get_preserved_filters(request) msg_dict = { 'name': opts.verbose_name, 'obj': format_html('<a href="{}">{}</a>', urlquote(request.path), obj), } if "_continue" in request.POST: msg = format_html( _('The {name} “{obj}” was changed successfully. You may edit it again below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = request.path redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) elif "_saveasnew" in request.POST: msg = format_html( _('The {name} “{obj}” was added successfully. 
You may edit it again below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = reverse('admin:%s_%s_change' % (opts.app_label, opts.model_name), args=(obj.pk,), current_app=self.admin_site.name) redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) elif "_addanother" in request.POST: msg = format_html( _('The {name} “{obj}” was changed successfully. You may add another {name} below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = reverse('admin:%s_%s_add' % (opts.app_label, opts.model_name), current_app=self.admin_site.name) redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) else: msg = format_html( _('The {name} “{obj}” was changed successfully.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) return self.response_post_save_change(request, obj) def _response_post_save(self, request, obj): opts = self.model._meta if self.has_view_or_change_permission(request): post_url = reverse('admin:%s_%s_changelist' % (opts.app_label, opts.model_name), current_app=self.admin_site.name) preserved_filters = self.get_preserved_filters(request) post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url) else: post_url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(post_url) def response_post_save_add(self, request, obj): """ Figure out where to redirect after the 'Save' button has been pressed when adding a new object. """ return self._response_post_save(request, obj) def response_post_save_change(self, request, obj): """ Figure out where to redirect after the 'Save' button has been pressed when editing an existing object. """ return self._response_post_save(request, obj) def response_action(self, request, queryset): """ Handle an admin action. This is called if a request is POSTed to the changelist; it returns an HttpResponse if the action was handled, and None otherwise. """ # There can be multiple action forms on the page (at the top # and bottom of the change list, for example). Get the action # whose button was pushed. try: action_index = int(request.POST.get('index', 0)) except ValueError: action_index = 0 # Construct the action form. data = request.POST.copy() data.pop(helpers.ACTION_CHECKBOX_NAME, None) data.pop("index", None) # Use the action whose button was pushed try: data.update({'action': data.getlist('action')[action_index]}) except IndexError: # If we didn't get an action from the chosen form that's invalid # POST data, so by deleting action it'll fail the validation check # below. So no need to do anything here pass action_form = self.action_form(data, auto_id=None) action_form.fields['action'].choices = self.get_action_choices(request) # If the form's valid we can handle the action. if action_form.is_valid(): action = action_form.cleaned_data['action'] select_across = action_form.cleaned_data['select_across'] func = self.get_actions(request)[action][0] # Get the list of selected PKs. If nothing's selected, we can't # perform an action on it, so bail. Except we want to perform # the action explicitly on all objects. 
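            # (select_across is set by the "select all" option in the
            # changelist; when it is true the action receives the full
            # queryset rather than only the checked rows.)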
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME) if not selected and not select_across: # Reminder that something needs to be selected or nothing will happen msg = _("Items must be selected in order to perform " "actions on them. No items have been changed.") self.message_user(request, msg, messages.WARNING) return None if not select_across: # Perform the action only on the selected objects queryset = queryset.filter(pk__in=selected) response = func(self, request, queryset) # Actions may return an HttpResponse-like object, which will be # used as the response from the POST. If not, we'll be a good # little HTTP citizen and redirect back to the changelist page. if isinstance(response, HttpResponseBase): return response else: return HttpResponseRedirect(request.get_full_path()) else: msg = _("No action selected.") self.message_user(request, msg, messages.WARNING) return None def response_delete(self, request, obj_display, obj_id): """ Determine the HttpResponse for the delete_view stage. """ opts = self.model._meta if IS_POPUP_VAR in request.POST: popup_response_data = json.dumps({ 'action': 'delete', 'value': str(obj_id), }) return TemplateResponse(request, self.popup_response_template or [ 'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name), 'admin/%s/popup_response.html' % opts.app_label, 'admin/popup_response.html', ], { 'popup_response_data': popup_response_data, }) self.message_user( request, _('The %(name)s “%(obj)s” was deleted successfully.') % { 'name': opts.verbose_name, 'obj': obj_display, }, messages.SUCCESS, ) if self.has_change_permission(request, None): post_url = reverse( 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name), current_app=self.admin_site.name, ) preserved_filters = self.get_preserved_filters(request) post_url = add_preserved_filters( {'preserved_filters': preserved_filters, 'opts': opts}, post_url ) else: post_url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(post_url) def render_delete_form(self, request, context): opts = self.model._meta app_label = opts.app_label request.current_app = self.admin_site.name context.update( to_field_var=TO_FIELD_VAR, is_popup_var=IS_POPUP_VAR, media=self.media, ) return TemplateResponse( request, self.delete_confirmation_template or [ "admin/{}/{}/delete_confirmation.html".format(app_label, opts.model_name), "admin/{}/delete_confirmation.html".format(app_label), "admin/delete_confirmation.html", ], context, ) def get_inline_formsets(self, request, formsets, inline_instances, obj=None): # Edit permissions on parent model are required for editable inlines. can_edit_parent = self.has_change_permission(request, obj) if obj else self.has_add_permission(request) inline_admin_formsets = [] for inline, formset in zip(inline_instances, formsets): fieldsets = list(inline.get_fieldsets(request, obj)) readonly = list(inline.get_readonly_fields(request, obj)) if can_edit_parent: has_add_permission = inline.has_add_permission(request, obj) has_change_permission = inline.has_change_permission(request, obj) has_delete_permission = inline.has_delete_permission(request, obj) else: # Disable all edit-permissions, and overide formset settings. 
has_add_permission = has_change_permission = has_delete_permission = False formset.extra = formset.max_num = 0 has_view_permission = inline.has_view_permission(request, obj) prepopulated = dict(inline.get_prepopulated_fields(request, obj)) inline_admin_formset = helpers.InlineAdminFormSet( inline, formset, fieldsets, prepopulated, readonly, model_admin=self, has_add_permission=has_add_permission, has_change_permission=has_change_permission, has_delete_permission=has_delete_permission, has_view_permission=has_view_permission, ) inline_admin_formsets.append(inline_admin_formset) return inline_admin_formsets def get_changeform_initial_data(self, request): """ Get the initial form data from the request's GET params. """ initial = dict(request.GET.items()) for k in initial: try: f = self.model._meta.get_field(k) except FieldDoesNotExist: continue # We have to special-case M2Ms as a list of comma-separated PKs. if isinstance(f, models.ManyToManyField): initial[k] = initial[k].split(",") return initial def _get_obj_does_not_exist_redirect(self, request, opts, object_id): """ Create a message informing the user that the object doesn't exist and return a redirect to the admin index page. """ msg = _('%(name)s with ID “%(key)s” doesn’t exist. Perhaps it was deleted?') % { 'name': opts.verbose_name, 'key': unquote(object_id), } self.message_user(request, msg, messages.WARNING) url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(url) @csrf_protect_m def changeform_view(self, request, object_id=None, form_url='', extra_context=None): with transaction.atomic(using=router.db_for_write(self.model)): return self._changeform_view(request, object_id, form_url, extra_context) def _changeform_view(self, request, object_id, form_url, extra_context): to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR)) if to_field and not self.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." 
% to_field) model = self.model opts = model._meta if request.method == 'POST' and '_saveasnew' in request.POST: object_id = None add = object_id is None if add: if not self.has_add_permission(request): raise PermissionDenied obj = None else: obj = self.get_object(request, unquote(object_id), to_field) if request.method == 'POST': if not self.has_change_permission(request, obj): raise PermissionDenied else: if not self.has_view_or_change_permission(request, obj): raise PermissionDenied if obj is None: return self._get_obj_does_not_exist_redirect(request, opts, object_id) fieldsets = self.get_fieldsets(request, obj) ModelForm = self.get_form( request, obj, change=not add, fields=flatten_fieldsets(fieldsets) ) if request.method == 'POST': form = ModelForm(request.POST, request.FILES, instance=obj) form_validated = form.is_valid() if form_validated: new_object = self.save_form(request, form, change=not add) else: new_object = form.instance formsets, inline_instances = self._create_formsets(request, new_object, change=not add) if all_valid(formsets) and form_validated: self.save_model(request, new_object, form, not add) self.save_related(request, form, formsets, not add) change_message = self.construct_change_message(request, form, formsets, add) if add: self.log_addition(request, new_object, change_message) return self.response_add(request, new_object) else: self.log_change(request, new_object, change_message) return self.response_change(request, new_object) else: form_validated = False else: if add: initial = self.get_changeform_initial_data(request) form = ModelForm(initial=initial) formsets, inline_instances = self._create_formsets(request, form.instance, change=False) else: form = ModelForm(instance=obj) formsets, inline_instances = self._create_formsets(request, obj, change=True) if not add and not self.has_change_permission(request, obj): readonly_fields = flatten_fieldsets(fieldsets) else: readonly_fields = self.get_readonly_fields(request, obj) adminForm = helpers.AdminForm( form, list(fieldsets), # Clear prepopulated fields on a view-only form to avoid a crash. self.get_prepopulated_fields(request, obj) if add or self.has_change_permission(request, obj) else {}, readonly_fields, model_admin=self) media = self.media + adminForm.media inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj) for inline_formset in inline_formsets: media = media + inline_formset.media if add: title = _('Add %s') elif self.has_change_permission(request, obj): title = _('Change %s') else: title = _('View %s') context = { **self.admin_site.each_context(request), 'title': title % opts.verbose_name, 'subtitle': str(obj) if obj else None, 'adminform': adminForm, 'object_id': object_id, 'original': obj, 'is_popup': IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET, 'to_field': to_field, 'media': media, 'inline_admin_formsets': inline_formsets, 'errors': helpers.AdminErrorList(form, formsets), 'preserved_filters': self.get_preserved_filters(request), } # Hide the "Save" and "Save and continue" buttons if "Save as New" was # previously chosen to prevent the interface from getting confusing. if request.method == 'POST' and not form_validated and "_saveasnew" in request.POST: context['show_save'] = False context['show_save_and_continue'] = False # Use the change template instead of the add template. 
add = False context.update(extra_context or {}) return self.render_change_form(request, context, add=add, change=not add, obj=obj, form_url=form_url) def add_view(self, request, form_url='', extra_context=None): return self.changeform_view(request, None, form_url, extra_context) def change_view(self, request, object_id, form_url='', extra_context=None): return self.changeform_view(request, object_id, form_url, extra_context) def _get_edited_object_pks(self, request, prefix): """Return POST data values of list_editable primary keys.""" pk_pattern = re.compile( r'{}-\d+-{}$'.format(re.escape(prefix), self.model._meta.pk.name) ) return [value for key, value in request.POST.items() if pk_pattern.match(key)] def _get_list_editable_queryset(self, request, prefix): """ Based on POST data, return a queryset of the objects that were edited via list_editable. """ object_pks = self._get_edited_object_pks(request, prefix) queryset = self.get_queryset(request) validate = queryset.model._meta.pk.to_python try: for pk in object_pks: validate(pk) except ValidationError: # Disable the optimization if the POST data was tampered with. return queryset return queryset.filter(pk__in=object_pks) @csrf_protect_m def changelist_view(self, request, extra_context=None): """ The 'change list' admin view for this model. """ from django.contrib.admin.views.main import ERROR_FLAG opts = self.model._meta app_label = opts.app_label if not self.has_view_or_change_permission(request): raise PermissionDenied try: cl = self.get_changelist_instance(request) except IncorrectLookupParameters: # Wacky lookup parameters were given, so redirect to the main # changelist page, without parameters, and pass an 'invalid=1' # parameter via the query string. If wacky parameters were given # and the 'invalid=1' parameter was already in the query string, # something is screwed up with the database, so display an error # page. if ERROR_FLAG in request.GET: return SimpleTemplateResponse('admin/invalid_setup.html', { 'title': _('Database error'), }) return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1') # If the request was POSTed, this might be a bulk action or a bulk # edit. Try to look up an action or confirmation first, but if this # isn't an action the POST will fall through to the bulk edit check, # below. action_failed = False selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME) actions = self.get_actions(request) # Actions with no confirmation if (actions and request.method == 'POST' and 'index' in request.POST and '_save' not in request.POST): if selected: response = self.response_action(request, queryset=cl.get_queryset(request)) if response: return response else: action_failed = True else: msg = _("Items must be selected in order to perform " "actions on them. No items have been changed.") self.message_user(request, msg, messages.WARNING) action_failed = True # Actions with confirmation if (actions and request.method == 'POST' and helpers.ACTION_CHECKBOX_NAME in request.POST and 'index' not in request.POST and '_save' not in request.POST): if selected: response = self.response_action(request, queryset=cl.get_queryset(request)) if response: return response else: action_failed = True if action_failed: # Redirect back to the changelist page to avoid resubmitting the # form if the user refreshes the browser or uses the "No, take # me back" button on the action confirmation page. 
return HttpResponseRedirect(request.get_full_path()) # If we're allowing changelist editing, we need to construct a formset # for the changelist given all the fields to be edited. Then we'll # use the formset to validate/process POSTed data. formset = cl.formset = None # Handle POSTed bulk-edit data. if request.method == 'POST' and cl.list_editable and '_save' in request.POST: if not self.has_change_permission(request): raise PermissionDenied FormSet = self.get_changelist_formset(request) modified_objects = self._get_list_editable_queryset(request, FormSet.get_default_prefix()) formset = cl.formset = FormSet(request.POST, request.FILES, queryset=modified_objects) if formset.is_valid(): changecount = 0 for form in formset.forms: if form.has_changed(): obj = self.save_form(request, form, change=True) self.save_model(request, obj, form, change=True) self.save_related(request, form, formsets=[], change=True) change_msg = self.construct_change_message(request, form, None) self.log_change(request, obj, change_msg) changecount += 1 if changecount: msg = ngettext( "%(count)s %(name)s was changed successfully.", "%(count)s %(name)s were changed successfully.", changecount ) % { 'count': changecount, 'name': model_ngettext(opts, changecount), } self.message_user(request, msg, messages.SUCCESS) return HttpResponseRedirect(request.get_full_path()) # Handle GET -- construct a formset for display. elif cl.list_editable and self.has_change_permission(request): FormSet = self.get_changelist_formset(request) formset = cl.formset = FormSet(queryset=cl.result_list) # Build the list of media to be used by the formset. if formset: media = self.media + formset.media else: media = self.media # Build the action form and populate it with available actions. if actions: action_form = self.action_form(auto_id=None) action_form.fields['action'].choices = self.get_action_choices(request) media += action_form.media else: action_form = None selection_note_all = ngettext( '%(total_count)s selected', 'All %(total_count)s selected', cl.result_count ) context = { **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'selection_note': _('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)}, 'selection_note_all': selection_note_all % {'total_count': cl.result_count}, 'title': cl.title, 'subtitle': None, 'is_popup': cl.is_popup, 'to_field': cl.to_field, 'cl': cl, 'media': media, 'has_add_permission': self.has_add_permission(request), 'opts': cl.opts, 'action_form': action_form, 'actions_on_top': self.actions_on_top, 'actions_on_bottom': self.actions_on_bottom, 'actions_selection_counter': self.actions_selection_counter, 'preserved_filters': self.get_preserved_filters(request), **(extra_context or {}), } request.current_app = self.admin_site.name return TemplateResponse(request, self.change_list_template or [ 'admin/%s/%s/change_list.html' % (app_label, opts.model_name), 'admin/%s/change_list.html' % app_label, 'admin/change_list.html' ], context) def get_deleted_objects(self, objs, request): """ Hook for customizing the delete process for the delete view and the "delete selected" action. """ return get_deleted_objects(objs, request, self.admin_site) @csrf_protect_m def delete_view(self, request, object_id, extra_context=None): with transaction.atomic(using=router.db_for_write(self.model)): return self._delete_view(request, object_id, extra_context) def _delete_view(self, request, object_id, extra_context): "The 'delete' admin view for this model." 
opts = self.model._meta app_label = opts.app_label to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR)) if to_field and not self.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field) obj = self.get_object(request, unquote(object_id), to_field) if not self.has_delete_permission(request, obj): raise PermissionDenied if obj is None: return self._get_obj_does_not_exist_redirect(request, opts, object_id) # Populate deleted_objects, a data structure of all related objects that # will also be deleted. deleted_objects, model_count, perms_needed, protected = self.get_deleted_objects([obj], request) if request.POST and not protected: # The user has confirmed the deletion. if perms_needed: raise PermissionDenied obj_display = str(obj) attr = str(to_field) if to_field else opts.pk.attname obj_id = obj.serializable_value(attr) self.log_deletion(request, obj, obj_display) self.delete_model(request, obj) return self.response_delete(request, obj_display, obj_id) object_name = str(opts.verbose_name) if perms_needed or protected: title = _("Cannot delete %(name)s") % {"name": object_name} else: title = _("Are you sure?") context = { **self.admin_site.each_context(request), 'title': title, 'subtitle': None, 'object_name': object_name, 'object': obj, 'deleted_objects': deleted_objects, 'model_count': dict(model_count).items(), 'perms_lacking': perms_needed, 'protected': protected, 'opts': opts, 'app_label': app_label, 'preserved_filters': self.get_preserved_filters(request), 'is_popup': IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET, 'to_field': to_field, **(extra_context or {}), } return self.render_delete_form(request, context) def history_view(self, request, object_id, extra_context=None): "The 'history' admin view for this model." from django.contrib.admin.models import LogEntry # First check if the user can see this history. model = self.model obj = self.get_object(request, unquote(object_id)) if obj is None: return self._get_obj_does_not_exist_redirect(request, model._meta, object_id) if not self.has_view_or_change_permission(request, obj): raise PermissionDenied # Then get the history for this object. opts = model._meta app_label = opts.app_label action_list = LogEntry.objects.filter( object_id=unquote(object_id), content_type=get_content_type_for_model(model) ).select_related().order_by('action_time') context = { **self.admin_site.each_context(request), 'title': _('Change history: %s') % obj, 'subtitle': None, 'action_list': action_list, 'module_name': str(capfirst(opts.verbose_name_plural)), 'object': obj, 'opts': opts, 'preserved_filters': self.get_preserved_filters(request), **(extra_context or {}), } request.current_app = self.admin_site.name return TemplateResponse(request, self.object_history_template or [ "admin/%s/%s/object_history.html" % (app_label, opts.model_name), "admin/%s/object_history.html" % app_label, "admin/object_history.html" ], context) def get_formset_kwargs(self, request, obj, inline, prefix): formset_params = { 'instance': obj, 'prefix': prefix, 'queryset': inline.get_queryset(request), } if request.method == 'POST': formset_params.update({ 'data': request.POST.copy(), 'files': request.FILES, 'save_as_new': '_saveasnew' in request.POST }) return formset_params def _create_formsets(self, request, obj, change): "Helper function to generate formsets for add/change_view." 
formsets = [] inline_instances = [] prefixes = {} get_formsets_args = [request] if change: get_formsets_args.append(obj) for FormSet, inline in self.get_formsets_with_inlines(*get_formsets_args): prefix = FormSet.get_default_prefix() prefixes[prefix] = prefixes.get(prefix, 0) + 1 if prefixes[prefix] != 1 or not prefix: prefix = "%s-%s" % (prefix, prefixes[prefix]) formset_params = self.get_formset_kwargs(request, obj, inline, prefix) formset = FormSet(**formset_params) def user_deleted_form(request, obj, formset, index): """Return whether or not the user deleted the form.""" return ( inline.has_delete_permission(request, obj) and '{}-{}-DELETE'.format(formset.prefix, index) in request.POST ) # Bypass validation of each view-only inline form (since the form's # data won't be in request.POST), unless the form was deleted. if not inline.has_change_permission(request, obj if change else None): for index, form in enumerate(formset.initial_forms): if user_deleted_form(request, obj, formset, index): continue form._errors = {} form.cleaned_data = form.initial formsets.append(formset) inline_instances.append(inline) return formsets, inline_instances class InlineModelAdmin(BaseModelAdmin): """ Options for inline editing of ``model`` instances. Provide ``fk_name`` to specify the attribute name of the ``ForeignKey`` from ``model`` to its parent. This is required if ``model`` has more than one ``ForeignKey`` to its parent. """ model = None fk_name = None formset = BaseInlineFormSet extra = 3 min_num = None max_num = None template = None verbose_name = None verbose_name_plural = None can_delete = True show_change_link = False checks_class = InlineModelAdminChecks classes = None def __init__(self, parent_model, admin_site): self.admin_site = admin_site self.parent_model = parent_model self.opts = self.model._meta self.has_registered_model = admin_site.is_registered(self.model) super().__init__() if self.verbose_name is None: self.verbose_name = self.model._meta.verbose_name if self.verbose_name_plural is None: self.verbose_name_plural = self.model._meta.verbose_name_plural @property def media(self): extra = '' if settings.DEBUG else '.min' js = ['vendor/jquery/jquery%s.js' % extra, 'jquery.init.js', 'inlines.js'] if self.filter_vertical or self.filter_horizontal: js.extend(['SelectBox.js', 'SelectFilter2.js']) if self.classes and 'collapse' in self.classes: js.append('collapse.js') return forms.Media(js=['admin/js/%s' % url for url in js]) def get_extra(self, request, obj=None, **kwargs): """Hook for customizing the number of extra inline forms.""" return self.extra def get_min_num(self, request, obj=None, **kwargs): """Hook for customizing the min number of inline forms.""" return self.min_num def get_max_num(self, request, obj=None, **kwargs): """Hook for customizing the max number of extra inline forms.""" return self.max_num def get_formset(self, request, obj=None, **kwargs): """Return a BaseInlineFormSet class for use in admin add/change views.""" if 'fields' in kwargs: fields = kwargs.pop('fields') else: fields = flatten_fieldsets(self.get_fieldsets(request, obj)) excluded = self.get_exclude(request, obj) exclude = [] if excluded is None else list(excluded) exclude.extend(self.get_readonly_fields(request, obj)) if excluded is None and hasattr(self.form, '_meta') and self.form._meta.exclude: # Take the custom ModelForm's Meta.exclude into account only if the # InlineModelAdmin doesn't define its own. 
exclude.extend(self.form._meta.exclude) # If exclude is an empty list we use None, since that's the actual # default. exclude = exclude or None can_delete = self.can_delete and self.has_delete_permission(request, obj) defaults = { 'form': self.form, 'formset': self.formset, 'fk_name': self.fk_name, 'fields': fields, 'exclude': exclude, 'formfield_callback': partial(self.formfield_for_dbfield, request=request), 'extra': self.get_extra(request, obj, **kwargs), 'min_num': self.get_min_num(request, obj, **kwargs), 'max_num': self.get_max_num(request, obj, **kwargs), 'can_delete': can_delete, **kwargs, } base_model_form = defaults['form'] can_change = self.has_change_permission(request, obj) if request else True can_add = self.has_add_permission(request, obj) if request else True class DeleteProtectedModelForm(base_model_form): def hand_clean_DELETE(self): """ We don't validate the 'DELETE' field itself because on templates it's not rendered using the field information, but just using a generic "deletion_field" of the InlineModelAdmin. """ if self.cleaned_data.get(DELETION_FIELD_NAME, False): using = router.db_for_write(self._meta.model) collector = NestedObjects(using=using) if self.instance._state.adding: return collector.collect([self.instance]) if collector.protected: objs = [] for p in collector.protected: objs.append( # Translators: Model verbose name and instance representation, # suitable to be an item in a list. _('%(class_name)s %(instance)s') % { 'class_name': p._meta.verbose_name, 'instance': p} ) params = { 'class_name': self._meta.model._meta.verbose_name, 'instance': self.instance, 'related_objects': get_text_list(objs, _('and')), } msg = _("Deleting %(class_name)s %(instance)s would require " "deleting the following protected related objects: " "%(related_objects)s") raise ValidationError(msg, code='deleting_protected', params=params) def is_valid(self): result = super().is_valid() self.hand_clean_DELETE() return result def has_changed(self): # Protect against unauthorized edits. if not can_change and not self.instance._state.adding: return False if not can_add and self.instance._state.adding: return False return super().has_changed() defaults['form'] = DeleteProtectedModelForm if defaults['fields'] is None and not modelform_defines_fields(defaults['form']): defaults['fields'] = forms.ALL_FIELDS return inlineformset_factory(self.parent_model, self.model, **defaults) def _get_form_for_get_fields(self, request, obj=None): return self.get_formset(request, obj, fields=None).form def get_queryset(self, request): queryset = super().get_queryset(request) if not self.has_view_or_change_permission(request): queryset = queryset.none() return queryset def _has_any_perms_for_target_model(self, request, perms): """ This method is called only when the ModelAdmin's model is for an ManyToManyField's implicit through model (if self.opts.auto_created). Return True if the user has any of the given permissions ('add', 'change', etc.) for the model that points to the through model. """ opts = self.opts # Find the target model of an auto-created many-to-many relationship. for field in opts.fields: if field.remote_field and field.remote_field.model != self.parent_model: opts = field.remote_field.model._meta break return any( request.user.has_perm('%s.%s' % (opts.app_label, get_permission_codename(perm, opts))) for perm in perms ) def has_add_permission(self, request, obj): if self.opts.auto_created: # Auto-created intermediate models don't have their own # permissions. 
The user needs to have the change permission for the # related model in order to be able to do anything with the # intermediate model. return self._has_any_perms_for_target_model(request, ['change']) return super().has_add_permission(request) def has_change_permission(self, request, obj=None): if self.opts.auto_created: # Same comment as has_add_permission(). return self._has_any_perms_for_target_model(request, ['change']) return super().has_change_permission(request) def has_delete_permission(self, request, obj=None): if self.opts.auto_created: # Same comment as has_add_permission(). return self._has_any_perms_for_target_model(request, ['change']) return super().has_delete_permission(request, obj) def has_view_permission(self, request, obj=None): if self.opts.auto_created: # Same comment as has_add_permission(). The 'change' permission # also implies the 'view' permission. return self._has_any_perms_for_target_model(request, ['view', 'change']) return super().has_view_permission(request) class StackedInline(InlineModelAdmin): template = 'admin/edit_inline/stacked.html' class TabularInline(InlineModelAdmin): template = 'admin/edit_inline/tabular.html'
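# Illustrative usage sketch, not part of the module above: wiring a
# TabularInline into a ModelAdmin. ``Author``, ``Book`` and ``myapp`` are
# hypothetical placeholders; any model with a ForeignKey to the parent works.
from django.contrib import admin

from myapp.models import Author, Book  # hypothetical app and models


class BookInline(admin.TabularInline):
    model = Book             # the ForeignKey from Book to Author is discovered automatically
    extra = 1                # override the default of 3 blank extra forms
    show_change_link = True  # link each inline row to its own change form


@admin.register(Author)
class AuthorAdmin(admin.ModelAdmin):
    inlines = [BookInline]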
import re from functools import update_wrapper from weakref import WeakSet from django.apps import apps from django.conf import settings from django.contrib.admin import ModelAdmin, actions from django.contrib.admin.views.autocomplete import AutocompleteJsonView from django.contrib.auth import REDIRECT_FIELD_NAME from django.core.exceptions import ImproperlyConfigured from django.db.models.base import ModelBase from django.http import ( Http404, HttpResponsePermanentRedirect, HttpResponseRedirect, ) from django.template.response import TemplateResponse from django.urls import NoReverseMatch, Resolver404, resolve, reverse from django.utils.decorators import method_decorator from django.utils.functional import LazyObject from django.utils.module_loading import import_string from django.utils.text import capfirst from django.utils.translation import gettext as _, gettext_lazy from django.views.decorators.cache import never_cache from django.views.decorators.common import no_append_slash from django.views.decorators.csrf import csrf_protect from django.views.i18n import JavaScriptCatalog all_sites = WeakSet() class AlreadyRegistered(Exception): pass class NotRegistered(Exception): pass class AdminSite: """ An AdminSite object encapsulates an instance of the Django admin application, ready to be hooked in to your URLconf. Models are registered with the AdminSite using the register() method, and the get_urls() method can then be used to access Django view functions that present a full admin interface for the collection of registered models. """ # Text to put at the end of each page's <title>. site_title = gettext_lazy('Django site admin') # Text to put in each page's <h1>. site_header = gettext_lazy('Django administration') # Text to put at the top of the admin index page. index_title = gettext_lazy('Site administration') # URL for the "View site" link at the top of each admin page. site_url = '/' enable_nav_sidebar = True empty_value_display = '-' login_form = None index_template = None app_index_template = None login_template = None logout_template = None password_change_template = None password_change_done_template = None final_catch_all_view = True def __init__(self, name='admin'): self._registry = {} # model_class class -> admin_class instance self.name = name self._actions = {'delete_selected': actions.delete_selected} self._global_actions = self._actions.copy() all_sites.add(self) def check(self, app_configs): """ Run the system checks on all ModelAdmins, except if they aren't customized at all. """ if app_configs is None: app_configs = apps.get_app_configs() app_configs = set(app_configs) # Speed up lookups below errors = [] modeladmins = (o for o in self._registry.values() if o.__class__ is not ModelAdmin) for modeladmin in modeladmins: if modeladmin.model._meta.app_config in app_configs: errors.extend(modeladmin.check()) return errors def register(self, model_or_iterable, admin_class=None, **options): """ Register the given model(s) with the given admin class. The model(s) should be Model classes, not instances. If an admin class isn't given, use ModelAdmin (the default admin options). If keyword arguments are given -- e.g., list_display -- apply them as options to the admin class. If a model is already registered, raise AlreadyRegistered. If a model is abstract, raise ImproperlyConfigured. 
""" admin_class = admin_class or ModelAdmin if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model._meta.abstract: raise ImproperlyConfigured( 'The model %s is abstract, so it cannot be registered with admin.' % model.__name__ ) if model in self._registry: registered_admin = str(self._registry[model]) msg = 'The model %s is already registered ' % model.__name__ if registered_admin.endswith('.ModelAdmin'): # Most likely registered without a ModelAdmin subclass. msg += 'in app %r.' % re.sub(r'\.ModelAdmin$', '', registered_admin) else: msg += 'with %r.' % registered_admin raise AlreadyRegistered(msg) # Ignore the registration if the model has been # swapped out. if not model._meta.swapped: # If we got **options then dynamically construct a subclass of # admin_class with those **options. if options: # For reasons I don't quite understand, without a __module__ # the created class appears to "live" in the wrong place, # which causes issues later on. options['__module__'] = __name__ admin_class = type("%sAdmin" % model.__name__, (admin_class,), options) # Instantiate the admin class to save in the registry self._registry[model] = admin_class(model, self) def unregister(self, model_or_iterable): """ Unregister the given model(s). If a model isn't already registered, raise NotRegistered. """ if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model not in self._registry: raise NotRegistered('The model %s is not registered' % model.__name__) del self._registry[model] def is_registered(self, model): """ Check if a model class is registered with this `AdminSite`. """ return model in self._registry def add_action(self, action, name=None): """ Register an action to be available globally. """ name = name or action.__name__ self._actions[name] = action self._global_actions[name] = action def disable_action(self, name): """ Disable a globally-registered action. Raise KeyError for invalid names. """ del self._actions[name] def get_action(self, name): """ Explicitly get a registered global action whether it's enabled or not. Raise KeyError for invalid names. """ return self._global_actions[name] @property def actions(self): """ Get all the enabled actions as an iterable of (name, func). """ return self._actions.items() def has_permission(self, request): """ Return True if the given HttpRequest has permission to view *at least one* page in the admin site. """ return request.user.is_active and request.user.is_staff def admin_view(self, view, cacheable=False): """ Decorator to create an admin view attached to this ``AdminSite``. This wraps the view and provides permission checking by calling ``self.has_permission``. You'll want to use this from within ``AdminSite.get_urls()``: class MyAdminSite(AdminSite): def get_urls(self): from django.urls import path urls = super().get_urls() urls += [ path('my_view/', self.admin_view(some_view)) ] return urls By default, admin_views are marked non-cacheable using the ``never_cache`` decorator. If the view can be safely cached, set cacheable=True. """ def inner(request, *args, **kwargs): if not self.has_permission(request): if request.path == reverse('admin:logout', current_app=self.name): index_path = reverse('admin:index', current_app=self.name) return HttpResponseRedirect(index_path) # Inner import to prevent django.contrib.admin (app) from # importing django.contrib.auth.models.User (unrelated model). 
from django.contrib.auth.views import redirect_to_login return redirect_to_login( request.get_full_path(), reverse('admin:login', current_app=self.name) ) return view(request, *args, **kwargs) if not cacheable: inner = never_cache(inner) # We add csrf_protect here so this function can be used as a utility # function for any view, without having to repeat 'csrf_protect'. if not getattr(view, 'csrf_exempt', False): inner = csrf_protect(inner) return update_wrapper(inner, view) def get_urls(self): # Since this module gets imported in the application's root package, # it cannot import models from other applications at the module level, # and django.contrib.contenttypes.views imports ContentType. from django.contrib.contenttypes import views as contenttype_views from django.urls import include, path, re_path def wrap(view, cacheable=False): def wrapper(*args, **kwargs): return self.admin_view(view, cacheable)(*args, **kwargs) wrapper.admin_site = self return update_wrapper(wrapper, view) # Admin-site-wide views. urlpatterns = [ path('', wrap(self.index), name='index'), path('login/', self.login, name='login'), path('logout/', wrap(self.logout), name='logout'), path('password_change/', wrap(self.password_change, cacheable=True), name='password_change'), path( 'password_change/done/', wrap(self.password_change_done, cacheable=True), name='password_change_done', ), path('autocomplete/', wrap(self.autocomplete_view), name='autocomplete'), path('jsi18n/', wrap(self.i18n_javascript, cacheable=True), name='jsi18n'), path( 'r/<int:content_type_id>/<path:object_id>/', wrap(contenttype_views.shortcut), name='view_on_site', ), ] # Add in each model's views, and create a list of valid URLS for the # app_index valid_app_labels = [] for model, model_admin in self._registry.items(): urlpatterns += [ path('%s/%s/' % (model._meta.app_label, model._meta.model_name), include(model_admin.urls)), ] if model._meta.app_label not in valid_app_labels: valid_app_labels.append(model._meta.app_label) # If there were ModelAdmins registered, we should have a list of app # labels for which we need to allow access to the app_index view, if valid_app_labels: regex = r'^(?P<app_label>' + '|'.join(valid_app_labels) + ')/$' urlpatterns += [ re_path(regex, wrap(self.app_index), name='app_list'), ] if self.final_catch_all_view: urlpatterns.append(re_path(r'(?P<url>.*)$', wrap(self.catch_all_view))) return urlpatterns @property def urls(self): return self.get_urls(), 'admin', self.name def each_context(self, request): """ Return a dictionary of variables to put in the template context for *every* page in the admin site. For sites running on a subpath, use the SCRIPT_NAME value if site_url hasn't been customized. """ script_name = request.META['SCRIPT_NAME'] site_url = script_name if self.site_url == '/' and script_name else self.site_url return { 'site_title': self.site_title, 'site_header': self.site_header, 'site_url': site_url, 'has_permission': self.has_permission(request), 'available_apps': self.get_app_list(request), 'is_popup': False, 'is_nav_sidebar_enabled': self.enable_nav_sidebar, } def password_change(self, request, extra_context=None): """ Handle the "change password" task -- both form display and validation. 
""" from django.contrib.admin.forms import AdminPasswordChangeForm from django.contrib.auth.views import PasswordChangeView url = reverse('admin:password_change_done', current_app=self.name) defaults = { 'form_class': AdminPasswordChangeForm, 'success_url': url, 'extra_context': {**self.each_context(request), **(extra_context or {})}, } if self.password_change_template is not None: defaults['template_name'] = self.password_change_template request.current_app = self.name return PasswordChangeView.as_view(**defaults)(request) def password_change_done(self, request, extra_context=None): """ Display the "success" page after a password change. """ from django.contrib.auth.views import PasswordChangeDoneView defaults = { 'extra_context': {**self.each_context(request), **(extra_context or {})}, } if self.password_change_done_template is not None: defaults['template_name'] = self.password_change_done_template request.current_app = self.name return PasswordChangeDoneView.as_view(**defaults)(request) def i18n_javascript(self, request, extra_context=None): """ Display the i18n JavaScript that the Django admin requires. `extra_context` is unused but present for consistency with the other admin views. """ return JavaScriptCatalog.as_view(packages=['django.contrib.admin'])(request) def logout(self, request, extra_context=None): """ Log out the user for the given HttpRequest. This should *not* assume the user is already logged in. """ from django.contrib.auth.views import LogoutView defaults = { 'extra_context': { **self.each_context(request), # Since the user isn't logged out at this point, the value of # has_permission must be overridden. 'has_permission': False, **(extra_context or {}) }, } if self.logout_template is not None: defaults['template_name'] = self.logout_template request.current_app = self.name return LogoutView.as_view(**defaults)(request) @method_decorator(never_cache) def login(self, request, extra_context=None): """ Display the login form for the given HttpRequest. """ if request.method == 'GET' and self.has_permission(request): # Already logged-in, redirect to admin index index_path = reverse('admin:index', current_app=self.name) return HttpResponseRedirect(index_path) # Since this module gets imported in the application's root package, # it cannot import models from other applications at the module level, # and django.contrib.admin.forms eventually imports User. 
from django.contrib.admin.forms import AdminAuthenticationForm from django.contrib.auth.views import LoginView context = { **self.each_context(request), 'title': _('Log in'), 'app_path': request.get_full_path(), 'username': request.user.get_username(), } if (REDIRECT_FIELD_NAME not in request.GET and REDIRECT_FIELD_NAME not in request.POST): context[REDIRECT_FIELD_NAME] = reverse('admin:index', current_app=self.name) context.update(extra_context or {}) defaults = { 'extra_context': context, 'authentication_form': self.login_form or AdminAuthenticationForm, 'template_name': self.login_template or 'admin/login.html', } request.current_app = self.name return LoginView.as_view(**defaults)(request) def autocomplete_view(self, request): return AutocompleteJsonView.as_view(admin_site=self)(request) @no_append_slash def catch_all_view(self, request, url): if settings.APPEND_SLASH and not url.endswith('/'): urlconf = getattr(request, 'urlconf', None) try: match = resolve('%s/' % request.path_info, urlconf) except Resolver404: pass else: if getattr(match.func, 'should_append_slash', True): return HttpResponsePermanentRedirect('%s/' % request.path) raise Http404 def _build_app_dict(self, request, label=None): """ Build the app dictionary. The optional `label` parameter filters models of a specific app. """ app_dict = {} if label: models = { m: m_a for m, m_a in self._registry.items() if m._meta.app_label == label } else: models = self._registry for model, model_admin in models.items(): app_label = model._meta.app_label has_module_perms = model_admin.has_module_permission(request) if not has_module_perms: continue perms = model_admin.get_model_perms(request) # Check whether user has any perm for this module. # If so, add the module to the model_list. if True not in perms.values(): continue info = (app_label, model._meta.model_name) model_dict = { 'model': model, 'name': capfirst(model._meta.verbose_name_plural), 'object_name': model._meta.object_name, 'perms': perms, 'admin_url': None, 'add_url': None, } if perms.get('change') or perms.get('view'): model_dict['view_only'] = not perms.get('change') try: model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name) except NoReverseMatch: pass if perms.get('add'): try: model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name) except NoReverseMatch: pass if app_label in app_dict: app_dict[app_label]['models'].append(model_dict) else: app_dict[app_label] = { 'name': apps.get_app_config(app_label).verbose_name, 'app_label': app_label, 'app_url': reverse( 'admin:app_list', kwargs={'app_label': app_label}, current_app=self.name, ), 'has_module_perms': has_module_perms, 'models': [model_dict], } if label: return app_dict.get(label) return app_dict def get_app_list(self, request): """ Return a sorted list of all the installed apps that have been registered in this site. """ app_dict = self._build_app_dict(request) # Sort the apps alphabetically. app_list = sorted(app_dict.values(), key=lambda x: x['name'].lower()) # Sort the models alphabetically within each app. for app in app_list: app['models'].sort(key=lambda x: x['name']) return app_list def index(self, request, extra_context=None): """ Display the main admin index page, which lists all of the installed apps that have been registered in this site. 
""" app_list = self.get_app_list(request) context = { **self.each_context(request), 'title': self.index_title, 'subtitle': None, 'app_list': app_list, **(extra_context or {}), } request.current_app = self.name return TemplateResponse(request, self.index_template or 'admin/index.html', context) def app_index(self, request, app_label, extra_context=None): app_dict = self._build_app_dict(request, app_label) if not app_dict: raise Http404('The requested admin page does not exist.') # Sort the models alphabetically within each app. app_dict['models'].sort(key=lambda x: x['name']) context = { **self.each_context(request), 'title': _('%(app)s administration') % {'app': app_dict['name']}, 'subtitle': None, 'app_list': [app_dict], 'app_label': app_label, **(extra_context or {}), } request.current_app = self.name return TemplateResponse(request, self.app_index_template or [ 'admin/%s/app_index.html' % app_label, 'admin/app_index.html' ], context) class DefaultAdminSite(LazyObject): def _setup(self): AdminSiteClass = import_string(apps.get_app_config('admin').default_site) self._wrapped = AdminSiteClass() # This global object represents the default admin site, for the common case. # You can provide your own AdminSite using the (Simple)AdminConfig.default_site # attribute. You can also instantiate AdminSite in your own code to create a # custom admin site. site = DefaultAdminSite()
import datetime import decimal import json from collections import defaultdict from django.core.exceptions import FieldDoesNotExist from django.db import models, router from django.db.models.constants import LOOKUP_SEP from django.db.models.deletion import Collector from django.forms.utils import pretty_name from django.urls import NoReverseMatch, reverse from django.utils import formats, timezone from django.utils.html import format_html from django.utils.regex_helper import _lazy_re_compile from django.utils.text import capfirst from django.utils.translation import ngettext, override as translation_override QUOTE_MAP = {i: '_%02X' % i for i in b'":/_#?;@&=+$,"[]<>%\n\\'} UNQUOTE_MAP = {v: chr(k) for k, v in QUOTE_MAP.items()} UNQUOTE_RE = _lazy_re_compile('_(?:%s)' % '|'.join([x[1:] for x in UNQUOTE_MAP])) class FieldIsAForeignKeyColumnName(Exception): """A field is a foreign key attname, i.e. <FK>_id.""" pass def lookup_spawns_duplicates(opts, lookup_path): """ Return True if the given lookup path spawns duplicates. """ lookup_fields = lookup_path.split(LOOKUP_SEP) # Go through the fields (following all relations) and look for an m2m. for field_name in lookup_fields: if field_name == 'pk': field_name = opts.pk.name try: field = opts.get_field(field_name) except FieldDoesNotExist: # Ignore query lookups. continue else: if hasattr(field, 'get_path_info'): # This field is a relation; update opts to follow the relation. path_info = field.get_path_info() opts = path_info[-1].to_opts if any(path.m2m for path in path_info): # This field is a m2m relation so duplicates must be # handled. return True return False def prepare_lookup_value(key, value): """ Return a lookup value prepared to be used in queryset filtering. """ # if key ends with __in, split parameter into separate values if key.endswith('__in'): value = value.split(',') # if key ends with __isnull, special case '' and the string literals 'false' and '0' elif key.endswith('__isnull'): value = value.lower() not in ('', 'false', '0') return value def quote(s): """ Ensure that primary key values do not confuse the admin URLs by escaping any '/', '_' and ':' and similarly problematic characters. Similar to urllib.parse.quote(), except that the quoting is slightly different so that it doesn't get automatically unquoted by the Web browser. """ return s.translate(QUOTE_MAP) if isinstance(s, str) else s def unquote(s): """Undo the effects of quote().""" return UNQUOTE_RE.sub(lambda m: UNQUOTE_MAP[m[0]], s) def flatten(fields): """ Return a list which is a single level of flattening of the original list. """ flat = [] for field in fields: if isinstance(field, (list, tuple)): flat.extend(field) else: flat.append(field) return flat def flatten_fieldsets(fieldsets): """Return a list of field names from an admin fieldsets structure.""" field_names = [] for name, opts in fieldsets: field_names.extend( flatten(opts['fields']) ) return field_names def get_deleted_objects(objs, request, admin_site): """ Find all objects related to ``objs`` that should also be deleted. ``objs`` must be a homogeneous iterable of objects (e.g. a QuerySet). Return a nested list of strings suitable for display in the template with the ``unordered_list`` filter. 
""" try: obj = objs[0] except IndexError: return [], {}, set(), [] else: using = router.db_for_write(obj._meta.model) collector = NestedObjects(using=using) collector.collect(objs) perms_needed = set() def format_callback(obj): model = obj.__class__ has_admin = model in admin_site._registry opts = obj._meta no_edit_link = '%s: %s' % (capfirst(opts.verbose_name), obj) if has_admin: if not admin_site._registry[model].has_delete_permission(request, obj): perms_needed.add(opts.verbose_name) try: admin_url = reverse('%s:%s_%s_change' % (admin_site.name, opts.app_label, opts.model_name), None, (quote(obj.pk),)) except NoReverseMatch: # Change url doesn't exist -- don't display link to edit return no_edit_link # Display a link to the admin page. return format_html('{}: <a href="{}">{}</a>', capfirst(opts.verbose_name), admin_url, obj) else: # Don't display link to edit, because it either has no # admin or is edited inline. return no_edit_link to_delete = collector.nested(format_callback) protected = [format_callback(obj) for obj in collector.protected] model_count = {model._meta.verbose_name_plural: len(objs) for model, objs in collector.model_objs.items()} return to_delete, model_count, perms_needed, protected class NestedObjects(Collector): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.edges = {} # {from_instance: [to_instances]} self.protected = set() self.model_objs = defaultdict(set) def add_edge(self, source, target): self.edges.setdefault(source, []).append(target) def collect(self, objs, source=None, source_attr=None, **kwargs): for obj in objs: if source_attr and not source_attr.endswith('+'): related_name = source_attr % { 'class': source._meta.model_name, 'app_label': source._meta.app_label, } self.add_edge(getattr(obj, related_name), obj) else: self.add_edge(None, obj) self.model_objs[obj._meta.model].add(obj) try: return super().collect(objs, source_attr=source_attr, **kwargs) except models.ProtectedError as e: self.protected.update(e.protected_objects) except models.RestrictedError as e: self.protected.update(e.restricted_objects) def related_objects(self, related_model, related_fields, objs): qs = super().related_objects(related_model, related_fields, objs) return qs.select_related(*[related_field.name for related_field in related_fields]) def _nested(self, obj, seen, format_callback): if obj in seen: return [] seen.add(obj) children = [] for child in self.edges.get(obj, ()): children.extend(self._nested(child, seen, format_callback)) if format_callback: ret = [format_callback(obj)] else: ret = [obj] if children: ret.append(children) return ret def nested(self, format_callback=None): """ Return the graph as a nested list. """ seen = set() roots = [] for root in self.edges.get(None, ()): roots.extend(self._nested(root, seen, format_callback)) return roots def can_fast_delete(self, *args, **kwargs): """ We always want to load the objects into memory so that we can display them to the user in confirm page. """ return False def model_format_dict(obj): """ Return a `dict` with keys 'verbose_name' and 'verbose_name_plural', typically for use with string formatting. `obj` may be a `Model` instance, `Model` subclass, or `QuerySet` instance. 
""" if isinstance(obj, (models.Model, models.base.ModelBase)): opts = obj._meta elif isinstance(obj, models.query.QuerySet): opts = obj.model._meta else: opts = obj return { 'verbose_name': opts.verbose_name, 'verbose_name_plural': opts.verbose_name_plural, } def model_ngettext(obj, n=None): """ Return the appropriate `verbose_name` or `verbose_name_plural` value for `obj` depending on the count `n`. `obj` may be a `Model` instance, `Model` subclass, or `QuerySet` instance. If `obj` is a `QuerySet` instance, `n` is optional and the length of the `QuerySet` is used. """ if isinstance(obj, models.query.QuerySet): if n is None: n = obj.count() obj = obj.model d = model_format_dict(obj) singular, plural = d["verbose_name"], d["verbose_name_plural"] return ngettext(singular, plural, n or 0) def lookup_field(name, obj, model_admin=None): opts = obj._meta try: f = _get_non_gfk_field(opts, name) except (FieldDoesNotExist, FieldIsAForeignKeyColumnName): # For non-field values, the value is either a method, property or # returned via a callable. if callable(name): attr = name value = attr(obj) elif hasattr(model_admin, name) and name != '__str__': attr = getattr(model_admin, name) value = attr(obj) else: attr = getattr(obj, name) if callable(attr): value = attr() else: value = attr f = None else: attr = None value = getattr(obj, name) return f, attr, value def _get_non_gfk_field(opts, name): """ For historical reasons, the admin app relies on GenericForeignKeys as being "not found" by get_field(). This could likely be cleaned up. Reverse relations should also be excluded as these aren't attributes of the model (rather something like `foo_set`). """ field = opts.get_field(name) if (field.is_relation and # Generic foreign keys OR reverse relations ((field.many_to_one and not field.related_model) or field.one_to_many)): raise FieldDoesNotExist() # Avoid coercing <FK>_id fields to FK if field.is_relation and not field.many_to_many and hasattr(field, 'attname') and field.attname == name: raise FieldIsAForeignKeyColumnName() return field def label_for_field(name, model, model_admin=None, return_attr=False, form=None): """ Return a sensible label for a field name. The name can be a callable, property (but not created with @property decorator), or the name of an object's attribute, as well as a model field. If return_attr is True, also return the resolved attribute (which could be a callable). This will be None if (and only if) the name refers to a field. 
""" attr = None try: field = _get_non_gfk_field(model._meta, name) try: label = field.verbose_name except AttributeError: # field is likely a ForeignObjectRel label = field.related_model._meta.verbose_name except FieldDoesNotExist: if name == "__str__": label = str(model._meta.verbose_name) attr = str else: if callable(name): attr = name elif hasattr(model_admin, name): attr = getattr(model_admin, name) elif hasattr(model, name): attr = getattr(model, name) elif form and name in form.fields: attr = form.fields[name] else: message = "Unable to lookup '%s' on %s" % (name, model._meta.object_name) if model_admin: message += " or %s" % model_admin.__class__.__name__ if form: message += " or %s" % form.__class__.__name__ raise AttributeError(message) if hasattr(attr, "short_description"): label = attr.short_description elif (isinstance(attr, property) and hasattr(attr, "fget") and hasattr(attr.fget, "short_description")): label = attr.fget.short_description elif callable(attr): if attr.__name__ == "<lambda>": label = "--" else: label = pretty_name(attr.__name__) else: label = pretty_name(name) except FieldIsAForeignKeyColumnName: label = pretty_name(name) attr = name if return_attr: return (label, attr) else: return label def help_text_for_field(name, model): help_text = "" try: field = _get_non_gfk_field(model._meta, name) except (FieldDoesNotExist, FieldIsAForeignKeyColumnName): pass else: if hasattr(field, 'help_text'): help_text = field.help_text return help_text def display_for_field(value, field, empty_value_display): from django.contrib.admin.templatetags.admin_list import _boolean_icon if getattr(field, 'flatchoices', None): return dict(field.flatchoices).get(value, empty_value_display) # BooleanField needs special-case null-handling, so it comes before the # general null test. 
elif isinstance(field, models.BooleanField): return _boolean_icon(value) elif value is None: return empty_value_display elif isinstance(field, models.DateTimeField): return formats.localize(timezone.template_localtime(value)) elif isinstance(field, (models.DateField, models.TimeField)): return formats.localize(value) elif isinstance(field, models.DecimalField): return formats.number_format(value, field.decimal_places) elif isinstance(field, (models.IntegerField, models.FloatField)): return formats.number_format(value) elif isinstance(field, models.FileField) and value: return format_html('<a href="{}">{}</a>', value.url, value) elif isinstance(field, models.JSONField) and value: try: return json.dumps(value, ensure_ascii=False, cls=field.encoder) except TypeError: return display_for_value(value, empty_value_display) else: return display_for_value(value, empty_value_display) def display_for_value(value, empty_value_display, boolean=False): from django.contrib.admin.templatetags.admin_list import _boolean_icon if boolean: return _boolean_icon(value) elif value is None: return empty_value_display elif isinstance(value, bool): return str(value) elif isinstance(value, datetime.datetime): return formats.localize(timezone.template_localtime(value)) elif isinstance(value, (datetime.date, datetime.time)): return formats.localize(value) elif isinstance(value, (int, decimal.Decimal, float)): return formats.number_format(value) elif isinstance(value, (list, tuple)): return ', '.join(str(v) for v in value) else: return str(value) class NotRelationField(Exception): pass def get_model_from_relation(field): if hasattr(field, 'get_path_info'): return field.get_path_info()[-1].to_opts.model else: raise NotRelationField def reverse_field_path(model, path): """ Create a reversed field path. E.g. Given (Order, "user__groups"), return (Group, "user__order"). Final field must be a related model, not a data field. """ reversed_path = [] parent = model pieces = path.split(LOOKUP_SEP) for piece in pieces: field = parent._meta.get_field(piece) # skip trailing data field if extant: if len(reversed_path) == len(pieces) - 1: # final iteration try: get_model_from_relation(field) except NotRelationField: break # Field should point to another model if field.is_relation and not (field.auto_created and not field.concrete): related_name = field.related_query_name() parent = field.remote_field.model else: related_name = field.field.name parent = field.related_model reversed_path.insert(0, related_name) return (parent, LOOKUP_SEP.join(reversed_path)) def get_fields_from_path(model, path): """ Return list of Fields given path relative to model. e.g. (ModelX, "user__groups__name") -> [ <django.db.models.fields.related.ForeignKey object at 0x...>, <django.db.models.fields.related.ManyToManyField object at 0x...>, <django.db.models.fields.CharField object at 0x...>, ] """ pieces = path.split(LOOKUP_SEP) fields = [] for piece in pieces: if fields: parent = get_model_from_relation(fields[-1]) else: parent = model fields.append(parent._meta.get_field(piece)) return fields def construct_change_message(form, formsets, add): """ Construct a JSON structure describing changes from a changed object. Translations are deactivated so that strings are stored untranslated. Translation happens later on LogEntry access. """ # Evaluating `form.changed_data` prior to disabling translations is required # to avoid fields affected by localization from being included incorrectly, # e.g. where date formats differ such as MM/DD/YYYY vs DD/MM/YYYY. 
changed_data = form.changed_data with translation_override(None): # Deactivate translations while fetching verbose_name for form # field labels and using `field_name`, if verbose_name is not provided. # Translations will happen later on LogEntry access. changed_field_labels = _get_changed_field_labels_from_form(form, changed_data) change_message = [] if add: change_message.append({'added': {}}) elif form.changed_data: change_message.append({'changed': {'fields': changed_field_labels}}) if formsets: with translation_override(None): for formset in formsets: for added_object in formset.new_objects: change_message.append({ 'added': { 'name': str(added_object._meta.verbose_name), 'object': str(added_object), } }) for changed_object, changed_fields in formset.changed_objects: change_message.append({ 'changed': { 'name': str(changed_object._meta.verbose_name), 'object': str(changed_object), 'fields': _get_changed_field_labels_from_form(formset.forms[0], changed_fields), } }) for deleted_object in formset.deleted_objects: change_message.append({ 'deleted': { 'name': str(deleted_object._meta.verbose_name), 'object': str(deleted_object), } }) return change_message def _get_changed_field_labels_from_form(form, changed_data): changed_field_labels = [] for field_name in changed_data: try: verbose_field_name = form.fields[field_name].label or field_name except KeyError: verbose_field_name = field_name changed_field_labels.append(str(verbose_field_name)) return changed_field_labels
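# Illustrative usage sketch, not part of Django itself: how label_for_field()
# and display_for_value() resolve labels and render values. The Book model
# named below is hypothetical, invented only for this example.
#
#   >>> label_for_field('title', Book)
#   'title'             # the field's verbose_name
#   >>> label_for_field('__str__', Book)
#   'book'              # falls back to the model's verbose_name
#   >>> display_for_value(None, empty_value_display='-')
#   '-'
#   >>> display_for_value([1, 2, 3], empty_value_display='-')
#   '1, 2, 3'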
from datetime import datetime, timedelta from django import forms from django.conf import settings from django.contrib import messages from django.contrib.admin import FieldListFilter from django.contrib.admin.exceptions import ( DisallowedModelAdminLookup, DisallowedModelAdminToField, ) from django.contrib.admin.options import ( IS_POPUP_VAR, TO_FIELD_VAR, IncorrectLookupParameters, ) from django.contrib.admin.utils import ( get_fields_from_path, lookup_spawns_duplicates, prepare_lookup_value, quote, ) from django.core.exceptions import ( FieldDoesNotExist, ImproperlyConfigured, SuspiciousOperation, ) from django.core.paginator import InvalidPage from django.db.models import Exists, F, Field, ManyToOneRel, OrderBy, OuterRef from django.db.models.expressions import Combinable from django.urls import reverse from django.utils.http import urlencode from django.utils.timezone import make_aware from django.utils.translation import gettext # Changelist settings ALL_VAR = 'all' ORDER_VAR = 'o' PAGE_VAR = 'p' SEARCH_VAR = 'q' ERROR_FLAG = 'e' IGNORED_PARAMS = (ALL_VAR, ORDER_VAR, SEARCH_VAR, IS_POPUP_VAR, TO_FIELD_VAR) class ChangeListSearchForm(forms.Form): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # Populate "fields" dynamically because SEARCH_VAR is a variable: self.fields = { SEARCH_VAR: forms.CharField(required=False, strip=False), } class ChangeList: search_form_class = ChangeListSearchForm def __init__(self, request, model, list_display, list_display_links, list_filter, date_hierarchy, search_fields, list_select_related, list_per_page, list_max_show_all, list_editable, model_admin, sortable_by, search_help_text): self.model = model self.opts = model._meta self.lookup_opts = self.opts self.root_queryset = model_admin.get_queryset(request) self.list_display = list_display self.list_display_links = list_display_links self.list_filter = list_filter self.has_filters = None self.has_active_filters = None self.clear_all_filters_qs = None self.date_hierarchy = date_hierarchy self.search_fields = search_fields self.list_select_related = list_select_related self.list_per_page = list_per_page self.list_max_show_all = list_max_show_all self.model_admin = model_admin self.preserved_filters = model_admin.get_preserved_filters(request) self.sortable_by = sortable_by self.search_help_text = search_help_text # Get search parameters from the query string. _search_form = self.search_form_class(request.GET) if not _search_form.is_valid(): for error in _search_form.errors.values(): messages.error(request, ', '.join(error)) self.query = _search_form.cleaned_data.get(SEARCH_VAR) or '' try: self.page_num = int(request.GET.get(PAGE_VAR, 1)) except ValueError: self.page_num = 1 self.show_all = ALL_VAR in request.GET self.is_popup = IS_POPUP_VAR in request.GET to_field = request.GET.get(TO_FIELD_VAR) if to_field and not model_admin.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." 
% to_field) self.to_field = to_field self.params = dict(request.GET.items()) if PAGE_VAR in self.params: del self.params[PAGE_VAR] if ERROR_FLAG in self.params: del self.params[ERROR_FLAG] if self.is_popup: self.list_editable = () else: self.list_editable = list_editable self.queryset = self.get_queryset(request) self.get_results(request) if self.is_popup: title = gettext('Select %s') elif self.model_admin.has_change_permission(request): title = gettext('Select %s to change') else: title = gettext('Select %s to view') self.title = title % self.opts.verbose_name self.pk_attname = self.lookup_opts.pk.attname def get_filters_params(self, params=None): """ Return all params except IGNORED_PARAMS. """ params = params or self.params lookup_params = params.copy() # a dictionary of the query string # Remove all the parameters that are globally and systematically # ignored. for ignored in IGNORED_PARAMS: if ignored in lookup_params: del lookup_params[ignored] return lookup_params def get_filters(self, request): lookup_params = self.get_filters_params() may_have_duplicates = False has_active_filters = False for key, value in lookup_params.items(): if not self.model_admin.lookup_allowed(key, value): raise DisallowedModelAdminLookup("Filtering by %s not allowed" % key) filter_specs = [] for list_filter in self.list_filter: lookup_params_count = len(lookup_params) if callable(list_filter): # This is simply a custom list filter class. spec = list_filter(request, lookup_params, self.model, self.model_admin) else: field_path = None if isinstance(list_filter, (tuple, list)): # This is a custom FieldListFilter class for a given field. field, field_list_filter_class = list_filter else: # This is simply a field name, so use the default # FieldListFilter class that has been registered for the # type of the given field. field, field_list_filter_class = list_filter, FieldListFilter.create if not isinstance(field, Field): field_path = field field = get_fields_from_path(self.model, field_path)[-1] spec = field_list_filter_class( field, request, lookup_params, self.model, self.model_admin, field_path=field_path, ) # field_list_filter_class removes any lookup_params it # processes. If that happened, check if duplicates should be # removed. if lookup_params_count > len(lookup_params): may_have_duplicates |= lookup_spawns_duplicates( self.lookup_opts, field_path, ) if spec and spec.has_output(): filter_specs.append(spec) if lookup_params_count > len(lookup_params): has_active_filters = True if self.date_hierarchy: # Create bounded lookup parameters so that the query is more # efficient. year = lookup_params.pop('%s__year' % self.date_hierarchy, None) if year is not None: month = lookup_params.pop('%s__month' % self.date_hierarchy, None) day = lookup_params.pop('%s__day' % self.date_hierarchy, None) try: from_date = datetime( int(year), int(month if month is not None else 1), int(day if day is not None else 1), ) except ValueError as e: raise IncorrectLookupParameters(e) from e if day: to_date = from_date + timedelta(days=1) elif month: # In this branch, from_date will always be the first of a # month, so advancing 32 days gives the next month. 
to_date = (from_date + timedelta(days=32)).replace(day=1) else: to_date = from_date.replace(year=from_date.year + 1) if settings.USE_TZ: from_date = make_aware(from_date) to_date = make_aware(to_date) lookup_params.update({ '%s__gte' % self.date_hierarchy: from_date, '%s__lt' % self.date_hierarchy: to_date, }) # At this point, all the parameters used by the various ListFilters # have been removed from lookup_params, which now only contains other # parameters passed via the query string. We now loop through the # remaining parameters both to ensure that all the parameters are valid # fields and to determine if at least one of them spawns duplicates. If # the lookup parameters aren't real fields, then bail out. try: for key, value in lookup_params.items(): lookup_params[key] = prepare_lookup_value(key, value) may_have_duplicates |= lookup_spawns_duplicates(self.lookup_opts, key) return ( filter_specs, bool(filter_specs), lookup_params, may_have_duplicates, has_active_filters, ) except FieldDoesNotExist as e: raise IncorrectLookupParameters(e) from e def get_query_string(self, new_params=None, remove=None): if new_params is None: new_params = {} if remove is None: remove = [] p = self.params.copy() for r in remove: for k in list(p): if k.startswith(r): del p[k] for k, v in new_params.items(): if v is None: if k in p: del p[k] else: p[k] = v return '?%s' % urlencode(sorted(p.items())) def get_results(self, request): paginator = self.model_admin.get_paginator(request, self.queryset, self.list_per_page) # Get the number of objects, with admin filters applied. result_count = paginator.count # Get the total number of objects, with no admin filters applied. if self.model_admin.show_full_result_count: full_result_count = self.root_queryset.count() else: full_result_count = None can_show_all = result_count <= self.list_max_show_all multi_page = result_count > self.list_per_page # Get the list of objects to display on this page. if (self.show_all and can_show_all) or not multi_page: result_list = self.queryset._clone() else: try: result_list = paginator.page(self.page_num).object_list except InvalidPage: raise IncorrectLookupParameters self.result_count = result_count self.show_full_result_count = self.model_admin.show_full_result_count # Admin actions are shown if there is at least one entry # or if entries are not counted because show_full_result_count is disabled self.show_admin_actions = not self.show_full_result_count or bool(full_result_count) self.full_result_count = full_result_count self.result_list = result_list self.can_show_all = can_show_all self.multi_page = multi_page self.paginator = paginator def _get_default_ordering(self): ordering = [] if self.model_admin.ordering: ordering = self.model_admin.ordering elif self.lookup_opts.ordering: ordering = self.lookup_opts.ordering return ordering def get_ordering_field(self, field_name): """ Return the proper model field name corresponding to the given field_name to use for ordering. field_name may either be the name of a proper model field or the name of a method (on the admin or model) or a callable with the 'admin_order_field' attribute. Return None if no proper model field name can be matched. """ try: field = self.lookup_opts.get_field(field_name) return field.name except FieldDoesNotExist: # See whether field_name is a name of a non-field # that allows sorting. 
if callable(field_name): attr = field_name elif hasattr(self.model_admin, field_name): attr = getattr(self.model_admin, field_name) else: attr = getattr(self.model, field_name) if isinstance(attr, property) and hasattr(attr, 'fget'): attr = attr.fget return getattr(attr, 'admin_order_field', None) def get_ordering(self, request, queryset): """ Return the list of ordering fields for the change list. First check the get_ordering() method in model admin, then check the object's default ordering. Then, any manually-specified ordering from the query string overrides anything. Finally, a deterministic order is guaranteed by calling _get_deterministic_ordering() with the constructed ordering. """ params = self.params ordering = list(self.model_admin.get_ordering(request) or self._get_default_ordering()) if ORDER_VAR in params: # Clear ordering and used params ordering = [] order_params = params[ORDER_VAR].split('.') for p in order_params: try: none, pfx, idx = p.rpartition('-') field_name = self.list_display[int(idx)] order_field = self.get_ordering_field(field_name) if not order_field: continue # No 'admin_order_field', skip it if isinstance(order_field, OrderBy): if pfx == '-': order_field = order_field.copy() order_field.reverse_ordering() ordering.append(order_field) elif hasattr(order_field, 'resolve_expression'): # order_field is an expression. ordering.append(order_field.desc() if pfx == '-' else order_field.asc()) # reverse order if order_field has already "-" as prefix elif order_field.startswith('-') and pfx == '-': ordering.append(order_field[1:]) else: ordering.append(pfx + order_field) except (IndexError, ValueError): continue # Invalid ordering specified, skip it. # Add the given query's ordering fields, if any. ordering.extend(queryset.query.order_by) return self._get_deterministic_ordering(ordering) def _get_deterministic_ordering(self, ordering): """ Ensure a deterministic order across all database backends. Search for a single field or unique together set of fields providing a total ordering. If these are missing, augment the ordering with a descendant primary key. """ ordering = list(ordering) ordering_fields = set() total_ordering_fields = {'pk'} | { field.attname for field in self.lookup_opts.fields if field.unique and not field.null } for part in ordering: # Search for single field providing a total ordering. field_name = None if isinstance(part, str): field_name = part.lstrip('-') elif isinstance(part, F): field_name = part.name elif isinstance(part, OrderBy) and isinstance(part.expression, F): field_name = part.expression.name if field_name: # Normalize attname references by using get_field(). try: field = self.lookup_opts.get_field(field_name) except FieldDoesNotExist: # Could be "?" for random ordering or a related field # lookup. Skip this part of introspection for now. continue # Ordering by a related field name orders by the referenced # model's ordering. Skip this part of introspection for now. if field.remote_field and field_name == field.name: continue if field.attname in total_ordering_fields: break ordering_fields.add(field.attname) else: # No single total ordering field, try unique_together and total # unique constraints. constraint_field_names = ( *self.lookup_opts.unique_together, *( constraint.fields for constraint in self.lookup_opts.total_unique_constraints ), ) for field_names in constraint_field_names: # Normalize attname references by using get_field(). 
fields = [self.lookup_opts.get_field(field_name) for field_name in field_names] # Composite unique constraints containing a nullable column # cannot ensure total ordering. if any(field.null for field in fields): continue if ordering_fields.issuperset(field.attname for field in fields): break else: # If no set of unique fields is present in the ordering, rely # on the primary key to provide total ordering. ordering.append('-pk') return ordering def get_ordering_field_columns(self): """ Return a dictionary of ordering field column numbers and asc/desc. """ # We must cope with more than one column having the same underlying sort # field, so we base things on column numbers. ordering = self._get_default_ordering() ordering_fields = {} if ORDER_VAR not in self.params: # for ordering specified on ModelAdmin or model Meta, we don't know # the right column numbers absolutely, because there might be more # than one column associated with that ordering, so we guess. for field in ordering: if isinstance(field, (Combinable, OrderBy)): if not isinstance(field, OrderBy): field = field.asc() if isinstance(field.expression, F): order_type = 'desc' if field.descending else 'asc' field = field.expression.name else: continue elif field.startswith('-'): field = field[1:] order_type = 'desc' else: order_type = 'asc' for index, attr in enumerate(self.list_display): if self.get_ordering_field(attr) == field: ordering_fields[index] = order_type break else: for p in self.params[ORDER_VAR].split('.'): none, pfx, idx = p.rpartition('-') try: idx = int(idx) except ValueError: continue # skip it ordering_fields[idx] = 'desc' if pfx == '-' else 'asc' return ordering_fields def get_queryset(self, request): # First, we collect all the declared list filters. ( self.filter_specs, self.has_filters, remaining_lookup_params, filters_may_have_duplicates, self.has_active_filters, ) = self.get_filters(request) # Then, we let every list filter modify the queryset to its liking. qs = self.root_queryset for filter_spec in self.filter_specs: new_qs = filter_spec.queryset(request, qs) if new_qs is not None: qs = new_qs try: # Finally, we apply the remaining lookup parameters from the query # string (i.e. those that haven't already been processed by the # filters). qs = qs.filter(**remaining_lookup_params) except (SuspiciousOperation, ImproperlyConfigured): # Allow certain types of errors to be re-raised as-is so that the # caller can treat them in a special way. raise except Exception as e: # Every other error is caught with a naked except, because we don't # have any other way of validating lookup parameters. They might be # invalid if the keyword arguments are incorrect, or if the values # are not in the correct type, so we might get FieldError, # ValueError, ValidationError, or ?. raise IncorrectLookupParameters(e) # Apply search results qs, search_may_have_duplicates = self.model_admin.get_search_results( request, qs, self.query, ) # Set query string for clearing all filters. self.clear_all_filters_qs = self.get_query_string( new_params=remaining_lookup_params, remove=self.get_filters_params(), ) # Remove duplicates from results, if necessary if filters_may_have_duplicates | search_may_have_duplicates: qs = qs.filter(pk=OuterRef('pk')) qs = self.root_queryset.filter(Exists(qs)) # Set ordering. 
ordering = self.get_ordering(request, qs) qs = qs.order_by(*ordering) if not qs.query.select_related: qs = self.apply_select_related(qs) return qs def apply_select_related(self, qs): if self.list_select_related is True: return qs.select_related() if self.list_select_related is False: if self.has_related_field_in_list_display(): return qs.select_related() if self.list_select_related: return qs.select_related(*self.list_select_related) return qs def has_related_field_in_list_display(self): for field_name in self.list_display: try: field = self.lookup_opts.get_field(field_name) except FieldDoesNotExist: pass else: if isinstance(field.remote_field, ManyToOneRel): # <FK>_id field names don't require a join. if field_name != field.get_attname(): return True return False def url_for_result(self, result): pk = getattr(result, self.pk_attname) return reverse('admin:%s_%s_change' % (self.opts.app_label, self.opts.model_name), args=(quote(pk),), current_app=self.model_admin.admin_site.name)
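# Illustrative sketch, not part of Django itself: how get_query_string() merges,
# overrides, and removes query-string parameters. `cl` is a hypothetical
# ChangeList instance whose current self.params are {'o': '1', 'q': 'django'}.
#
#   cl.get_query_string(new_params={'p': 2})        # '?o=1&p=2&q=django'
#   cl.get_query_string(remove=['q'])               # '?o=1'
#   cl.get_query_string(new_params={'o': None})     # '?q=django' (None removes the key)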
""" GDAL - Constant definitions """ from ctypes import ( c_double, c_float, c_int16, c_int32, c_ubyte, c_uint16, c_uint32, ) # See https://www.gdal.org/gdal_8h.html#a22e22ce0a55036a96f652765793fb7a4 GDAL_PIXEL_TYPES = { 0: 'GDT_Unknown', # Unknown or unspecified type 1: 'GDT_Byte', # Eight bit unsigned integer 2: 'GDT_UInt16', # Sixteen bit unsigned integer 3: 'GDT_Int16', # Sixteen bit signed integer 4: 'GDT_UInt32', # Thirty-two bit unsigned integer 5: 'GDT_Int32', # Thirty-two bit signed integer 6: 'GDT_Float32', # Thirty-two bit floating point 7: 'GDT_Float64', # Sixty-four bit floating point 8: 'GDT_CInt16', # Complex Int16 9: 'GDT_CInt32', # Complex Int32 10: 'GDT_CFloat32', # Complex Float32 11: 'GDT_CFloat64', # Complex Float64 } # A list of gdal datatypes that are integers. GDAL_INTEGER_TYPES = [1, 2, 3, 4, 5] # Lookup values to convert GDAL pixel type indices into ctypes objects. # The GDAL band-io works with ctypes arrays to hold data to be written # or to hold the space for data to be read into. The lookup below helps # selecting the right ctypes object for a given gdal pixel type. GDAL_TO_CTYPES = [ None, c_ubyte, c_uint16, c_int16, c_uint32, c_int32, c_float, c_double, None, None, None, None ] # List of resampling algorithms that can be used to warp a GDALRaster. GDAL_RESAMPLE_ALGORITHMS = { 'NearestNeighbour': 0, 'Bilinear': 1, 'Cubic': 2, 'CubicSpline': 3, 'Lanczos': 4, 'Average': 5, 'Mode': 6, } # See https://www.gdal.org/gdal_8h.html#ace76452d94514561fffa8ea1d2a5968c GDAL_COLOR_TYPES = { 0: 'GCI_Undefined', # Undefined, default value, i.e. not known 1: 'GCI_GrayIndex', # Greyscale 2: 'GCI_PaletteIndex', # Paletted 3: 'GCI_RedBand', # Red band of RGBA image 4: 'GCI_GreenBand', # Green band of RGBA image 5: 'GCI_BlueBand', # Blue band of RGBA image 6: 'GCI_AlphaBand', # Alpha (0=transparent, 255=opaque) 7: 'GCI_HueBand', # Hue band of HLS image 8: 'GCI_SaturationBand', # Saturation band of HLS image 9: 'GCI_LightnessBand', # Lightness band of HLS image 10: 'GCI_CyanBand', # Cyan band of CMYK image 11: 'GCI_MagentaBand', # Magenta band of CMYK image 12: 'GCI_YellowBand', # Yellow band of CMYK image 13: 'GCI_BlackBand', # Black band of CMLY image 14: 'GCI_YCbCr_YBand', # Y Luminance 15: 'GCI_YCbCr_CbBand', # Cb Chroma 16: 'GCI_YCbCr_CrBand', # Cr Chroma, also GCI_Max } # GDAL virtual filesystems prefix. VSI_FILESYSTEM_PREFIX = '/vsi' # Fixed base path for buffer-based GDAL in-memory files. VSI_MEM_FILESYSTEM_BASE_PATH = '/vsimem/' # Should the memory file system take ownership of the buffer, freeing it when # the file is deleted? (No, GDALRaster.__del__() will delete the buffer.) VSI_TAKE_BUFFER_OWNERSHIP = False # Should a VSI file be removed when retrieving its buffer? VSI_DELETE_BUFFER_ON_READ = False
import datetime import importlib import io import os import sys from unittest import mock from django.apps import apps from django.core.management import CommandError, call_command from django.db import ( ConnectionHandler, DatabaseError, OperationalError, connection, connections, models, ) from django.db.backends.base.schema import BaseDatabaseSchemaEditor from django.db.backends.utils import truncate_name from django.db.migrations.exceptions import InconsistentMigrationHistory from django.db.migrations.recorder import MigrationRecorder from django.test import TestCase, override_settings, skipUnlessDBFeature from .models import UnicodeModel, UnserializableModel from .routers import TestRouter from .test_base import MigrationTestBase class MigrateTests(MigrationTestBase): """ Tests running the migrate command. """ databases = {'default', 'other'} @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_migrate(self): """ Tests basic usage of the migrate command. """ # No tables are created self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableNotExists("migrations_book") # Run the migrations to 0001 only stdout = io.StringIO() call_command('migrate', 'migrations', '0001', verbosity=2, stdout=stdout, no_color=True) stdout = stdout.getvalue() self.assertIn('Target specific migration: 0001_initial, from migrations', stdout) self.assertIn('Applying migrations.0001_initial... OK', stdout) self.assertIn('Running pre-migrate handlers for application migrations', stdout) self.assertIn('Running post-migrate handlers for application migrations', stdout) # The correct tables exist self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") self.assertTableNotExists("migrations_book") # Run migrations all the way call_command("migrate", verbosity=0) # The correct tables exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableExists("migrations_book") # Unmigrate everything stdout = io.StringIO() call_command('migrate', 'migrations', 'zero', verbosity=2, stdout=stdout, no_color=True) stdout = stdout.getvalue() self.assertIn('Unapply all migrations: migrations', stdout) self.assertIn('Unapplying migrations.0002_second... OK', stdout) self.assertIn('Running pre-migrate handlers for application migrations', stdout) self.assertIn('Running post-migrate handlers for application migrations', stdout) # Tables are gone self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableNotExists("migrations_book") @override_settings(INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'migrations.migrations_test_apps.migrated_app', ]) def test_migrate_with_system_checks(self): out = io.StringIO() call_command('migrate', skip_checks=False, no_color=True, stdout=out) self.assertIn('Apply all migrations: migrated_app', out.getvalue()) @override_settings(INSTALLED_APPS=['migrations', 'migrations.migrations_test_apps.unmigrated_app_syncdb']) def test_app_without_migrations(self): msg = "App 'unmigrated_app_syncdb' does not have migrations." with self.assertRaisesMessage(CommandError, msg): call_command('migrate', app_label='unmigrated_app_syncdb') @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_clashing_prefix'}) def test_ambiguous_prefix(self): msg = ( "More than one migration matches 'a' in app 'migrations'. Please " "be more specific." 
) with self.assertRaisesMessage(CommandError, msg): call_command('migrate', app_label='migrations', migration_name='a') @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_unknown_prefix(self): msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'." with self.assertRaisesMessage(CommandError, msg): call_command('migrate', app_label='migrations', migration_name='nonexistent') @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"}) def test_migrate_initial_false(self): """ `Migration.initial = False` skips fake-initial detection. """ # Make sure no tables are created self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") # Run the migrations to 0001 only call_command("migrate", "migrations", "0001", verbosity=0) # Fake rollback call_command("migrate", "migrations", "zero", fake=True, verbosity=0) # Make sure fake-initial detection does not run with self.assertRaises(DatabaseError): call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0) call_command("migrate", "migrations", "0001", fake=True, verbosity=0) # Real rollback call_command("migrate", "migrations", "zero", verbosity=0) # Make sure it's all gone self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableNotExists("migrations_book") @override_settings( MIGRATION_MODULES={"migrations": "migrations.test_migrations"}, DATABASE_ROUTERS=['migrations.routers.TestRouter'], ) def test_migrate_fake_initial(self): """ --fake-initial only works if all tables created in the initial migration of an app exists. Database routers must be obeyed when doing that check. """ # Make sure no tables are created for db in self.databases: self.assertTableNotExists("migrations_author", using=db) self.assertTableNotExists("migrations_tribble", using=db) # Run the migrations to 0001 only call_command("migrate", "migrations", "0001", verbosity=0) call_command("migrate", "migrations", "0001", verbosity=0, database="other") # Make sure the right tables exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") # Also check the "other" database self.assertTableNotExists("migrations_author", using="other") self.assertTableExists("migrations_tribble", using="other") # Fake a roll-back call_command("migrate", "migrations", "zero", fake=True, verbosity=0) call_command("migrate", "migrations", "zero", fake=True, verbosity=0, database="other") # Make sure the tables still exist self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble", using="other") # Try to run initial migration with self.assertRaises(DatabaseError): call_command("migrate", "migrations", "0001", verbosity=0) # Run initial migration with an explicit --fake-initial out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command("migrate", "migrations", "0001", fake_initial=True, stdout=out, verbosity=1) call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0, database="other") self.assertIn( "migrations.0001_initial... faked", out.getvalue().lower() ) try: # Run migrations all the way. 
call_command('migrate', verbosity=0) call_command('migrate', verbosity=0, database="other") self.assertTableExists('migrations_author') self.assertTableNotExists('migrations_tribble') self.assertTableExists('migrations_book') self.assertTableNotExists('migrations_author', using='other') self.assertTableNotExists('migrations_tribble', using='other') self.assertTableNotExists('migrations_book', using='other') # Fake a roll-back. call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0) call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0, database='other') self.assertTableExists('migrations_author') self.assertTableNotExists('migrations_tribble') self.assertTableExists('migrations_book') # Run initial migration. with self.assertRaises(DatabaseError): call_command('migrate', 'migrations', verbosity=0) # Run initial migration with an explicit --fake-initial. with self.assertRaises(DatabaseError): # Fails because "migrations_tribble" does not exist but needs # to in order to make --fake-initial work. call_command('migrate', 'migrations', fake_initial=True, verbosity=0) # Fake an apply. call_command('migrate', 'migrations', fake=True, verbosity=0) call_command('migrate', 'migrations', fake=True, verbosity=0, database='other') finally: # Unmigrate everything. call_command('migrate', 'migrations', 'zero', verbosity=0) call_command('migrate', 'migrations', 'zero', verbosity=0, database='other') # Make sure it's all gone for db in self.databases: self.assertTableNotExists("migrations_author", using=db) self.assertTableNotExists("migrations_tribble", using=db) self.assertTableNotExists("migrations_book", using=db) @skipUnlessDBFeature('ignores_table_name_case') def test_migrate_fake_initial_case_insensitive(self): with override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_fake_initial_case_insensitive.initial', }): call_command('migrate', 'migrations', '0001', verbosity=0) call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0) with override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_fake_initial_case_insensitive.fake_initial', }): out = io.StringIO() call_command( 'migrate', 'migrations', '0001', fake_initial=True, stdout=out, verbosity=1, no_color=True, ) self.assertIn( 'migrations.0001_initial... faked', out.getvalue().lower(), ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_fake_split_initial"}) def test_migrate_fake_split_initial(self): """ Split initial migrations can be faked with --fake-initial. """ try: call_command('migrate', 'migrations', '0002', verbosity=0) call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0) out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command('migrate', 'migrations', '0002', fake_initial=True, stdout=out, verbosity=1) value = out.getvalue().lower() self.assertIn('migrations.0001_initial... faked', value) self.assertIn('migrations.0002_second... faked', value) finally: # Fake an apply. call_command('migrate', 'migrations', fake=True, verbosity=0) # Unmigrate everything. call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"}) def test_migrate_conflict_exit(self): """ migrate exits if it detects a conflict. 
""" msg = ( "Conflicting migrations detected; multiple leaf nodes in the " "migration graph: (0002_conflicting_second, 0002_second in " "migrations).\n" "To fix them run 'python manage.py makemigrations --merge'" ) with self.assertRaisesMessage(CommandError, msg): call_command("migrate", "migrations") @override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_migrations', }) def test_migrate_check(self): with self.assertRaises(SystemExit): call_command('migrate', 'migrations', '0001', check_unapplied=True) self.assertTableNotExists('migrations_author') self.assertTableNotExists('migrations_tribble') self.assertTableNotExists('migrations_book') @override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_migrations_plan', }) def test_migrate_check_plan(self): out = io.StringIO() with self.assertRaises(SystemExit): call_command( 'migrate', 'migrations', '0001', check_unapplied=True, plan=True, stdout=out, no_color=True, ) self.assertEqual( 'Planned operations:\n' 'migrations.0001_initial\n' ' Create model Salamander\n' ' Raw Python operation -> Grow salamander tail.\n', out.getvalue(), ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_showmigrations_list(self): """ showmigrations --list displays migrations and whether or not they're applied. """ out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: True): call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False) self.assertEqual( '\x1b[1mmigrations\n\x1b[0m' ' [ ] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) call_command("migrate", "migrations", "0001", verbosity=0) out = io.StringIO() # Giving the explicit app_label tests for selective `show_list` in the command call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) out = io.StringIO() # Applied datetimes are displayed at verbosity 2+. call_command('showmigrations', 'migrations', stdout=out, verbosity=2, no_color=True) migration1 = MigrationRecorder(connection).migration_qs.get(app='migrations', name='0001_initial') self.assertEqual( 'migrations\n' ' [x] 0001_initial (applied at %s)\n' ' [ ] 0002_second\n' % migration1.applied.strftime('%Y-%m-%d %H:%M:%S'), out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_showmigrations_list_squashed(self): out = io.StringIO() call_command('showmigrations', format='list', stdout=out, verbosity=2, no_color=True) self.assertEqual( 'migrations\n' ' [ ] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower(), ) out = io.StringIO() call_command( 'migrate', 'migrations', '0001_squashed_0002', stdout=out, verbosity=2, no_color=True, ) try: self.assertIn( 'operations to perform:\n' ' target specific migration: 0001_squashed_0002, from migrations\n' 'running pre-migrate handlers for application migrations\n' 'running migrations:\n' ' applying migrations.0001_squashed_0002... ok (', out.getvalue().lower(), ) out = io.StringIO() call_command('showmigrations', format='list', stdout=out, verbosity=2, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower(), ) finally: # Unmigrate everything. 
call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}) def test_showmigrations_plan(self): """ Tests --plan output of showmigrations command """ out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) call_command("migrate", "migrations", "0003", verbosity=0) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_plan'}) def test_migrate_plan(self): """Tests migrate --plan output.""" out = io.StringIO() # Show the plan up to the third migration. call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0001_initial\n' ' Create model Salamander\n' ' Raw Python operation -> Grow salamander tail.\n' 'migrations.0002_second\n' ' Create model Book\n' " Raw SQL operation -> ['SELECT * FROM migrations_book']\n" 'migrations.0003_third\n' ' Create model Author\n' " Raw SQL operation -> ['SELECT * FROM migrations_author']\n", out.getvalue() ) try: # Migrate to the third migration. call_command('migrate', 'migrations', '0003', verbosity=0) out = io.StringIO() # Show the plan for when there is nothing to apply. call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' ' No planned migration operations.\n', out.getvalue() ) out = io.StringIO() # Show the plan for reverse migration back to 0001. call_command('migrate', 'migrations', '0001', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0003_third\n' ' Undo Create model Author\n' " Raw SQL operation -> ['SELECT * FROM migrations_book']\n" 'migrations.0002_second\n' ' Undo Create model Book\n' " Raw SQL operation -> ['SELECT * FROM migrations_salamand…\n", out.getvalue() ) out = io.StringIO() # Show the migration plan to fourth, with truncated details. call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0004_fourth\n' ' Raw SQL operation -> SELECT * FROM migrations_author WHE…\n', out.getvalue() ) # Show the plan when an operation is irreversible. # Migrate to the fourth migration. 
call_command('migrate', 'migrations', '0004', verbosity=0) out = io.StringIO() call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0004_fourth\n' ' Raw SQL operation -> IRREVERSIBLE\n', out.getvalue() ) out = io.StringIO() call_command('migrate', 'migrations', '0005', plan=True, stdout=out, no_color=True) # Operation is marked as irreversible only in the revert plan. self.assertEqual( 'Planned operations:\n' 'migrations.0005_fifth\n' ' Raw Python operation\n' ' Raw Python operation\n' ' Raw Python operation -> Feed salamander.\n', out.getvalue() ) call_command('migrate', 'migrations', '0005', verbosity=0) out = io.StringIO() call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0005_fifth\n' ' Raw Python operation -> IRREVERSIBLE\n' ' Raw Python operation -> IRREVERSIBLE\n' ' Raw Python operation\n', out.getvalue() ) finally: # Cleanup by unmigrating everything: fake the irreversible, then # migrate all to zero. call_command('migrate', 'migrations', '0003', fake=True, verbosity=0) call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_empty'}) def test_showmigrations_no_migrations(self): out = io.StringIO() call_command('showmigrations', stdout=out, no_color=True) self.assertEqual('migrations\n (no migrations)\n', out.getvalue().lower()) @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app']) def test_showmigrations_unmigrated_app(self): out = io.StringIO() call_command('showmigrations', 'unmigrated_app', stdout=out, no_color=True) try: self.assertEqual('unmigrated_app\n (no migrations)\n', out.getvalue().lower()) finally: # unmigrated_app.SillyModel has a foreign key to # 'migrations.Tribble', but that model is only defined in a # migration, so the global app registry never sees it and the # reference is left dangling. Remove it to avoid problems in # subsequent tests. apps._pending_operations.pop(('migrations', 'tribble'), None) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}) def test_showmigrations_plan_no_migrations(self): """ Tests --plan output of showmigrations command without migrations """ out = io.StringIO() call_command('showmigrations', format='plan', stdout=out, no_color=True) self.assertEqual('(no migrations)\n', out.getvalue().lower()) out = io.StringIO() call_command('showmigrations', format='plan', stdout=out, verbosity=2, no_color=True) self.assertEqual('(no migrations)\n', out.getvalue().lower()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}) def test_showmigrations_plan_squashed(self): """ Tests --plan output of showmigrations command with squashed migrations. """ out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[ ] migrations.1_auto\n" "[ ] migrations.2_auto\n" "[ ] migrations.3_squashed_5\n" "[ ] migrations.6_auto\n" "[ ] migrations.7_auto\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[ ] migrations.1_auto\n" "[ ] migrations.2_auto ... (migrations.1_auto)\n" "[ ] migrations.3_squashed_5 ... (migrations.2_auto)\n" "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n" "[ ] migrations.7_auto ... 
(migrations.6_auto)\n", out.getvalue().lower() ) call_command("migrate", "migrations", "3_squashed_5", verbosity=0) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[x] migrations.1_auto\n" "[x] migrations.2_auto\n" "[x] migrations.3_squashed_5\n" "[ ] migrations.6_auto\n" "[ ] migrations.7_auto\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[x] migrations.1_auto\n" "[x] migrations.2_auto ... (migrations.1_auto)\n" "[x] migrations.3_squashed_5 ... (migrations.2_auto)\n" "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n" "[ ] migrations.7_auto ... (migrations.6_auto)\n", out.getvalue().lower() ) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_b', 'migrations.migrations_test_apps.alter_fk.author_app', 'migrations.migrations_test_apps.alter_fk.book_app', ]) def test_showmigrations_plan_single_app_label(self): """ `showmigrations --plan app_label` output with a single app_label. """ # Single app with no dependencies on other apps. out = io.StringIO() call_command('showmigrations', 'mutate_state_b', format='plan', stdout=out) self.assertEqual( '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) # Single app with dependencies. out = io.StringIO() call_command('showmigrations', 'author_app', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n', out.getvalue() ) # Some migrations already applied. call_command('migrate', 'author_app', '0001', verbosity=0) out = io.StringIO() call_command('showmigrations', 'author_app', format='plan', stdout=out) self.assertEqual( '[X] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n', out.getvalue() ) # Cleanup by unmigrating author_app. call_command('migrate', 'author_app', 'zero', verbosity=0) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_b', 'migrations.migrations_test_apps.alter_fk.author_app', 'migrations.migrations_test_apps.alter_fk.book_app', ]) def test_showmigrations_plan_multiple_app_labels(self): """ `showmigrations --plan app_label` output with multiple app_labels. """ # Multiple apps: author_app depends on book_app; mutate_state_b doesn't # depend on other apps. out = io.StringIO() call_command('showmigrations', 'mutate_state_b', 'author_app', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n' '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) # Multiple apps: args order shouldn't matter (the same result is # expected as above). 
out = io.StringIO() call_command('showmigrations', 'author_app', 'mutate_state_b', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n' '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app']) def test_showmigrations_plan_app_label_no_migrations(self): out = io.StringIO() call_command('showmigrations', 'unmigrated_app', format='plan', stdout=out, no_color=True) try: self.assertEqual('(no migrations)\n', out.getvalue()) finally: # unmigrated_app.SillyModel has a foreign key to # 'migrations.Tribble', but that model is only defined in a # migration, so the global app registry never sees it and the # reference is left dangling. Remove it to avoid problems in # subsequent tests. apps._pending_operations.pop(('migrations', 'tribble'), None) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_sqlmigrate_forwards(self): """ sqlmigrate outputs forward looking SQL. """ out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_author = output.find('-- create model author') index_create_table = output.find('create table') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_unique_together = output.find('-- alter unique_together') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) if connection.features.can_rollback_ddl: self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before operation description (unique_together)" ) self.assertGreater( index_op_desc_author, index_tx_start, "Operation description (author) not found or found before transaction start" ) self.assertGreater( index_create_table, index_op_desc_author, "CREATE TABLE not found or found before operation description (author)" ) self.assertGreater( index_op_desc_tribble, index_create_table, "Operation description (tribble) not found or found before CREATE TABLE (author)" ) self.assertGreater( index_op_desc_unique_together, index_op_desc_tribble, "Operation description (unique_together) not found or found before operation description (tribble)" ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_sqlmigrate_backwards(self): """ sqlmigrate outputs reverse looking SQL. """ # Cannot generate the reverse SQL unless we've applied the migration. 
call_command("migrate", "migrations", verbosity=0) out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_unique_together = output.find('-- alter unique_together') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_author = output.find('-- create model author') index_drop_table = output.rfind('drop table') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) if connection.features.can_rollback_ddl: self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before DROP TABLE" ) self.assertGreater( index_op_desc_unique_together, index_tx_start, "Operation description (unique_together) not found or found before transaction start" ) self.assertGreater( index_op_desc_tribble, index_op_desc_unique_together, "Operation description (tribble) not found or found before operation description (unique_together)" ) self.assertGreater( index_op_desc_author, index_op_desc_tribble, "Operation description (author) not found or found before operation description (tribble)" ) self.assertGreater( index_drop_table, index_op_desc_author, "DROP TABLE not found or found before operation description (author)" ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}) def test_sqlmigrate_for_non_atomic_migration(self): """ Transaction wrappers aren't shown for non-atomic migrations. """ out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() queries = [q.strip() for q in output.splitlines()] if connection.ops.start_transaction_sql(): self.assertNotIn(connection.ops.start_transaction_sql().lower(), queries) self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_sqlmigrate_for_non_transactional_databases(self): """ Transaction wrappers aren't shown for databases that don't support transactional DDL. """ out = io.StringIO() with mock.patch.object(connection.features, 'can_rollback_ddl', False): call_command('sqlmigrate', 'migrations', '0001', stdout=out) output = out.getvalue().lower() queries = [q.strip() for q in output.splitlines()] start_transaction_sql = connection.ops.start_transaction_sql() if start_transaction_sql: self.assertNotIn(start_transaction_sql.lower(), queries) self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_sqlmigrate_ambiguous_prefix_squashed_migrations(self): msg = ( "More than one migration matches '0001' in app 'migrations'. " "Please be more specific." 
) with self.assertRaisesMessage(CommandError, msg): call_command('sqlmigrate', 'migrations', '0001') @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_sqlmigrate_squashed_migration(self): out = io.StringIO() call_command('sqlmigrate', 'migrations', '0001_squashed_0002', stdout=out) output = out.getvalue().lower() self.assertIn('-- create model author', output) self.assertIn('-- create model book', output) self.assertNotIn('-- create model tribble', output) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_sqlmigrate_replaced_migration(self): out = io.StringIO() call_command('sqlmigrate', 'migrations', '0001_initial', stdout=out) output = out.getvalue().lower() self.assertIn('-- create model author', output) self.assertIn('-- create model tribble', output) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_no_operations'}) def test_migrations_no_operations(self): err = io.StringIO() call_command('sqlmigrate', 'migrations', '0001_initial', stderr=err) self.assertEqual(err.getvalue(), 'No operations found.\n') @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.migrated_unapplied_app", "migrations.migrations_test_apps.unmigrated_app", ], ) def test_regression_22823_unmigrated_fk_to_migrated_model(self): """ Assuming you have 3 apps, `A`, `B`, and `C`, such that: * `A` has migrations * `B` has a migration we want to apply * `C` has no migrations, but has an FK to `A` When we try to migrate "B", an exception occurs because the "B" was not included in the ProjectState that is used to detect soft-applied migrations (#22823). """ call_command('migrate', 'migrated_unapplied_app', verbosity=0) # unmigrated_app.SillyModel has a foreign key to 'migrations.Tribble', # but that model is only defined in a migration, so the global app # registry never sees it and the reference is left dangling. Remove it # to avoid problems in subsequent tests. apps._pending_operations.pop(('migrations', 'tribble'), None) @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app_syncdb']) def test_migrate_syncdb_deferred_sql_executed_with_schemaeditor(self): """ For an app without migrations, editor.execute() is used for executing the syncdb deferred SQL. """ stdout = io.StringIO() with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute: call_command('migrate', run_syncdb=True, verbosity=1, stdout=stdout, no_color=True) create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)]) self.assertEqual(create_table_count, 2) # There's at least one deferred SQL for creating the foreign key # index. self.assertGreater(len(execute.mock_calls), 2) stdout = stdout.getvalue() self.assertIn('Synchronize unmigrated apps: unmigrated_app_syncdb', stdout) self.assertIn('Creating tables...', stdout) table_name = truncate_name('unmigrated_app_syncdb_classroom', connection.ops.max_name_length()) self.assertIn('Creating table %s' % table_name, stdout) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_migrate_syncdb_app_with_migrations(self): msg = "Can't use run_syncdb with app 'migrations' as it has migrations." 
with self.assertRaisesMessage(CommandError, msg): call_command('migrate', 'migrations', run_syncdb=True, verbosity=0) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.unmigrated_app_syncdb', 'migrations.migrations_test_apps.unmigrated_app_simple', ]) def test_migrate_syncdb_app_label(self): """ Running migrate --run-syncdb with an app_label only creates tables for the specified app. """ stdout = io.StringIO() with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute: call_command('migrate', 'unmigrated_app_syncdb', run_syncdb=True, stdout=stdout) create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)]) self.assertEqual(create_table_count, 2) self.assertGreater(len(execute.mock_calls), 2) self.assertIn('Synchronize unmigrated app: unmigrated_app_syncdb', stdout.getvalue()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_record_replaced(self): """ Running a single squashed migration should record all of the original replaced migrations as run. """ recorder = MigrationRecorder(connection) out = io.StringIO() call_command("migrate", "migrations", verbosity=0) call_command("showmigrations", "migrations", stdout=out, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower() ) applied_migrations = recorder.applied_migrations() self.assertIn(("migrations", "0001_initial"), applied_migrations) self.assertIn(("migrations", "0002_second"), applied_migrations) self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations) # Rollback changes call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_record_squashed(self): """ Running migrate for a squashed migration should record as run if all of the replaced migrations have been run (#25231). """ recorder = MigrationRecorder(connection) recorder.record_applied("migrations", "0001_initial") recorder.record_applied("migrations", "0002_second") out = io.StringIO() call_command("migrate", "migrations", verbosity=0) call_command("showmigrations", "migrations", stdout=out, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower() ) self.assertIn( ("migrations", "0001_squashed_0002"), recorder.applied_migrations() ) # No changes were actually applied so there is nothing to rollback @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_migrate_inconsistent_history(self): """ Running migrate with some migrations applied before their dependencies should not be allowed. 
""" recorder = MigrationRecorder(connection) recorder.record_applied("migrations", "0002_second") msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("migrate") applied_migrations = recorder.applied_migrations() self.assertNotIn(("migrations", "0001_initial"), applied_migrations) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.migrated_unapplied_app', 'migrations.migrations_test_apps.migrated_app', ]) def test_migrate_not_reflected_changes(self): class NewModel1(models.Model): class Meta(): app_label = 'migrated_app' class NewModel2(models.Model): class Meta(): app_label = 'migrated_unapplied_app' out = io.StringIO() try: call_command('migrate', verbosity=0) call_command('migrate', stdout=out, no_color=True) self.assertEqual( "operations to perform:\n" " apply all migrations: migrated_app, migrated_unapplied_app\n" "running migrations:\n" " no migrations to apply.\n" " your models in app(s): 'migrated_app', " "'migrated_unapplied_app' have changes that are not yet " "reflected in a migration, and so won't be applied.\n" " run 'manage.py makemigrations' to make new migrations, and " "then re-run 'manage.py migrate' to apply them.\n", out.getvalue().lower(), ) finally: # Unmigrate everything. call_command('migrate', 'migrated_app', 'zero', verbosity=0) call_command('migrate', 'migrated_unapplied_app', 'zero', verbosity=0) class MakeMigrationsTests(MigrationTestBase): """ Tests running the makemigrations command. """ def setUp(self): super().setUp() self._old_models = apps.app_configs['migrations'].models.copy() def tearDown(self): apps.app_configs['migrations'].models = self._old_models apps.all_models['migrations'] = self._old_models apps.clear_cache() super().tearDown() def test_files_content(self): self.assertTableNotExists("migrations_unicodemodel") apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) # Check for empty __init__.py file in migrations folder init_file = os.path.join(migration_dir, "__init__.py") self.assertTrue(os.path.exists(init_file)) with open(init_file) as fp: content = fp.read() self.assertEqual(content, '') # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with open(initial_file, encoding='utf-8') as fp: content = fp.read() self.assertIn('migrations.CreateModel', content) self.assertIn('initial = True', content) self.assertIn('úñí©óðé µóðéø', content) # Meta.verbose_name self.assertIn('úñí©óðé µóðéøß', content) # Meta.verbose_name_plural self.assertIn('ÚÑÍ¢ÓÐÉ', content) # title.verbose_name self.assertIn('“Ðjáñgó”', content) # title.default def test_makemigrations_order(self): """ makemigrations should recognize number-only migrations (0001.py). """ module = 'migrations.test_migrations_order' with self.temporary_migration_module(module=module) as migration_dir: if hasattr(importlib, 'invalidate_caches'): # importlib caches os.listdir() on some platforms like macOS # (#23850). 
importlib.invalidate_caches() call_command('makemigrations', 'migrations', '--empty', '-n', 'a', '-v', '0') self.assertTrue(os.path.exists(os.path.join(migration_dir, '0002_a.py'))) def test_makemigrations_empty_connections(self): empty_connections = ConnectionHandler({'default': {}}) with mock.patch('django.core.management.commands.makemigrations.connections', new=empty_connections): # with no apps out = io.StringIO() call_command('makemigrations', stdout=out) self.assertIn('No changes detected', out.getvalue()) # with an app with self.temporary_migration_module() as migration_dir: call_command('makemigrations', 'migrations', verbosity=0) init_file = os.path.join(migration_dir, '__init__.py') self.assertTrue(os.path.exists(init_file)) @override_settings(INSTALLED_APPS=['migrations', 'migrations2']) def test_makemigrations_consistency_checks_respect_routers(self): """ The history consistency checks in makemigrations respect settings.DATABASE_ROUTERS. """ def patched_has_table(migration_recorder): if migration_recorder.connection is connections['other']: raise Exception('Other connection') else: return mock.DEFAULT self.assertTableNotExists('migrations_unicodemodel') apps.register_model('migrations', UnicodeModel) with mock.patch.object( MigrationRecorder, 'has_table', autospec=True, side_effect=patched_has_table) as has_table: with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) self.assertEqual(has_table.call_count, 1) # 'default' is checked # Router says not to migrate 'other' so consistency shouldn't # be checked. with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']): call_command('makemigrations', 'migrations', verbosity=0) self.assertEqual(has_table.call_count, 2) # 'default' again # With a router that doesn't prohibit migrating 'other', # consistency is checked. with self.settings(DATABASE_ROUTERS=['migrations.routers.DefaultOtherRouter']): with self.assertRaisesMessage(Exception, 'Other connection'): call_command('makemigrations', 'migrations', verbosity=0) self.assertEqual(has_table.call_count, 4) # 'default' and 'other' # With a router that doesn't allow migrating on any database, # no consistency checks are made. with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']): with mock.patch.object(TestRouter, 'allow_migrate', return_value=False) as allow_migrate: call_command('makemigrations', 'migrations', verbosity=0) allow_migrate.assert_any_call('other', 'migrations', model_name='UnicodeModel') # allow_migrate() is called with the correct arguments. self.assertGreater(len(allow_migrate.mock_calls), 0) called_aliases = set() for mock_call in allow_migrate.mock_calls: _, call_args, call_kwargs = mock_call connection_alias, app_name = call_args called_aliases.add(connection_alias) # Raises an error if invalid app_name/model_name occurs. apps.get_app_config(app_name).get_model(call_kwargs['model_name']) self.assertEqual(called_aliases, set(connections)) self.assertEqual(has_table.call_count, 4) def test_failing_migration(self): # If a migration fails to serialize, it shouldn't generate an empty file. 
#21280 apps.register_model('migrations', UnserializableModel) with self.temporary_migration_module() as migration_dir: with self.assertRaisesMessage(ValueError, 'Cannot serialize'): call_command("makemigrations", "migrations", verbosity=0) initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertFalse(os.path.exists(initial_file)) def test_makemigrations_conflict_exit(self): """ makemigrations exits if it detects a conflict. """ with self.temporary_migration_module(module="migrations.test_migrations_conflict"): with self.assertRaises(CommandError) as context: call_command("makemigrations") self.assertEqual( str(context.exception), "Conflicting migrations detected; multiple leaf nodes in the " "migration graph: (0002_conflicting_second, 0002_second in " "migrations).\n" "To fix them run 'python manage.py makemigrations --merge'" ) def test_makemigrations_merge_no_conflict(self): """ makemigrations exits if in merge mode with no conflicts. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("makemigrations", merge=True, stdout=out) self.assertIn("No conflicts detected to merge.", out.getvalue()) def test_makemigrations_empty_no_app_specified(self): """ makemigrations exits if no app is specified with 'empty' mode. """ msg = 'You must supply at least one app label when using --empty.' with self.assertRaisesMessage(CommandError, msg): call_command("makemigrations", empty=True) def test_makemigrations_empty_migration(self): """ makemigrations properly constructs an empty migration. """ with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", empty=True, verbosity=0) # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with open(initial_file, encoding='utf-8') as fp: content = fp.read() # Remove all whitespace to check for empty dependencies and operations content = content.replace(' ', '') self.assertIn('dependencies=[\n]', content) self.assertIn('operations=[\n]', content) @override_settings(MIGRATION_MODULES={"migrations": None}) def test_makemigrations_disabled_migrations_for_app(self): """ makemigrations raises a nice error when migrations are disabled for an app. """ msg = ( "Django can't create migrations for app 'migrations' because migrations " "have been disabled via the MIGRATION_MODULES setting." ) with self.assertRaisesMessage(ValueError, msg): call_command("makemigrations", "migrations", empty=True, verbosity=0) def test_makemigrations_no_changes_no_apps(self): """ makemigrations exits when there are no changes and no apps are specified. """ out = io.StringIO() call_command("makemigrations", stdout=out) self.assertIn("No changes detected", out.getvalue()) def test_makemigrations_no_changes(self): """ makemigrations exits when there are no changes to an app. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", stdout=out) self.assertIn("No changes detected in app 'migrations'", out.getvalue()) def test_makemigrations_no_apps_initial(self): """ makemigrations should detect initial is needed on empty migration modules if no app provided. 
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_empty"): call_command("makemigrations", stdout=out) self.assertIn("0001_initial.py", out.getvalue()) def test_makemigrations_no_init(self): """Migration directories without an __init__.py file are allowed.""" out = io.StringIO() with self.temporary_migration_module(module='migrations.test_migrations_no_init'): call_command('makemigrations', stdout=out) self.assertIn('0001_initial.py', out.getvalue()) def test_makemigrations_migrations_announce(self): """ makemigrations announces the migration at the default verbosity level. """ out = io.StringIO() with self.temporary_migration_module(): call_command("makemigrations", "migrations", stdout=out) self.assertIn("Migrations for 'migrations'", out.getvalue()) def test_makemigrations_no_common_ancestor(self): """ makemigrations fails to merge migrations with no common ancestor. """ with self.assertRaises(ValueError) as context: with self.temporary_migration_module(module="migrations.test_migrations_no_ancestor"): call_command("makemigrations", "migrations", merge=True) exception_message = str(context.exception) self.assertIn("Could not find common ancestor of", exception_message) self.assertIn("0002_second", exception_message) self.assertIn("0002_conflicting_second", exception_message) def test_makemigrations_interactive_reject(self): """ makemigrations enters and exits interactive mode properly. """ # Monkeypatch interactive questioner to auto reject with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, verbosity=0) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) def test_makemigrations_interactive_accept(self): """ makemigrations enters interactive mode and merges properly. 
""" # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) def test_makemigrations_default_merge_name(self): out = io.StringIO() with self.temporary_migration_module( module='migrations.test_migrations_conflict' ) as migration_dir: call_command('makemigrations', 'migrations', merge=True, interactive=False, stdout=out) merge_file = os.path.join( migration_dir, '0003_merge_0002_conflicting_second_0002_second.py', ) self.assertIs(os.path.exists(merge_file), True) self.assertIn('Created new merge migration %s' % merge_file, out.getvalue()) @mock.patch('django.db.migrations.utils.datetime') def test_makemigrations_auto_merge_name(self, mock_datetime): mock_datetime.datetime.now.return_value = datetime.datetime(2016, 1, 2, 3, 4) with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module( module='migrations.test_migrations_conflict_long_name' ) as migration_dir: call_command("makemigrations", "migrations", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge_20160102_0304.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) def test_makemigrations_non_interactive_not_null_addition(self): """ Non-interactive makemigrations fails when a default is missing on a new not-null field. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_int = models.IntegerField() class Meta: app_label = "migrations" with self.assertRaises(SystemExit): with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False) def test_makemigrations_non_interactive_not_null_alteration(self): """ Non-interactive makemigrations fails when a default is missing on a field changed to not-null. """ class Author(models.Model): name = models.CharField(max_length=255) slug = models.SlugField() age = models.IntegerField(default=0) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Alter field slug on author", out.getvalue()) def test_makemigrations_non_interactive_no_model_rename(self): """ makemigrations adds and removes a possible model rename in non-interactive mode. """ class RenamedModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Delete model SillyModel", out.getvalue()) self.assertIn("Create model RenamedModel", out.getvalue()) def test_makemigrations_non_interactive_no_field_rename(self): """ makemigrations adds and removes a possible field rename in non-interactive mode. 
""" class SillyModel(models.Model): silly_rename = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Remove field silly_field from sillymodel", out.getvalue()) self.assertIn("Add field silly_rename to sillymodel", out.getvalue()) def test_makemigrations_handle_merge(self): """ makemigrations properly merges the conflicting migrations with --noinput. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=False, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertIn("Created new merge migration", output) def test_makemigration_merge_dry_run(self): """ makemigrations respects --dry-run option when fixing migration conflicts (#24427). """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) def test_makemigration_merge_dry_run_verbosity_3(self): """ `makemigrations --merge --dry-run` writes the merge migration file to stdout with `verbosity == 3` (#24427). """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, verbosity=3, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) # Additional output caused by verbosity 3 # The complete merge migration file that would be written self.assertIn("class Migration(migrations.Migration):", output) self.assertIn("dependencies = [", output) self.assertIn("('migrations', '0002_second')", output) self.assertIn("('migrations', '0002_conflicting_second')", output) self.assertIn("operations = [", output) self.assertIn("]", output) def test_makemigrations_dry_run(self): """ `makemigrations --dry-run` should not ask for defaults. 
""" class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_date = models.DateField() # Added field without a default class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out) # Output the expected changes directly, without asking for defaults self.assertIn("Add field silly_date to sillymodel", out.getvalue()) def test_makemigrations_dry_run_verbosity_3(self): """ Allow `makemigrations --dry-run` to output the migrations file to stdout (with verbosity == 3). """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_char = models.CharField(default="") class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3) # Normal --dry-run output self.assertIn("- Add field silly_char to sillymodel", out.getvalue()) # Additional output caused by verbosity 3 # The complete migrations file that would be written self.assertIn("class Migration(migrations.Migration):", out.getvalue()) self.assertIn("dependencies = [", out.getvalue()) self.assertIn("('migrations', '0001_initial'),", out.getvalue()) self.assertIn("migrations.AddField(", out.getvalue()) self.assertIn("model_name='sillymodel',", out.getvalue()) self.assertIn("name='silly_char',", out.getvalue()) def test_makemigrations_migrations_modules_path_not_exist(self): """ makemigrations creates migrations when specifying a custom location for migration files using MIGRATION_MODULES if the custom path doesn't already exist. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar" with self.temporary_migration_module(module=migration_module) as migration_dir: call_command("makemigrations", "migrations", stdout=out) # Migrations file is actually created in the expected path. initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) # Command output indicates the migration is created. self.assertIn(" - Create model SillyModel", out.getvalue()) @override_settings(MIGRATION_MODULES={'migrations': 'some.nonexistent.path'}) def test_makemigrations_migrations_modules_nonexistent_toplevel_package(self): msg = ( 'Could not locate an appropriate location to create migrations ' 'package some.nonexistent.path. Make sure the toplevel package ' 'exists and can be imported.' ) with self.assertRaisesMessage(ValueError, msg): call_command('makemigrations', 'migrations', empty=True, verbosity=0) def test_makemigrations_interactive_by_default(self): """ The user is prompted to merge by default if there are conflicts and merge is True. Answer negative to differentiate it from behavior when --noinput is specified. 
""" # Monkeypatch interactive questioner to auto reject out = io.StringIO() with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') # This will fail if interactive is False by default self.assertFalse(os.path.exists(merge_file)) self.assertNotIn("Created new merge migration", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_no_merge(self): """ makemigrations does not raise a CommandError when an unspecified app has conflicting migrations. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", merge=False, verbosity=0) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_merge(self): """ makemigrations does not create a merge for an unspecified app even if it has conflicting migrations. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(app_label="migrated_app") as migration_dir: call_command("makemigrations", "migrated_app", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) self.assertIn("No conflicts detected to merge.", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.conflicting_app_with_dependencies"]) def test_makemigrations_merge_dont_output_dependency_operations(self): """ makemigrations --merge does not output any operations from apps that don't belong to a given app. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='N')): out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command( "makemigrations", "conflicting_app_with_dependencies", merge=True, interactive=True, stdout=out ) self.assertEqual( out.getvalue().lower(), 'merging conflicting_app_with_dependencies\n' ' branch 0002_conflicting_second\n' ' - create model something\n' ' branch 0002_second\n' ' - delete model tribble\n' ' - remove field silly_field from author\n' ' - add field rating to author\n' ' - create model book\n' ) def test_makemigrations_with_custom_name(self): """ makemigrations --name generate a custom migration name. 
""" with self.temporary_migration_module() as migration_dir: def cmd(migration_count, migration_name, *args): call_command("makemigrations", "migrations", "--verbosity", "0", "--name", migration_name, *args) migration_file = os.path.join(migration_dir, "%s_%s.py" % (migration_count, migration_name)) # Check for existing migration file in migration folder self.assertTrue(os.path.exists(migration_file)) with open(migration_file, encoding='utf-8') as fp: content = fp.read() content = content.replace(" ", "") return content # generate an initial migration migration_name_0001 = "my_initial_migration" content = cmd("0001", migration_name_0001) self.assertIn("dependencies=[\n]", content) # importlib caches os.listdir() on some platforms like macOS # (#23850). if hasattr(importlib, 'invalidate_caches'): importlib.invalidate_caches() # generate an empty migration migration_name_0002 = "my_custom_migration" content = cmd("0002", migration_name_0002, "--empty") self.assertIn("dependencies=[\n('migrations','0001_%s'),\n]" % migration_name_0001, content) self.assertIn("operations=[\n]", content) def test_makemigrations_with_invalid_custom_name(self): msg = 'The migration name must be a valid Python identifier.' with self.assertRaisesMessage(CommandError, msg): call_command('makemigrations', 'migrations', '--name', 'invalid name', '--empty') def test_makemigrations_check(self): """ makemigrations --check should exit with a non-zero status when there are changes to an app requiring migrations. """ with self.temporary_migration_module(): with self.assertRaises(SystemExit): call_command("makemigrations", "--check", "migrations", verbosity=0) with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "--check", "migrations", verbosity=0) def test_makemigrations_migration_path_output(self): """ makemigrations should print the relative paths to the migrations unless they are outside of the current tree, in which case the absolute path should be shown. """ out = io.StringIO() apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_migration_path_output_valueerror(self): """ makemigrations prints the absolute path if os.path.relpath() raises a ValueError when it's impossible to obtain a relative path, e.g. on Windows if Django is installed on a different drive than where the migration files are created. """ out = io.StringIO() with self.temporary_migration_module() as migration_dir: with mock.patch('os.path.relpath', side_effect=ValueError): call_command('makemigrations', 'migrations', stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_inconsistent_history(self): """ makemigrations should raise InconsistentMigrationHistory exception if there are some migrations applied before their dependencies. 
""" recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0002_second') msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.temporary_migration_module(module="migrations.test_migrations"): with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("makemigrations") def test_makemigrations_inconsistent_history_db_failure(self): msg = ( "Got an error checking a consistent migration history performed " "for database connection 'default': could not connect to server" ) with mock.patch( 'django.db.migrations.loader.MigrationLoader.check_consistent_history', side_effect=OperationalError('could not connect to server'), ): with self.temporary_migration_module(): with self.assertWarns(RuntimeWarning) as cm: call_command('makemigrations', verbosity=0) self.assertEqual(str(cm.warning), msg) @mock.patch('builtins.input', return_value='1') @mock.patch('django.db.migrations.questioner.sys.stdin', mock.MagicMock(encoding=sys.getdefaultencoding())) def test_makemigrations_auto_now_add_interactive(self, *args): """ makemigrations prompts the user when adding auto_now_add to an existing model. """ class Entry(models.Model): title = models.CharField(max_length=255) creation_date = models.DateTimeField(auto_now_add=True) class Meta: app_label = 'migrations' # Monkeypatch interactive questioner to auto accept with mock.patch('django.db.migrations.questioner.sys.stdout', new_callable=io.StringIO) as prompt_stdout: out = io.StringIO() with self.temporary_migration_module(module='migrations.test_auto_now_add'): call_command('makemigrations', 'migrations', interactive=True, stdout=out) output = out.getvalue() prompt_output = prompt_stdout.getvalue() self.assertIn("You can accept the default 'timezone.now' by pressing 'Enter'", prompt_output) self.assertIn("Add field creation_date to entry", output) class SquashMigrationsTests(MigrationTestBase): """ Tests running the squashmigrations command. """ def test_squashmigrations_squashes(self): """ squashmigrations squashes migrations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command('squashmigrations', 'migrations', '0002', interactive=False, stdout=out, no_color=True) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") self.assertTrue(os.path.exists(squashed_migration_file)) self.assertEqual( out.getvalue(), 'Will squash the following migrations:\n' ' - 0001_initial\n' ' - 0002_second\n' 'Optimizing...\n' ' Optimized from 8 operations to 2 operations.\n' 'Created new squashed migration %s\n' ' You should commit this migration but leave the old ones in place;\n' ' the new migration will be used for new installs. Once you are sure\n' ' all instances of the codebase have applied the migrations you squashed,\n' ' you can delete them.\n' % squashed_migration_file ) def test_squashmigrations_initial_attribute(self): with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn("initial = True", content) def test_squashmigrations_optimizes(self): """ squashmigrations optimizes operations. 
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, stdout=out) self.assertIn("Optimized from 8 operations to 2 operations.", out.getvalue()) def test_ticket_23799_squashmigrations_no_optimize(self): """ squashmigrations --no-optimize doesn't optimize operations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, no_optimize=True, stdout=out) self.assertIn("Skipping optimization", out.getvalue()) def test_squashmigrations_valid_start(self): """ squashmigrations accepts a starting migration. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes") as migration_dir: call_command("squashmigrations", "migrations", "0002", "0003", interactive=False, verbosity=1, stdout=out) squashed_migration_file = os.path.join(migration_dir, "0002_second_squashed_0003_third.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn(" ('migrations', '0001_initial')", content) self.assertNotIn("initial = True", content) out = out.getvalue() self.assertNotIn(" - 0001_initial", out) self.assertIn(" - 0002_second", out) self.assertIn(" - 0003_third", out) def test_squashmigrations_invalid_start(self): """ squashmigrations doesn't accept a starting migration after the ending migration. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): msg = ( "The migration 'migrations.0003_third' cannot be found. Maybe " "it comes after the migration 'migrations.0002_second'" ) with self.assertRaisesMessage(CommandError, msg): call_command("squashmigrations", "migrations", "0003", "0002", interactive=False, verbosity=0) def test_squashed_name_with_start_migration_name(self): """--squashed-name specifies the new migration's name.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', '0002', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) def test_squashed_name_without_start_migration_name(self): """--squashed-name also works if a start migration is omitted.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) class AppLabelErrorTests(TestCase): """ This class inherits TestCase because MigrationTestBase uses `available_apps = ['migrations']` which means that it's the only installed app. 'django.contrib.auth' must be in INSTALLED_APPS for some of these tests. """ nonexistent_app_error = "No installed app with label 'nonexistent_app'." did_you_mean_auth_error = ( "No installed app with label 'django.contrib.auth'. Did you mean " "'auth'?" 
    )

    def test_makemigrations_nonexistent_app_label(self):
        err = io.StringIO()
        with self.assertRaises(SystemExit):
            call_command('makemigrations', 'nonexistent_app', stderr=err)
        self.assertIn(self.nonexistent_app_error, err.getvalue())

    def test_makemigrations_app_name_specified_as_label(self):
        err = io.StringIO()
        with self.assertRaises(SystemExit):
            call_command('makemigrations', 'django.contrib.auth', stderr=err)
        self.assertIn(self.did_you_mean_auth_error, err.getvalue())

    def test_migrate_nonexistent_app_label(self):
        with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
            call_command('migrate', 'nonexistent_app')

    def test_migrate_app_name_specified_as_label(self):
        with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
            call_command('migrate', 'django.contrib.auth')

    def test_showmigrations_nonexistent_app_label(self):
        err = io.StringIO()
        with self.assertRaises(SystemExit):
            call_command('showmigrations', 'nonexistent_app', stderr=err)
        self.assertIn(self.nonexistent_app_error, err.getvalue())

    def test_showmigrations_app_name_specified_as_label(self):
        err = io.StringIO()
        with self.assertRaises(SystemExit):
            call_command('showmigrations', 'django.contrib.auth', stderr=err)
        self.assertIn(self.did_you_mean_auth_error, err.getvalue())

    def test_sqlmigrate_nonexistent_app_label(self):
        with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
            call_command('sqlmigrate', 'nonexistent_app', '0002')

    def test_sqlmigrate_app_name_specified_as_label(self):
        with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
            call_command('sqlmigrate', 'django.contrib.auth', '0002')

    def test_squashmigrations_nonexistent_app_label(self):
        with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
            call_command('squashmigrations', 'nonexistent_app', '0002')

    def test_squashmigrations_app_name_specified_as_label(self):
        with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
            call_command('squashmigrations', 'django.contrib.auth', '0002')
db1a15623a06623f8d8886b7f127c493dea861cce98b9c1f225cb4e7ad24f580
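# Serialization tests for the migration writer (MigrationWriter/OperationWriter);
# the expected output strings below reference this module as migrations.test_writer.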
import datetime import decimal import enum import functools import math import os import pathlib import re import sys import uuid from unittest import mock import custom_migration_operations.more_operations import custom_migration_operations.operations from django import get_version from django.conf import SettingsReference, settings from django.core.validators import EmailValidator, RegexValidator from django.db import migrations, models from django.db.migrations.serializer import BaseSerializer from django.db.migrations.writer import MigrationWriter, OperationWriter from django.test import SimpleTestCase from django.utils.deconstruct import deconstructible from django.utils.functional import SimpleLazyObject from django.utils.timezone import get_default_timezone, get_fixed_timezone, utc from django.utils.translation import gettext_lazy as _ from .models import FoodManager, FoodQuerySet class DeconstructibleInstances: def deconstruct(self): return ('DeconstructibleInstances', [], {}) class Money(decimal.Decimal): def deconstruct(self): return ( '%s.%s' % (self.__class__.__module__, self.__class__.__name__), [str(self)], {} ) class TestModel1: def upload_to(self): return '/somewhere/dynamic/' thing = models.FileField(upload_to=upload_to) class TextEnum(enum.Enum): A = 'a-value' B = 'value-b' class TextTranslatedEnum(enum.Enum): A = _('a-value') B = _('value-b') class BinaryEnum(enum.Enum): A = b'a-value' B = b'value-b' class IntEnum(enum.IntEnum): A = 1 B = 2 class OperationWriterTests(SimpleTestCase): def test_empty_signature(self): operation = custom_migration_operations.operations.TestOperation() buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.TestOperation(\n' '),' ) def test_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation(1, 2) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' '),' ) def test_kwargs_signature(self): operation = custom_migration_operations.operations.KwargsOperation(kwarg1=1) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' '),' ) def test_args_kwargs_signature(self): operation = custom_migration_operations.operations.ArgsKwargsOperation(1, 2, kwarg2=4) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsKwargsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' ' kwarg2=4,\n' '),' ) def test_nested_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation( custom_migration_operations.operations.ArgsOperation(1, 2), custom_migration_operations.operations.KwargsOperation(kwarg1=3, kwarg2=4) ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' 
arg2=2,\n' ' ),\n' ' arg2=custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=3,\n' ' kwarg2=4,\n' ' ),\n' '),' ) def test_multiline_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation("test\n arg1", "test\narg2") buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, "custom_migration_operations.operations.ArgsOperation(\n" " arg1='test\\n arg1',\n" " arg2='test\\narg2',\n" ")," ) def test_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation([1, 2]) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' 1,\n' ' 2,\n' ' ],\n' '),' ) def test_nested_operation_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation( arg=[ custom_migration_operations.operations.KwargsOperation( kwarg1=1, kwarg2=2, ), ] ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' ' kwarg2=2,\n' ' ),\n' ' ],\n' '),' ) class WriterTests(SimpleTestCase): """ Tests the migration writer (makes migration files from Migration instances) """ class NestedEnum(enum.IntEnum): A = 1 B = 2 class NestedChoices(models.TextChoices): X = 'X', 'X value' Y = 'Y', 'Y value' def safe_exec(self, string, value=None): d = {} try: exec(string, globals(), d) except Exception as e: if value: self.fail("Could not exec %r (from value %r): %s" % (string.strip(), value, e)) else: self.fail("Could not exec %r: %s" % (string.strip(), e)) return d def serialize_round_trip(self, value): string, imports = MigrationWriter.serialize(value) return self.safe_exec("%s\ntest_value_result = %s" % ("\n".join(imports), string), value)['test_value_result'] def assertSerializedEqual(self, value): self.assertEqual(self.serialize_round_trip(value), value) def assertSerializedResultEqual(self, value, target): self.assertEqual(MigrationWriter.serialize(value), target) def assertSerializedFieldEqual(self, value): new_value = self.serialize_round_trip(value) self.assertEqual(value.__class__, new_value.__class__) self.assertEqual(value.max_length, new_value.max_length) self.assertEqual(value.null, new_value.null) self.assertEqual(value.unique, new_value.unique) def test_serialize_numbers(self): self.assertSerializedEqual(1) self.assertSerializedEqual(1.2) self.assertTrue(math.isinf(self.serialize_round_trip(float("inf")))) self.assertTrue(math.isinf(self.serialize_round_trip(float("-inf")))) self.assertTrue(math.isnan(self.serialize_round_trip(float("nan")))) self.assertSerializedEqual(decimal.Decimal('1.3')) self.assertSerializedResultEqual( decimal.Decimal('1.3'), ("Decimal('1.3')", {'from decimal import Decimal'}) ) self.assertSerializedEqual(Money('1.3')) self.assertSerializedResultEqual( Money('1.3'), ("migrations.test_writer.Money('1.3')", {'import migrations.test_writer'}) ) def test_serialize_constants(self): self.assertSerializedEqual(None) self.assertSerializedEqual(True) self.assertSerializedEqual(False) def test_serialize_strings(self): 
self.assertSerializedEqual(b"foobar") string, imports = MigrationWriter.serialize(b"foobar") self.assertEqual(string, "b'foobar'") self.assertSerializedEqual("föobár") string, imports = MigrationWriter.serialize("foobar") self.assertEqual(string, "'foobar'") def test_serialize_multiline_strings(self): self.assertSerializedEqual(b"foo\nbar") string, imports = MigrationWriter.serialize(b"foo\nbar") self.assertEqual(string, "b'foo\\nbar'") self.assertSerializedEqual("föo\nbár") string, imports = MigrationWriter.serialize("foo\nbar") self.assertEqual(string, "'foo\\nbar'") def test_serialize_collections(self): self.assertSerializedEqual({1: 2}) self.assertSerializedEqual(["a", 2, True, None]) self.assertSerializedEqual({2, 3, "eighty"}) self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]}) self.assertSerializedEqual(_('Hello')) def test_serialize_builtin_types(self): self.assertSerializedEqual([list, tuple, dict, set, frozenset]) self.assertSerializedResultEqual( [list, tuple, dict, set, frozenset], ("[list, tuple, dict, set, frozenset]", set()) ) def test_serialize_lazy_objects(self): pattern = re.compile(r'^foo$') lazy_pattern = SimpleLazyObject(lambda: pattern) self.assertEqual(self.serialize_round_trip(lazy_pattern), pattern) def test_serialize_enums(self): self.assertSerializedResultEqual( TextEnum.A, ("migrations.test_writer.TextEnum['A']", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( TextTranslatedEnum.A, ("migrations.test_writer.TextTranslatedEnum['A']", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( BinaryEnum.A, ("migrations.test_writer.BinaryEnum['A']", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( IntEnum.B, ("migrations.test_writer.IntEnum['B']", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( self.NestedEnum.A, ( "migrations.test_writer.WriterTests.NestedEnum['A']", {'import migrations.test_writer'}, ), ) self.assertSerializedEqual(self.NestedEnum.A) field = models.CharField(default=TextEnum.B, choices=[(m.value, m) for m in TextEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[" "('a-value', migrations.test_writer.TextEnum['A']), " "('value-b', migrations.test_writer.TextEnum['B'])], " "default=migrations.test_writer.TextEnum['B'])" ) field = models.CharField( default=TextTranslatedEnum.A, choices=[(m.value, m) for m in TextTranslatedEnum], ) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[" "('a-value', migrations.test_writer.TextTranslatedEnum['A']), " "('value-b', migrations.test_writer.TextTranslatedEnum['B'])], " "default=migrations.test_writer.TextTranslatedEnum['A'])" ) field = models.CharField(default=BinaryEnum.B, choices=[(m.value, m) for m in BinaryEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[" "(b'a-value', migrations.test_writer.BinaryEnum['A']), " "(b'value-b', migrations.test_writer.BinaryEnum['B'])], " "default=migrations.test_writer.BinaryEnum['B'])" ) field = models.IntegerField(default=IntEnum.A, choices=[(m.value, m) for m in IntEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.IntegerField(choices=[" "(1, migrations.test_writer.IntEnum['A']), " "(2, migrations.test_writer.IntEnum['B'])], " "default=migrations.test_writer.IntEnum['A'])" ) def test_serialize_choices(self): class TextChoices(models.TextChoices): A = 'A', 'A value' B = 'B', 'B value' class 
IntegerChoices(models.IntegerChoices): A = 1, 'One' B = 2, 'Two' class DateChoices(datetime.date, models.Choices): DATE_1 = 1969, 7, 20, 'First date' DATE_2 = 1969, 11, 19, 'Second date' self.assertSerializedResultEqual(TextChoices.A, ("'A'", set())) self.assertSerializedResultEqual(IntegerChoices.A, ('1', set())) self.assertSerializedResultEqual( DateChoices.DATE_1, ('datetime.date(1969, 7, 20)', {'import datetime'}), ) field = models.CharField(default=TextChoices.B, choices=TextChoices.choices) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[('A', 'A value'), ('B', 'B value')], " "default='B')", ) field = models.IntegerField(default=IntegerChoices.B, choices=IntegerChoices.choices) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.IntegerField(choices=[(1, 'One'), (2, 'Two')], default=2)", ) field = models.DateField(default=DateChoices.DATE_2, choices=DateChoices.choices) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.DateField(choices=[" "(datetime.date(1969, 7, 20), 'First date'), " "(datetime.date(1969, 11, 19), 'Second date')], " "default=datetime.date(1969, 11, 19))" ) def test_serialize_nested_class(self): for nested_cls in [self.NestedEnum, self.NestedChoices]: cls_name = nested_cls.__name__ with self.subTest(cls_name): self.assertSerializedResultEqual( nested_cls, ( "migrations.test_writer.WriterTests.%s" % cls_name, {'import migrations.test_writer'}, ), ) def test_serialize_uuid(self): self.assertSerializedEqual(uuid.uuid1()) self.assertSerializedEqual(uuid.uuid4()) uuid_a = uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8') uuid_b = uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2') self.assertSerializedResultEqual( uuid_a, ("uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8')", {'import uuid'}) ) self.assertSerializedResultEqual( uuid_b, ("uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2')", {'import uuid'}) ) field = models.UUIDField(choices=((uuid_a, 'UUID A'), (uuid_b, 'UUID B')), default=uuid_a) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.UUIDField(choices=[" "(uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'), 'UUID A'), " "(uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2'), 'UUID B')], " "default=uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'))" ) def test_serialize_pathlib(self): # Pure path objects work in all platforms. self.assertSerializedEqual(pathlib.PurePosixPath()) self.assertSerializedEqual(pathlib.PureWindowsPath()) path = pathlib.PurePosixPath('/path/file.txt') expected = ("pathlib.PurePosixPath('/path/file.txt')", {'import pathlib'}) self.assertSerializedResultEqual(path, expected) path = pathlib.PureWindowsPath('A:\\File.txt') expected = ("pathlib.PureWindowsPath('A:/File.txt')", {'import pathlib'}) self.assertSerializedResultEqual(path, expected) # Concrete path objects work on supported platforms. 
if sys.platform == 'win32': self.assertSerializedEqual(pathlib.WindowsPath.cwd()) path = pathlib.WindowsPath('A:\\File.txt') expected = ("pathlib.PureWindowsPath('A:/File.txt')", {'import pathlib'}) self.assertSerializedResultEqual(path, expected) else: self.assertSerializedEqual(pathlib.PosixPath.cwd()) path = pathlib.PosixPath('/path/file.txt') expected = ("pathlib.PurePosixPath('/path/file.txt')", {'import pathlib'}) self.assertSerializedResultEqual(path, expected) field = models.FilePathField(path=pathlib.PurePosixPath('/home/user')) string, imports = MigrationWriter.serialize(field) self.assertEqual( string, "models.FilePathField(path=pathlib.PurePosixPath('/home/user'))", ) self.assertIn('import pathlib', imports) def test_serialize_path_like(self): path_like = list(os.scandir(os.path.dirname(__file__)))[0] expected = (repr(path_like.path), {}) self.assertSerializedResultEqual(path_like, expected) field = models.FilePathField(path=path_like) string = MigrationWriter.serialize(field)[0] self.assertEqual(string, 'models.FilePathField(path=%r)' % path_like.path) def test_serialize_functions(self): with self.assertRaisesMessage(ValueError, 'Cannot serialize function: lambda'): self.assertSerializedEqual(lambda x: 42) self.assertSerializedEqual(models.SET_NULL) string, imports = MigrationWriter.serialize(models.SET(42)) self.assertEqual(string, 'models.SET(42)') self.serialize_round_trip(models.SET(42)) def test_serialize_datetime(self): self.assertSerializedEqual(datetime.datetime.now()) self.assertSerializedEqual(datetime.datetime.now) self.assertSerializedEqual(datetime.datetime.today()) self.assertSerializedEqual(datetime.datetime.today) self.assertSerializedEqual(datetime.date.today()) self.assertSerializedEqual(datetime.date.today) self.assertSerializedEqual(datetime.datetime.now().time()) self.assertSerializedEqual(datetime.datetime(2014, 1, 1, 1, 1, tzinfo=get_default_timezone())) self.assertSerializedEqual(datetime.datetime(2013, 12, 31, 22, 1, tzinfo=get_fixed_timezone(180))) self.assertSerializedResultEqual( datetime.datetime(2014, 1, 1, 1, 1), ("datetime.datetime(2014, 1, 1, 1, 1)", {'import datetime'}) ) for tzinfo in (utc, datetime.timezone.utc): with self.subTest(tzinfo=tzinfo): self.assertSerializedResultEqual( datetime.datetime(2012, 1, 1, 1, 1, tzinfo=tzinfo), ( "datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)", {'import datetime', 'from django.utils.timezone import utc'}, ) ) def test_serialize_fields(self): self.assertSerializedFieldEqual(models.CharField(max_length=255)) self.assertSerializedResultEqual( models.CharField(max_length=255), ("models.CharField(max_length=255)", {"from django.db import models"}) ) self.assertSerializedFieldEqual(models.TextField(null=True, blank=True)) self.assertSerializedResultEqual( models.TextField(null=True, blank=True), ("models.TextField(blank=True, null=True)", {'from django.db import models'}) ) def test_serialize_settings(self): self.assertSerializedEqual(SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL")) self.assertSerializedResultEqual( SettingsReference("someapp.model", "AUTH_USER_MODEL"), ("settings.AUTH_USER_MODEL", {"from django.conf import settings"}) ) def test_serialize_iterators(self): self.assertSerializedResultEqual( ((x, x * x) for x in range(3)), ("((0, 0), (1, 1), (2, 4))", set()) ) def test_serialize_compiled_regex(self): """ Make sure compiled regex can be serialized. 
""" regex = re.compile(r'^\w+$') self.assertSerializedEqual(regex) def test_serialize_class_based_validators(self): """ Ticket #22943: Test serialization of class-based validators, including compiled regexes. """ validator = RegexValidator(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(message='hello')") self.serialize_round_trip(validator) # Test with a compiled regex. validator = RegexValidator(regex=re.compile(r'^\w+$')) string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(regex=re.compile('^\\\\w+$'))") self.serialize_round_trip(validator) # Test a string regex with flag validator = RegexValidator(r'^[0-9]+$', flags=re.S) string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator('^[0-9]+$', flags=re.RegexFlag['DOTALL'])") self.serialize_round_trip(validator) # Test message and code validator = RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid') string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid')") self.serialize_round_trip(validator) # Test with a subclass. validator = EmailValidator(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.EmailValidator(message='hello')") self.serialize_round_trip(validator) validator = deconstructible(path="migrations.test_writer.EmailValidator")(EmailValidator)(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "migrations.test_writer.EmailValidator(message='hello')") validator = deconstructible(path="custom.EmailValidator")(EmailValidator)(message="hello") with self.assertRaisesMessage(ImportError, "No module named 'custom'"): MigrationWriter.serialize(validator) validator = deconstructible(path="django.core.validators.EmailValidator2")(EmailValidator)(message="hello") with self.assertRaisesMessage(ValueError, "Could not find object EmailValidator2 in django.core.validators."): MigrationWriter.serialize(validator) def test_serialize_empty_nonempty_tuple(self): """ Ticket #22679: makemigrations generates invalid code for (an empty tuple) default_permissions = () """ empty_tuple = () one_item_tuple = ('a',) many_items_tuple = ('a', 'b', 'c') self.assertSerializedEqual(empty_tuple) self.assertSerializedEqual(one_item_tuple) self.assertSerializedEqual(many_items_tuple) def test_serialize_range(self): string, imports = MigrationWriter.serialize(range(1, 5)) self.assertEqual(string, 'range(1, 5)') self.assertEqual(imports, set()) def test_serialize_builtins(self): string, imports = MigrationWriter.serialize(range) self.assertEqual(string, 'range') self.assertEqual(imports, set()) def test_serialize_unbound_method_reference(self): """An unbound method used within a class body can be serialized.""" self.serialize_round_trip(TestModel1.thing) def test_serialize_local_function_reference(self): """A reference in a local scope can't be serialized.""" class TestModel2: def upload_to(self): return "somewhere dynamic" thing = models.FileField(upload_to=upload_to) with self.assertRaisesMessage(ValueError, 'Could not find function upload_to in migrations.test_writer'): self.serialize_round_trip(TestModel2.thing) def test_serialize_managers(self): self.assertSerializedEqual(models.Manager()) self.assertSerializedResultEqual( FoodQuerySet.as_manager(), 
('migrations.models.FoodQuerySet.as_manager()', {'import migrations.models'}) ) self.assertSerializedEqual(FoodManager('a', 'b')) self.assertSerializedEqual(FoodManager('x', 'y', c=3, d=4)) def test_serialize_frozensets(self): self.assertSerializedEqual(frozenset()) self.assertSerializedEqual(frozenset("let it go")) def test_serialize_set(self): self.assertSerializedEqual(set()) self.assertSerializedResultEqual(set(), ('set()', set())) self.assertSerializedEqual({'a'}) self.assertSerializedResultEqual({'a'}, ("{'a'}", set())) def test_serialize_timedelta(self): self.assertSerializedEqual(datetime.timedelta()) self.assertSerializedEqual(datetime.timedelta(minutes=42)) def test_serialize_functools_partial(self): value = functools.partial(datetime.timedelta, 1, seconds=2) result = self.serialize_round_trip(value) self.assertEqual(result.func, value.func) self.assertEqual(result.args, value.args) self.assertEqual(result.keywords, value.keywords) def test_serialize_functools_partialmethod(self): value = functools.partialmethod(datetime.timedelta, 1, seconds=2) result = self.serialize_round_trip(value) self.assertIsInstance(result, functools.partialmethod) self.assertEqual(result.func, value.func) self.assertEqual(result.args, value.args) self.assertEqual(result.keywords, value.keywords) def test_serialize_type_none(self): self.assertSerializedEqual(type(None)) def test_simple_migration(self): """ Tests serializing a simple migration. """ fields = { 'charfield': models.DateTimeField(default=datetime.datetime.now), 'datetimefield': models.DateTimeField(default=datetime.datetime.now), } options = { 'verbose_name': 'My model', 'verbose_name_plural': 'My models', } migration = type("Migration", (migrations.Migration,), { "operations": [ migrations.CreateModel("MyModel", tuple(fields.items()), options, (models.Model,)), migrations.CreateModel("MyModel2", tuple(fields.items()), bases=(models.Model,)), migrations.CreateModel( name="MyModel3", fields=tuple(fields.items()), options=options, bases=(models.Model,) ), migrations.DeleteModel("MyModel"), migrations.AddField("OtherModel", "datetimefield", fields["datetimefield"]), ], "dependencies": [("testapp", "some_other_one")], }) writer = MigrationWriter(migration) output = writer.as_string() # We don't test the output formatting - that's too fragile. # Just make sure it runs for now, and that things look alright. 
result = self.safe_exec(output) self.assertIn("Migration", result) def test_migration_path(self): test_apps = [ 'migrations.migrations_test_apps.normal', 'migrations.migrations_test_apps.with_package_model', 'migrations.migrations_test_apps.without_init_file', ] base_dir = os.path.dirname(os.path.dirname(__file__)) for app in test_apps: with self.modify_settings(INSTALLED_APPS={'append': app}): migration = migrations.Migration('0001_initial', app.split('.')[-1]) expected_path = os.path.join(base_dir, *(app.split('.') + ['migrations', '0001_initial.py'])) writer = MigrationWriter(migration) self.assertEqual(writer.path, expected_path) def test_custom_operation(self): migration = type("Migration", (migrations.Migration,), { "operations": [ custom_migration_operations.operations.TestOperation(), custom_migration_operations.operations.CreateModel(), migrations.CreateModel("MyModel", (), {}, (models.Model,)), custom_migration_operations.more_operations.TestOperation() ], "dependencies": [] }) writer = MigrationWriter(migration) output = writer.as_string() result = self.safe_exec(output) self.assertIn("custom_migration_operations", result) self.assertNotEqual( result['custom_migration_operations'].operations.TestOperation, result['custom_migration_operations'].more_operations.TestOperation ) def test_sorted_imports(self): """ #24155 - Tests ordering of imports. """ migration = type("Migration", (migrations.Migration,), { "operations": [ migrations.AddField("mymodel", "myfield", models.DateTimeField( default=datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc), )), ] }) writer = MigrationWriter(migration) output = writer.as_string() self.assertIn( "import datetime\n" "from django.db import migrations, models\n" "from django.utils.timezone import utc\n", output ) def test_migration_file_header_comments(self): """ Test comments at top of file. """ migration = type("Migration", (migrations.Migration,), { "operations": [] }) dt = datetime.datetime(2015, 7, 31, 4, 40, 0, 0, tzinfo=utc) with mock.patch('django.db.migrations.writer.now', lambda: dt): for include_header in (True, False): with self.subTest(include_header=include_header): writer = MigrationWriter(migration, include_header) output = writer.as_string() self.assertEqual( include_header, output.startswith( "# Generated by Django %s on 2015-07-31 04:40\n\n" % get_version() ) ) if not include_header: # Make sure the output starts with something that's not # a comment or indentation or blank line self.assertRegex(output.splitlines(keepends=True)[0], r"^[^#\s]+") def test_models_import_omitted(self): """ django.db.models shouldn't be imported if unused. """ migration = type("Migration", (migrations.Migration,), { "operations": [ migrations.AlterModelOptions( name='model', options={'verbose_name': 'model', 'verbose_name_plural': 'models'}, ), ] }) writer = MigrationWriter(migration) output = writer.as_string() self.assertIn("from django.db import migrations\n", output) def test_deconstruct_class_arguments(self): # Yes, it doesn't make sense to use a class as a default for a # CharField. It does make sense for custom fields though, for example # an enumfield that takes the enum class as an argument. 
string = MigrationWriter.serialize(models.CharField(default=DeconstructibleInstances))[0] self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructibleInstances)") def test_register_serializer(self): class ComplexSerializer(BaseSerializer): def serialize(self): return 'complex(%r)' % self.value, {} MigrationWriter.register_serializer(complex, ComplexSerializer) self.assertSerializedEqual(complex(1, 2)) MigrationWriter.unregister_serializer(complex) with self.assertRaisesMessage(ValueError, 'Cannot serialize: (1+2j)'): self.assertSerializedEqual(complex(1, 2)) def test_register_non_serializer(self): with self.assertRaisesMessage(ValueError, "'TestModel1' must inherit from 'BaseSerializer'."): MigrationWriter.register_serializer(complex, TestModel1)
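# Illustrative sketch (not part of the test suite above): the
# MigrationWriter.register_serializer()/unregister_serializer() API exercised
# by test_register_serializer can also be used by a project to teach the
# migration writer about its own value types. The Money class and the
# 'myapp.money' import below are hypothetical examples, not code that exists
# in this repository.
from django.db.migrations.serializer import BaseSerializer
from django.db.migrations.writer import MigrationWriter


class Money:
    """Hypothetical value type that could appear as a field default."""

    def __init__(self, amount, currency):
        self.amount = amount
        self.currency = currency


class MoneySerializer(BaseSerializer):
    """Render Money values as constructor calls inside migration files."""

    def serialize(self):
        # self.value is the Money instance being written out; return the
        # source snippet plus the import lines that snippet requires.
        string = 'Money(%r, %r)' % (self.value.amount, self.value.currency)
        return string, {'from myapp.money import Money'}


def register_money_serializer():
    # Would typically be called once at start-up (e.g. in AppConfig.ready())
    # so makemigrations can serialize Money defaults; unregister_serializer()
    # reverses it, as test_register_serializer shows for complex numbers.
    MigrationWriter.register_serializer(Money, MoneySerializer)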
from django.core.exceptions import FieldDoesNotExist from django.db import ( IntegrityError, connection, migrations, models, transaction, ) from django.db.migrations.migration import Migration from django.db.migrations.operations.fields import FieldOperation from django.db.migrations.state import ModelState, ProjectState from django.db.models.functions import Abs from django.db.transaction import atomic from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature from django.test.utils import CaptureQueriesContext from .models import FoodManager, FoodQuerySet, UnicodeModel from .test_base import OperationTestBase class Mixin: pass class OperationTests(OperationTestBase): """ Tests running the operations and making sure they do what they say they do. Each test looks at their state changing, and then their database operation - both forwards and backwards. """ def test_create_model(self): """ Tests the CreateModel operation. Most other tests use this operation as part of setup, so check failures here first. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) self.assertEqual(operation.describe(), "Create model Pony") self.assertEqual(operation.migration_name_fragment, 'pony') # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["fields", "name"]) # And default manager not in set operation = migrations.CreateModel("Foo", fields=[], managers=[("objects", models.Manager())]) definition = operation.deconstruct() self.assertNotIn('managers', definition[2]) def test_create_model_with_duplicate_field_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value pink in CreateModel fields argument.'): migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.TextField()), ("pink", models.IntegerField(default=1)), ], ) def test_create_model_with_duplicate_base(self): message = 'Found duplicate value test_crmo.pony in CreateModel bases argument.' with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.Pony",), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.pony",), ) message = 'Found duplicate value migrations.unicodemodel in CreateModel bases argument.' 
with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, UnicodeModel,), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.unicodemodel',), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.UnicodeModel',), ) message = "Found duplicate value <class 'django.db.models.base.Model'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(models.Model, models.Model,), ) message = "Found duplicate value <class 'migrations.test_operations.Mixin'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(Mixin, Mixin,), ) def test_create_model_with_duplicate_manager_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value objects in CreateModel managers argument.'): migrations.CreateModel( "Pony", fields=[], managers=[ ("objects", models.Manager()), ("objects", models.Manager()), ], ) def test_create_model_with_unique_after(self): """ Tests the CreateModel operation directly followed by an AlterUniqueTogether (bug #22844 - sqlite remake issues) """ operation1 = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) operation2 = migrations.CreateModel( "Rider", [ ("id", models.AutoField(primary_key=True)), ("number", models.IntegerField(default=1)), ("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)), ], ) operation3 = migrations.AlterUniqueTogether( "Rider", [ ("number", "pony"), ], ) # Test the database alteration project_state = ProjectState() self.assertTableNotExists("test_crmoua_pony") self.assertTableNotExists("test_crmoua_rider") with connection.schema_editor() as editor: new_state = project_state.clone() operation1.state_forwards("test_crmoua", new_state) operation1.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation2.state_forwards("test_crmoua", new_state) operation2.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation3.state_forwards("test_crmoua", new_state) operation3.database_forwards("test_crmoua", editor, project_state, new_state) self.assertTableExists("test_crmoua_pony") self.assertTableExists("test_crmoua_rider") def test_create_model_m2m(self): """ Test the creation of a model with a ManyToMany field and the auto-created "through" model. 
""" project_state = self.set_up_test_model("test_crmomm") operation = migrations.CreateModel( "Stable", [ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("Pony", related_name="stables")) ] ) # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_crmomm", new_state) # Test the database alteration self.assertTableNotExists("test_crmomm_stable_ponies") with connection.schema_editor() as editor: operation.database_forwards("test_crmomm", editor, project_state, new_state) self.assertTableExists("test_crmomm_stable") self.assertTableExists("test_crmomm_stable_ponies") self.assertColumnNotExists("test_crmomm_stable", "ponies") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_crmomm", "Pony") Stable = new_state.apps.get_model("test_crmomm", "Stable") stable = Stable.objects.create() p1 = Pony.objects.create(pink=False, weight=4.55) p2 = Pony.objects.create(pink=True, weight=5.43) stable.ponies.add(p1, p2) self.assertEqual(stable.ponies.count(), 2) stable.ponies.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmomm", editor, new_state, project_state) self.assertTableNotExists("test_crmomm_stable") self.assertTableNotExists("test_crmomm_stable_ponies") def test_create_model_inheritance(self): """ Tests the CreateModel operation on a multi-table inheritance setup. """ project_state = self.set_up_test_model("test_crmoih") # Test the state alteration operation = migrations.CreateModel( "ShetlandPony", [ ('pony_ptr', models.OneToOneField( 'test_crmoih.Pony', models.CASCADE, auto_created=True, primary_key=True, to_field='id', serialize=False, )), ("cuteness", models.IntegerField(default=1)), ], ) new_state = project_state.clone() operation.state_forwards("test_crmoih", new_state) self.assertIn(("test_crmoih", "shetlandpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crmoih_shetlandpony") with connection.schema_editor() as editor: operation.database_forwards("test_crmoih", editor, project_state, new_state) self.assertTableExists("test_crmoih_shetlandpony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmoih", editor, new_state, project_state) self.assertTableNotExists("test_crmoih_shetlandpony") def test_create_proxy_model(self): """ CreateModel ignores proxy models. 
""" project_state = self.set_up_test_model("test_crprmo") # Test the state alteration operation = migrations.CreateModel( "ProxyPony", [], options={"proxy": True}, bases=("test_crprmo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model ProxyPony") new_state = project_state.clone() operation.state_forwards("test_crprmo", new_state) self.assertIn(("test_crprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crprmo", editor, project_state, new_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crprmo", editor, new_state, project_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["bases", "fields", "name", "options"]) def test_create_unmanaged_model(self): """ CreateModel ignores unmanaged models. """ project_state = self.set_up_test_model("test_crummo") # Test the state alteration operation = migrations.CreateModel( "UnmanagedPony", [], options={"proxy": True}, bases=("test_crummo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony") new_state = project_state.clone() operation.state_forwards("test_crummo", new_state) self.assertIn(("test_crummo", "unmanagedpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crummo", editor, project_state, new_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crummo", editor, new_state, project_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") @skipUnlessDBFeature('supports_table_check_constraints') def test_create_model_with_constraint(self): where = models.Q(pink__gt=2) check_constraint = models.CheckConstraint(check=where, name='test_constraint_pony_pink_gt_2') operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=3)), ], options={'constraints': [check_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") with connection.cursor() as cursor: with self.assertRaises(IntegrityError): cursor.execute("INSERT INTO test_crmo_pony (id, pink) VALUES (1, 1)") # Test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # Test deconstruction definition = operation.deconstruct() 
self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [check_constraint]) def test_create_model_with_partial_unique_constraint(self): partial_unique_constraint = models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ('weight', models.FloatField()), ], options={'constraints': [partial_unique_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists('test_crmo_pony') with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') # Test constraint works Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test reversal with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Test deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [partial_unique_constraint]) def test_create_model_with_deferred_unique_constraint(self): deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferrable_pink_constraint', deferrable=models.Deferrable.DEFERRED, ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ], options={'constraints': [deferred_unique_constraint]}, ) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) self.assertTableNotExists('test_crmo_pony') # Create table. with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1) obj.pink = 3 obj.save() else: Pony.objects.create(pink=1) # Reversal. with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual( definition[2]['options']['constraints'], [deferred_unique_constraint], ) @skipUnlessDBFeature('supports_covering_indexes') def test_create_model_with_covering_unique_constraint(self): covering_unique_constraint = models.UniqueConstraint( fields=['pink'], include=['weight'], name='test_constraint_pony_pink_covering_weight', ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ('weight', models.FloatField()), ], options={'constraints': [covering_unique_constraint]}, ) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) self.assertTableNotExists('test_crmo_pony') # Create table. with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1, weight=4.0) with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=7.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual( definition[2]['options']['constraints'], [covering_unique_constraint], ) def test_create_model_managers(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_cmoma") # Test the state alteration operation = migrations.CreateModel( "Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Create model Food") new_state = project_state.clone() operation.state_forwards("test_cmoma", new_state) self.assertIn(("test_cmoma", "food"), new_state.models) managers = new_state.models["test_cmoma", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) def test_delete_model(self): """ Tests the DeleteModel operation. 
""" project_state = self.set_up_test_model("test_dlmo") # Test the state alteration operation = migrations.DeleteModel("Pony") self.assertEqual(operation.describe(), "Delete model Pony") self.assertEqual(operation.migration_name_fragment, 'delete_pony') new_state = project_state.clone() operation.state_forwards("test_dlmo", new_state) self.assertNotIn(("test_dlmo", "pony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dlmo", editor, project_state, new_state) self.assertTableNotExists("test_dlmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlmo", editor, new_state, project_state) self.assertTableExists("test_dlmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "DeleteModel") self.assertEqual(definition[1], []) self.assertEqual(list(definition[2]), ["name"]) def test_delete_proxy_model(self): """ Tests the DeleteModel operation ignores proxy models. """ project_state = self.set_up_test_model("test_dlprmo", proxy_model=True) # Test the state alteration operation = migrations.DeleteModel("ProxyPony") new_state = project_state.clone() operation.state_forwards("test_dlprmo", new_state) self.assertIn(("test_dlprmo", "proxypony"), project_state.models) self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") with connection.schema_editor() as editor: operation.database_forwards("test_dlprmo", editor, project_state, new_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlprmo", editor, new_state, project_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") def test_delete_mti_model(self): project_state = self.set_up_test_model('test_dlmtimo', mti_model=True) # Test the state alteration operation = migrations.DeleteModel('ShetlandPony') new_state = project_state.clone() operation.state_forwards('test_dlmtimo', new_state) self.assertIn(('test_dlmtimo', 'shetlandpony'), project_state.models) self.assertNotIn(('test_dlmtimo', 'shetlandpony'), new_state.models) # Test the database alteration self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') with connection.schema_editor() as editor: operation.database_forwards('test_dlmtimo', editor, project_state, new_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableNotExists('test_dlmtimo_shetlandpony') # And test reversal with connection.schema_editor() as editor: operation.database_backwards('test_dlmtimo', editor, new_state, project_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') def test_rename_model(self): """ Tests the RenameModel operation. 
""" project_state = self.set_up_test_model("test_rnmo", related_model=True) # Test the state alteration operation = migrations.RenameModel("Pony", "Horse") self.assertEqual(operation.describe(), "Rename model Pony to Horse") self.assertEqual(operation.migration_name_fragment, 'rename_pony_horse') # Test initial state and database self.assertIn(("test_rnmo", "pony"), project_state.models) self.assertNotIn(("test_rnmo", "horse"), project_state.models) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate forwards new_state = project_state.clone() atomic_rename = connection.features.supports_atomic_references_rename new_state = self.apply_operations("test_rnmo", new_state, [operation], atomic=atomic_rename) # Test new state and database self.assertNotIn(("test_rnmo", "pony"), new_state.models) self.assertIn(("test_rnmo", "horse"), new_state.models) # RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( new_state.models['test_rnmo', 'rider'].fields['pony'].remote_field.model, 'test_rnmo.Horse', ) self.assertTableNotExists("test_rnmo_pony") self.assertTableExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate backwards original_state = self.unapply_operations("test_rnmo", project_state, [operation], atomic=atomic_rename) # Test original state and database self.assertIn(("test_rnmo", "pony"), original_state.models) self.assertNotIn(("test_rnmo", "horse"), original_state.models) self.assertEqual( original_state.models['test_rnmo', 'rider'].fields['pony'].remote_field.model, 'Pony', ) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'old_name': "Pony", 'new_name': "Horse"}) def test_rename_model_state_forwards(self): """ RenameModel operations shouldn't trigger the caching of rendered apps on state without prior apps. """ state = ProjectState() state.add_model(ModelState('migrations', 'Foo', [])) operation = migrations.RenameModel('Foo', 'Bar') operation.state_forwards('migrations', state) self.assertNotIn('apps', state.__dict__) self.assertNotIn(('migrations', 'foo'), state.models) self.assertIn(('migrations', 'bar'), state.models) # Now with apps cached. apps = state.apps operation = migrations.RenameModel('Bar', 'Foo') operation.state_forwards('migrations', state) self.assertIs(state.apps, apps) self.assertNotIn(('migrations', 'bar'), state.models) self.assertIn(('migrations', 'foo'), state.models) def test_rename_model_with_self_referential_fk(self): """ Tests the RenameModel operation on model with self referential FK. 
""" project_state = self.set_up_test_model("test_rmwsrf", related_model=True) # Test the state alteration operation = migrations.RenameModel("Rider", "HorseRider") self.assertEqual(operation.describe(), "Rename model Rider to HorseRider") new_state = project_state.clone() operation.state_forwards("test_rmwsrf", new_state) self.assertNotIn(("test_rmwsrf", "rider"), new_state.models) self.assertIn(("test_rmwsrf", "horserider"), new_state.models) # Remember, RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( 'self', new_state.models["test_rmwsrf", "horserider"].fields['friend'].remote_field.model ) HorseRider = new_state.apps.get_model('test_rmwsrf', 'horserider') self.assertIs(HorseRider._meta.get_field('horserider').remote_field.model, HorseRider) # Test the database alteration self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_forwards("test_rmwsrf", editor, project_state, new_state) self.assertTableNotExists("test_rmwsrf_rider") self.assertTableExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_horserider", "id")) # And test reversal with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_backwards("test_rmwsrf", editor, new_state, project_state) self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) def test_rename_model_with_superclass_fk(self): """ Tests the RenameModel operation on a model which has a superclass that has a foreign key. 
""" project_state = self.set_up_test_model("test_rmwsc", related_model=True, mti_model=True) # Test the state alteration operation = migrations.RenameModel("ShetlandPony", "LittleHorse") self.assertEqual(operation.describe(), "Rename model ShetlandPony to LittleHorse") new_state = project_state.clone() operation.state_forwards("test_rmwsc", new_state) self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models) self.assertIn(("test_rmwsc", "littlehorse"), new_state.models) # RenameModel shouldn't repoint the superclass's relations, only local ones self.assertEqual( project_state.models['test_rmwsc', 'rider'].fields['pony'].remote_field.model, new_state.models['test_rmwsc', 'rider'].fields['pony'].remote_field.model, ) # Before running the migration we have a table for Shetland Pony, not Little Horse self.assertTableExists("test_rmwsc_shetlandpony") self.assertTableNotExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # and the foreign key on rider points to pony, not shetland pony self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id")) with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: operation.database_forwards("test_rmwsc", editor, project_state, new_state) # Now we have a little horse table, not shetland pony self.assertTableNotExists("test_rmwsc_shetlandpony") self.assertTableExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # but the Foreign keys still point at pony, not little horse self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id")) def test_rename_model_with_self_referential_m2m(self): app_label = "test_rename_model_with_self_referential_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("ReflexivePony", fields=[ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("self")), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("ReflexivePony", "ReflexivePony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "ReflexivePony2") pony = Pony.objects.create() pony.ponies.add(pony) def test_rename_model_with_m2m(self): app_label = "test_rename_model_with_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Pony", "Pony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "Pony2") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) 
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_m2m_target_model(self): app_label = "test_rename_m2m_target_model" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Rider", "Rider2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider2") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_m2m_through_model(self): app_label = "test_rename_through" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("PonyRider", fields=[ ("id", models.AutoField(primary_key=True)), ("rider", models.ForeignKey("test_rename_through.Rider", models.CASCADE)), ("pony", models.ForeignKey("test_rename_through.Pony", models.CASCADE)), ]), migrations.AddField( "Pony", "riders", models.ManyToManyField("test_rename_through.Rider", through="test_rename_through.PonyRider"), ), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider") pony = Pony.objects.create() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("PonyRider", "PonyRider2"), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider2") pony = Pony.objects.first() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) self.assertEqual(Pony.objects.count(), 1) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(PonyRider.objects.count(), 2) self.assertEqual(pony.riders.count(), 2) def test_rename_m2m_model_after_rename_field(self): """RenameModel renames a many-to-many column after a RenameField.""" app_label = 'test_rename_multiple' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=20)), ]), migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('test_rename_multiple.Pony', models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('riders', models.ManyToManyField('Rider')), ]), migrations.RenameField(model_name='pony', old_name='name', 
new_name='fancy_name'), migrations.RenameModel(old_name='Rider', new_name='Jockey'), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, 'Pony') Jockey = project_state.apps.get_model(app_label, 'Jockey') PonyRider = project_state.apps.get_model(app_label, 'PonyRider') # No "no such column" error means the column was renamed correctly. pony = Pony.objects.create(fancy_name='a good name') jockey = Jockey.objects.create(pony=pony) ponyrider = PonyRider.objects.create() ponyrider.riders.add(jockey) def test_add_field(self): """ Tests the AddField operation. """ # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=5), ) self.assertEqual(operation.describe(), "Add field height to Pony") self.assertEqual(operation.migration_name_fragment, 'pony_height') project_state, new_state = self.make_test_state("test_adfl", operation) self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4) field = new_state.models['test_adfl', 'pony'].fields['height'] self.assertEqual(field.default, 5) # Test the database alteration self.assertColumnNotExists("test_adfl_pony", "height") with connection.schema_editor() as editor: operation.database_forwards("test_adfl", editor, project_state, new_state) self.assertColumnExists("test_adfl_pony", "height") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adfl", editor, new_state, project_state) self.assertColumnNotExists("test_adfl_pony", "height") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"]) def test_add_charfield(self): """ Tests the AddField operation on TextField. """ project_state = self.set_up_test_model("test_adchfl") Pony = project_state.apps.get_model("test_adchfl", "Pony") pony = Pony.objects.create(weight=42) new_state = self.apply_operations("test_adchfl", project_state, [ migrations.AddField( "Pony", "text", models.CharField(max_length=10, default="some text"), ), migrations.AddField( "Pony", "empty", models.CharField(max_length=10, default=""), ), # If not properly quoted digits would be interpreted as an int. migrations.AddField( "Pony", "digits", models.CharField(max_length=10, default="42"), ), # Manual quoting is fragile and could trip on quotes. Refs #xyz. migrations.AddField( "Pony", "quotes", models.CharField(max_length=10, default='"\'"'), ), ]) Pony = new_state.apps.get_model("test_adchfl", "Pony") pony = Pony.objects.get(pk=pony.pk) self.assertEqual(pony.text, "some text") self.assertEqual(pony.empty, "") self.assertEqual(pony.digits, "42") self.assertEqual(pony.quotes, '"\'"') def test_add_textfield(self): """ Tests the AddField operation on TextField. """ project_state = self.set_up_test_model("test_adtxtfl") Pony = project_state.apps.get_model("test_adtxtfl", "Pony") pony = Pony.objects.create(weight=42) new_state = self.apply_operations("test_adtxtfl", project_state, [ migrations.AddField( "Pony", "text", models.TextField(default="some text"), ), migrations.AddField( "Pony", "empty", models.TextField(default=""), ), # If not properly quoted digits would be interpreted as an int. migrations.AddField( "Pony", "digits", models.TextField(default="42"), ), # Manual quoting is fragile and could trip on quotes. Refs #xyz. 
migrations.AddField( "Pony", "quotes", models.TextField(default='"\'"'), ), ]) Pony = new_state.apps.get_model("test_adtxtfl", "Pony") pony = Pony.objects.get(pk=pony.pk) self.assertEqual(pony.text, "some text") self.assertEqual(pony.empty, "") self.assertEqual(pony.digits, "42") self.assertEqual(pony.quotes, '"\'"') def test_add_binaryfield(self): """ Tests the AddField operation on TextField/BinaryField. """ project_state = self.set_up_test_model("test_adbinfl") Pony = project_state.apps.get_model("test_adbinfl", "Pony") pony = Pony.objects.create(weight=42) new_state = self.apply_operations("test_adbinfl", project_state, [ migrations.AddField( "Pony", "blob", models.BinaryField(default=b"some text"), ), migrations.AddField( "Pony", "empty", models.BinaryField(default=b""), ), # If not properly quoted digits would be interpreted as an int. migrations.AddField( "Pony", "digits", models.BinaryField(default=b"42"), ), # Manual quoting is fragile and could trip on quotes. Refs #xyz. migrations.AddField( "Pony", "quotes", models.BinaryField(default=b'"\'"'), ), ]) Pony = new_state.apps.get_model("test_adbinfl", "Pony") pony = Pony.objects.get(pk=pony.pk) # SQLite returns buffer/memoryview, cast to bytes for checking. self.assertEqual(bytes(pony.blob), b"some text") self.assertEqual(bytes(pony.empty), b"") self.assertEqual(bytes(pony.digits), b"42") self.assertEqual(bytes(pony.quotes), b'"\'"') def test_column_name_quoting(self): """ Column names that are SQL keywords shouldn't cause problems when used in migrations (#22168). """ project_state = self.set_up_test_model("test_regr22168") operation = migrations.AddField( "Pony", "order", models.IntegerField(default=0), ) new_state = project_state.clone() operation.state_forwards("test_regr22168", new_state) with connection.schema_editor() as editor: operation.database_forwards("test_regr22168", editor, project_state, new_state) self.assertColumnExists("test_regr22168_pony", "order") def test_add_field_preserve_default(self): """ Tests the AddField operation's state alteration when preserve_default = False. """ project_state = self.set_up_test_model("test_adflpd") # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=4), preserve_default=False, ) new_state = project_state.clone() operation.state_forwards("test_adflpd", new_state) self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4) field = new_state.models['test_adflpd', 'pony'].fields['height'] self.assertEqual(field.default, models.NOT_PROVIDED) # Test the database alteration project_state.apps.get_model("test_adflpd", "pony").objects.create( weight=4, ) self.assertColumnNotExists("test_adflpd_pony", "height") with connection.schema_editor() as editor: operation.database_forwards("test_adflpd", editor, project_state, new_state) self.assertColumnExists("test_adflpd_pony", "height") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name", "preserve_default"]) def test_add_field_m2m(self): """ Tests the AddField operation with a ManyToManyField. 
""" project_state = self.set_up_test_model("test_adflmm", second_model=True) # Test the state alteration operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) new_state = project_state.clone() operation.state_forwards("test_adflmm", new_state) self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4) # Test the database alteration self.assertTableNotExists("test_adflmm_pony_stables") with connection.schema_editor() as editor: operation.database_forwards("test_adflmm", editor, project_state, new_state) self.assertTableExists("test_adflmm_pony_stables") self.assertColumnNotExists("test_adflmm_pony", "stables") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_adflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.stables.create() self.assertEqual(p.stables.count(), 1) p.stables.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adflmm", editor, new_state, project_state) self.assertTableNotExists("test_adflmm_pony_stables") def test_alter_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertFalse(Pony._meta.get_field('stables').blank) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField( "Pony", "stables", models.ManyToManyField(to="Stable", related_name="ponies", blank=True) ) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertTrue(Pony._meta.get_field('stables').blank) def test_repoint_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True, third_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "places", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField("Pony", "places", models.ManyToManyField(to="Van", related_name="ponies")) ]) # Ensure the new field actually works Pony = project_state.apps.get_model("test_alflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.places.create() self.assertEqual(p.places.count(), 1) p.places.all().delete() def test_remove_field_m2m(self): project_state = self.set_up_test_model("test_rmflmm", second_model=True) project_state = self.apply_operations("test_rmflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) self.assertTableExists("test_rmflmm_pony_stables") with_field_state = project_state.clone() operations = [migrations.RemoveField("Pony", "stables")] project_state = self.apply_operations("test_rmflmm", project_state, operations=operations) self.assertTableNotExists("test_rmflmm_pony_stables") # And test reversal self.unapply_operations("test_rmflmm", with_field_state, operations=operations) self.assertTableExists("test_rmflmm_pony_stables") def test_remove_field_m2m_with_through(self): project_state = self.set_up_test_model("test_rmflmmwt", second_model=True) self.assertTableNotExists("test_rmflmmwt_ponystables") project_state = 
self.apply_operations("test_rmflmmwt", project_state, operations=[ migrations.CreateModel("PonyStables", fields=[ ("pony", models.ForeignKey('test_rmflmmwt.Pony', models.CASCADE)), ("stable", models.ForeignKey('test_rmflmmwt.Stable', models.CASCADE)), ]), migrations.AddField( "Pony", "stables", models.ManyToManyField("Stable", related_name="ponies", through='test_rmflmmwt.PonyStables') ) ]) self.assertTableExists("test_rmflmmwt_ponystables") operations = [migrations.RemoveField("Pony", "stables"), migrations.DeleteModel("PonyStables")] self.apply_operations("test_rmflmmwt", project_state, operations=operations) def test_remove_field(self): """ Tests the RemoveField operation. """ project_state = self.set_up_test_model("test_rmfl") # Test the state alteration operation = migrations.RemoveField("Pony", "pink") self.assertEqual(operation.describe(), "Remove field pink from Pony") self.assertEqual(operation.migration_name_fragment, 'remove_pony_pink') new_state = project_state.clone() operation.state_forwards("test_rmfl", new_state) self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2) # Test the database alteration self.assertColumnExists("test_rmfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_rmfl", editor, project_state, new_state) self.assertColumnNotExists("test_rmfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmfl", editor, new_state, project_state) self.assertColumnExists("test_rmfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RemoveField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': 'pink'}) def test_remove_fk(self): """ Tests the RemoveField operation on a foreign key. """ project_state = self.set_up_test_model("test_rfk", related_model=True) self.assertColumnExists("test_rfk_rider", "pony_id") operation = migrations.RemoveField("Rider", "pony") new_state = project_state.clone() operation.state_forwards("test_rfk", new_state) with connection.schema_editor() as editor: operation.database_forwards("test_rfk", editor, project_state, new_state) self.assertColumnNotExists("test_rfk_rider", "pony_id") with connection.schema_editor() as editor: operation.database_backwards("test_rfk", editor, new_state, project_state) self.assertColumnExists("test_rfk_rider", "pony_id") def test_alter_model_table(self): """ Tests the AlterModelTable operation. 
""" project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony_2") self.assertEqual(operation.describe(), "Rename table for Pony to test_almota_pony_2") self.assertEqual(operation.migration_name_fragment, 'alter_pony_table') new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2") # Test the database alteration self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableNotExists("test_almota_pony") self.assertTableExists("test_almota_pony_2") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelTable") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'table': "test_almota_pony_2"}) def test_alter_model_table_none(self): """ Tests the AlterModelTable operation if the table name is set to None. """ operation = migrations.AlterModelTable("Pony", None) self.assertEqual(operation.describe(), "Rename table for Pony to (default)") def test_alter_model_table_noop(self): """ Tests the AlterModelTable operation if the table name is not changed. """ project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony") new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony") # Test the database alteration self.assertTableExists("test_almota_pony") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableExists("test_almota_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") def test_alter_model_table_m2m(self): """ AlterModelTable should rename auto-generated M2M tables. """ app_label = "test_talflmltlm2m" pony_db_table = 'pony_foo' project_state = self.set_up_test_model(app_label, second_model=True, db_table=pony_db_table) # Add the M2M field first_state = project_state.clone() operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable")) operation.state_forwards(app_label, first_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, first_state) original_m2m_table = "%s_%s" % (pony_db_table, "stables") new_m2m_table = "%s_%s" % (app_label, "pony_stables") self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) # Rename the Pony db_table which should also rename the m2m table. 
second_state = first_state.clone() operation = migrations.AlterModelTable(name='pony', table=None) operation.state_forwards(app_label, second_state) atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_forwards(app_label, editor, first_state, second_state) self.assertTableExists(new_m2m_table) self.assertTableNotExists(original_m2m_table) # And test reversal with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_backwards(app_label, editor, second_state, first_state) self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) def test_alter_field(self): """ Tests the AlterField operation. """ project_state = self.set_up_test_model("test_alfl") # Test the state alteration operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) self.assertEqual(operation.describe(), "Alter field pink on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_pink') new_state = project_state.clone() operation.state_forwards("test_alfl", new_state) self.assertIs(project_state.models['test_alfl', 'pony'].fields['pink'].null, False) self.assertIs(new_state.models['test_alfl', 'pony'].fields['pink'].null, True) # Test the database alteration self.assertColumnNotNull("test_alfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_alfl", editor, project_state, new_state) self.assertColumnNull("test_alfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alfl", editor, new_state, project_state) self.assertColumnNotNull("test_alfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"]) def test_alter_field_add_db_column_noop(self): """ AlterField operation is a noop when adding only a db_column and the column name is not changed. 
""" app_label = 'test_afadbn' project_state = self.set_up_test_model(app_label, related_model=True) pony_table = '%s_pony' % app_label new_state = project_state.clone() operation = migrations.AlterField('Pony', 'weight', models.FloatField(db_column='weight')) operation.state_forwards(app_label, new_state) self.assertIsNone( project_state.models[app_label, 'pony'].fields['weight'].db_column, ) self.assertEqual( new_state.models[app_label, 'pony'].fields['weight'].db_column, 'weight', ) self.assertColumnExists(pony_table, 'weight') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards(app_label, editor, project_state, new_state) self.assertColumnExists(pony_table, 'weight') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_backwards(app_label, editor, new_state, project_state) self.assertColumnExists(pony_table, 'weight') rider_table = '%s_rider' % app_label new_state = project_state.clone() operation = migrations.AlterField( 'Rider', 'pony', models.ForeignKey('Pony', models.CASCADE, db_column='pony_id'), ) operation.state_forwards(app_label, new_state) self.assertIsNone( project_state.models[app_label, 'rider'].fields['pony'].db_column, ) self.assertIs( new_state.models[app_label, 'rider'].fields['pony'].db_column, 'pony_id', ) self.assertColumnExists(rider_table, 'pony_id') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards(app_label, editor, project_state, new_state) self.assertColumnExists(rider_table, 'pony_id') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards(app_label, editor, new_state, project_state) self.assertColumnExists(rider_table, 'pony_id') def test_alter_field_pk(self): """ Tests the AlterField operation on primary keys (for things like PostgreSQL's SERIAL weirdness) """ project_state = self.set_up_test_model("test_alflpk") # Test the state alteration operation = migrations.AlterField("Pony", "id", models.IntegerField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpk", new_state) self.assertIsInstance( project_state.models['test_alflpk', 'pony'].fields['id'], models.AutoField, ) self.assertIsInstance( new_state.models['test_alflpk', 'pony'].fields['id'], models.IntegerField, ) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpk", editor, project_state, new_state) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpk", editor, new_state, project_state) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_pk_fk(self): """ Tests the AlterField operation on primary keys changes any FKs pointing to it. 
""" project_state = self.set_up_test_model("test_alflpkfk", related_model=True) project_state = self.apply_operations('test_alflpkfk', project_state, [ migrations.CreateModel('Stable', fields=[ ('ponies', models.ManyToManyField('Pony')), ]), migrations.AddField( 'Pony', 'stables', models.ManyToManyField('Stable'), ), ]) # Test the state alteration operation = migrations.AlterField("Pony", "id", models.FloatField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpkfk", new_state) self.assertIsInstance( project_state.models['test_alflpkfk', 'pony'].fields['id'], models.AutoField, ) self.assertIsInstance( new_state.models['test_alflpkfk', 'pony'].fields['id'], models.FloatField, ) def assertIdTypeEqualsFkType(): with connection.cursor() as cursor: id_type, id_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_pony") if c.name == "id" ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_rider") if c.name == "pony_id" ][0] m2m_fk_type, m2m_fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description( cursor, 'test_alflpkfk_pony_stables', ) if c.name == 'pony_id' ][0] remote_m2m_fk_type, remote_m2m_fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description( cursor, 'test_alflpkfk_stable_ponies', ) if c.name == 'pony_id' ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_type, m2m_fk_type) self.assertEqual(id_type, remote_m2m_fk_type) self.assertEqual(id_null, fk_null) self.assertEqual(id_null, m2m_fk_null) self.assertEqual(id_null, remote_m2m_fk_null) assertIdTypeEqualsFkType() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpkfk", editor, project_state, new_state) assertIdTypeEqualsFkType() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpkfk", editor, new_state, project_state) assertIdTypeEqualsFkType() @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_reloads_state_on_fk_with_to_field_target_type_change(self): app_label = 'test_alflrsfkwtflttc' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('code', models.IntegerField(unique=True)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='code')), ]), ]) operation = migrations.AlterField( 'Rider', 'code', models.CharField(max_length=100, unique=True), ) self.apply_operations(app_label, project_state, operations=[operation]) id_type, id_null = [ (c.type_code, c.null_ok) for c in self.get_table_description('%s_rider' % app_label) if c.name == 'code' ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in self.get_table_description('%s_pony' % app_label) if c.name == 'rider_id' ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_null, fk_null) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_reloads_state_on_fk_with_to_field_related_name_target_type_change(self): app_label = 'test_alflrsfkwtflrnttc' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('code', models.PositiveIntegerField(unique=True)), ]), 
migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('rider', models.ForeignKey( '%s.Rider' % app_label, models.CASCADE, to_field='code', related_name='+', )), ]), ]) operation = migrations.AlterField( 'Rider', 'code', models.CharField(max_length=100, unique=True), ) self.apply_operations(app_label, project_state, operations=[operation]) def test_alter_field_reloads_state_on_fk_target_changes(self): """ If AlterField doesn't reload state appropriately, the second AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. """ app_label = 'alter_alter_field_reloads_state_on_fk_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.AlterField('Rider', 'id', models.CharField(primary_key=True, max_length=99)), migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)), ]) def test_alter_field_reloads_state_on_fk_with_to_field_target_changes(self): """ If AlterField doesn't reload state appropriately, the second AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. """ app_label = 'alter_alter_field_reloads_state_on_fk_with_to_field_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('slug', models.CharField(unique=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='slug')), ('slug', models.CharField(unique=True, max_length=100)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE, to_field='slug')), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.AlterField('Rider', 'slug', models.CharField(unique=True, max_length=99)), migrations.AlterField('Pony', 'slug', models.CharField(unique=True, max_length=99)), ]) def test_rename_field_reloads_state_on_fk_target_changes(self): """ If RenameField doesn't reload state appropriately, the AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. 
""" app_label = 'alter_rename_field_reloads_state_on_fk_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameField('Rider', 'id', 'id2'), migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)), ], atomic=connection.features.supports_atomic_references_rename) def test_rename_field(self): """ Tests the RenameField operation. """ project_state = self.set_up_test_model("test_rnfl", unique_together=True, index_together=True) # Test the state alteration operation = migrations.RenameField("Pony", "pink", "blue") self.assertEqual(operation.describe(), "Rename field pink on Pony to blue") self.assertEqual(operation.migration_name_fragment, 'rename_pink_pony_blue') new_state = project_state.clone() operation.state_forwards("test_rnfl", new_state) self.assertIn("blue", new_state.models["test_rnfl", "pony"].fields) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].fields) # Make sure the unique_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) # Make sure the index_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['index_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['index_together'][0]) # Test the database alteration self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") with connection.schema_editor() as editor: operation.database_forwards("test_rnfl", editor, project_state, new_state) self.assertColumnExists("test_rnfl_pony", "blue") self.assertColumnNotExists("test_rnfl_pony", "pink") # Ensure the unique constraint has been ported over with connection.cursor() as cursor: cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_rnfl_pony") # Ensure the index constraint has been ported over self.assertIndexExists("test_rnfl_pony", ["weight", "blue"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rnfl", editor, new_state, project_state) self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") # Ensure the index constraint has been reset self.assertIndexExists("test_rnfl_pony", ["weight", "pink"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'old_name': "pink", 'new_name': "blue"}) def test_rename_field_with_db_column(self): project_state = self.apply_operations('test_rfwdbc', ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', 
models.AutoField(primary_key=True)), ('field', models.IntegerField(db_column='db_field')), ('fk_field', models.ForeignKey( 'Pony', models.CASCADE, db_column='db_fk_field', )), ]), ]) new_state = project_state.clone() operation = migrations.RenameField('Pony', 'field', 'renamed_field') operation.state_forwards('test_rfwdbc', new_state) self.assertIn('renamed_field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertNotIn('field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertColumnExists('test_rfwdbc_pony', 'db_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards('test_rfwdbc', editor, project_state, new_state) self.assertColumnExists('test_rfwdbc_pony', 'db_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_backwards('test_rfwdbc', editor, new_state, project_state) self.assertColumnExists('test_rfwdbc_pony', 'db_field') new_state = project_state.clone() operation = migrations.RenameField('Pony', 'fk_field', 'renamed_fk_field') operation.state_forwards('test_rfwdbc', new_state) self.assertIn('renamed_fk_field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertNotIn('fk_field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertColumnExists('test_rfwdbc_pony', 'db_fk_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards('test_rfwdbc', editor, project_state, new_state) self.assertColumnExists('test_rfwdbc_pony', 'db_fk_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_backwards('test_rfwdbc', editor, new_state, project_state) self.assertColumnExists('test_rfwdbc_pony', 'db_fk_field') def test_rename_field_case(self): project_state = self.apply_operations('test_rfmx', ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField()), ]), ]) new_state = project_state.clone() operation = migrations.RenameField('Pony', 'field', 'FiElD') operation.state_forwards('test_rfmx', new_state) self.assertIn('FiElD', new_state.models['test_rfmx', 'pony'].fields) self.assertColumnExists('test_rfmx_pony', 'field') with connection.schema_editor() as editor: operation.database_forwards('test_rfmx', editor, project_state, new_state) self.assertColumnExists( 'test_rfmx_pony', connection.introspection.identifier_converter('FiElD'), ) with connection.schema_editor() as editor: operation.database_backwards('test_rfmx', editor, new_state, project_state) self.assertColumnExists('test_rfmx_pony', 'field') def test_rename_missing_field(self): state = ProjectState() state.add_model(ModelState('app', 'model', [])) with self.assertRaisesMessage(FieldDoesNotExist, "app.model has no field named 'field'"): migrations.RenameField('model', 'field', 'new_field').state_forwards('app', state) def test_rename_referenced_field_state_forward(self): state = ProjectState() state.add_model(ModelState('app', 'Model', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField(unique=True)), ])) state.add_model(ModelState('app', 'OtherModel', [ ('id', models.AutoField(primary_key=True)), ('fk', models.ForeignKey('Model', models.CASCADE, to_field='field')), ('fo', models.ForeignObject('Model', models.CASCADE, from_fields=('fk',), to_fields=('field',))), ])) operation = migrations.RenameField('Model', 'field', 'renamed') new_state = state.clone() operation.state_forwards('app', new_state) 
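        # Renaming the referenced field should cascade through the project
        # state: the FK's to_field and the ForeignObject's to_fields must now
        # point at the new name, as the assertions below verify.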
self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].remote_field.field_name, 'renamed') self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].from_fields, ['self']) self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].to_fields, ('renamed',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].from_fields, ('fk',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].to_fields, ('renamed',)) operation = migrations.RenameField('OtherModel', 'fk', 'renamed_fk') new_state = state.clone() operation.state_forwards('app', new_state) self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].remote_field.field_name, 'renamed') self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].from_fields, ('self',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].to_fields, ('renamed',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].from_fields, ('renamed_fk',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].to_fields, ('renamed',)) def test_alter_unique_together(self): """ Tests the AlterUniqueTogether operation. """ project_state = self.set_up_test_model("test_alunto") # Test the state alteration operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")]) self.assertEqual(operation.describe(), "Alter unique_together for Pony (1 constraint(s))") self.assertEqual( operation.migration_name_fragment, 'alter_pony_unique_together', ) new_state = project_state.clone() operation.state_forwards("test_alunto", new_state) self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # Make sure we can insert duplicate rows with connection.cursor() as cursor: cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alunto", editor, project_state, new_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alunto", editor, new_state, project_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test flat unique_together operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight")) operation.state_forwards("test_alunto", new_state) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterUniqueTogether") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'unique_together': {("pink", "weight")}}) def test_alter_unique_together_remove(self): operation = migrations.AlterUniqueTogether("Pony", None) self.assertEqual(operation.describe(), "Alter unique_together for Pony (0 constraint(s))") def 
test_add_index(self): """ Test the AddIndex operation. """ project_state = self.set_up_test_model("test_adin") msg = ( "Indexes passed to AddIndex operations require a name argument. " "<Index: fields=['pink']> doesn't have one." ) with self.assertRaisesMessage(ValueError, msg): migrations.AddIndex("Pony", models.Index(fields=["pink"])) index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx") operation = migrations.AddIndex("Pony", index) self.assertEqual(operation.describe(), "Create index test_adin_pony_pink_idx on field(s) pink of model Pony") self.assertEqual( operation.migration_name_fragment, 'pony_test_adin_pony_pink_idx', ) new_state = project_state.clone() operation.state_forwards("test_adin", new_state) # Test the database alteration self.assertEqual(len(new_state.models["test_adin", "pony"].options['indexes']), 1) self.assertIndexNotExists("test_adin_pony", ["pink"]) with connection.schema_editor() as editor: operation.database_forwards("test_adin", editor, project_state, new_state) self.assertIndexExists("test_adin_pony", ["pink"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adin", editor, new_state, project_state) self.assertIndexNotExists("test_adin_pony", ["pink"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddIndex") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'index': index}) def test_remove_index(self): """ Test the RemoveIndex operation. """ project_state = self.set_up_test_model("test_rmin", multicol_index=True) self.assertTableExists("test_rmin_pony") self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) operation = migrations.RemoveIndex("Pony", "pony_test_idx") self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony") self.assertEqual( operation.migration_name_fragment, 'remove_pony_pony_test_idx', ) new_state = project_state.clone() operation.state_forwards("test_rmin", new_state) # Test the state alteration self.assertEqual(len(new_state.models["test_rmin", "pony"].options['indexes']), 0) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_rmin", editor, project_state, new_state) self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmin", editor, new_state, project_state) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RemoveIndex") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': "pony_test_idx"}) # Also test a field dropped with index - sqlite remake issue operations = [ migrations.RemoveIndex("Pony", "pony_test_idx"), migrations.RemoveField("Pony", "pink"), ] self.assertColumnExists("test_rmin_pony", "pink") self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # Test database alteration new_state = project_state.clone() self.apply_operations('test_rmin', new_state, operations=operations) self.assertColumnNotExists("test_rmin_pony", "pink") self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"]) # And test reversal self.unapply_operations("test_rmin", project_state, operations=operations) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) def test_add_index_state_forwards(self): 
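        # state_forwards() should re-render the model, so the model class
        # fetched before the operation is a different object afterwards.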
project_state = self.set_up_test_model('test_adinsf') index = models.Index(fields=['pink'], name='test_adinsf_pony_pink_idx') old_model = project_state.apps.get_model('test_adinsf', 'Pony') new_state = project_state.clone() operation = migrations.AddIndex('Pony', index) operation.state_forwards('test_adinsf', new_state) new_model = new_state.apps.get_model('test_adinsf', 'Pony') self.assertIsNot(old_model, new_model) def test_remove_index_state_forwards(self): project_state = self.set_up_test_model('test_rminsf') index = models.Index(fields=['pink'], name='test_rminsf_pony_pink_idx') migrations.AddIndex('Pony', index).state_forwards('test_rminsf', project_state) old_model = project_state.apps.get_model('test_rminsf', 'Pony') new_state = project_state.clone() operation = migrations.RemoveIndex('Pony', 'test_rminsf_pony_pink_idx') operation.state_forwards('test_rminsf', new_state) new_model = new_state.apps.get_model('test_rminsf', 'Pony') self.assertIsNot(old_model, new_model) @skipUnlessDBFeature('supports_expression_indexes') def test_add_func_index(self): app_label = 'test_addfuncin' index_name = f'{app_label}_pony_abs_idx' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label) index = models.Index(Abs('weight'), name=index_name) operation = migrations.AddIndex('Pony', index) self.assertEqual( operation.describe(), 'Create index test_addfuncin_pony_abs_idx on Abs(F(weight)) on model Pony', ) self.assertEqual( operation.migration_name_fragment, 'pony_test_addfuncin_pony_abs_idx', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['indexes']), 1) self.assertIndexNameNotExists(table_name, index_name) # Add index. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNameExists(table_name, index_name) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertIndexNameNotExists(table_name, index_name) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddIndex') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'index': index}) @skipUnlessDBFeature('supports_expression_indexes') def test_remove_func_index(self): app_label = 'test_rmfuncin' index_name = f'{app_label}_pony_abs_idx' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label, indexes=[ models.Index(Abs('weight'), name=index_name), ]) self.assertTableExists(table_name) self.assertIndexNameExists(table_name, index_name) operation = migrations.RemoveIndex('Pony', index_name) self.assertEqual( operation.describe(), 'Remove index test_rmfuncin_pony_abs_idx from Pony', ) self.assertEqual( operation.migration_name_fragment, 'remove_pony_test_rmfuncin_pony_abs_idx', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['indexes']), 0) # Remove index. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNameNotExists(table_name, index_name) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertIndexNameExists(table_name, index_name) # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveIndex') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'name': index_name}) def test_alter_field_with_index(self): """ Test AlterField operation with an index to ensure indexes created via Meta.indexes don't get dropped with sqlite3 remake. """ project_state = self.set_up_test_model("test_alflin", index=True) operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) new_state = project_state.clone() operation.state_forwards("test_alflin", new_state) # Test the database alteration self.assertColumnNotNull("test_alflin_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_alflin", editor, project_state, new_state) # Index hasn't been dropped self.assertIndexExists("test_alflin_pony", ["pink"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflin", editor, new_state, project_state) # Ensure the index is still there self.assertIndexExists("test_alflin_pony", ["pink"]) def test_alter_index_together(self): """ Tests the AlterIndexTogether operation. """ project_state = self.set_up_test_model("test_alinto") # Test the state alteration operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")]) self.assertEqual(operation.describe(), "Alter index_together for Pony (1 constraint(s))") self.assertEqual( operation.migration_name_fragment, 'alter_pony_index_together', ) new_state = project_state.clone() operation.state_forwards("test_alinto", new_state) self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0) self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1) # Make sure there's no matching index self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alinto", editor, project_state, new_state) self.assertIndexExists("test_alinto_pony", ["pink", "weight"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alinto", editor, new_state, project_state) self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterIndexTogether") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'index_together': {("pink", "weight")}}) def test_alter_index_together_remove(self): operation = migrations.AlterIndexTogether("Pony", None) self.assertEqual(operation.describe(), "Alter index_together for Pony (0 constraint(s))") @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_alter_index_together_remove_with_unique_together(self): app_label = 'test_alintoremove_wunto' table_name = '%s_pony' % app_label project_state = self.set_up_test_model(app_label, unique_together=True) self.assertUniqueConstraintExists(table_name, ['pink', 'weight']) # Add index together. new_state = project_state.clone() operation = migrations.AlterIndexTogether('Pony', [('pink', 'weight')]) operation.state_forwards(app_label, new_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexExists(table_name, ['pink', 'weight']) # Remove index together. 
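        # (The unique_together constraint on the same columns must survive the
        # removal; the final assertion below checks that.)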
project_state = new_state new_state = project_state.clone() operation = migrations.AlterIndexTogether('Pony', set()) operation.state_forwards(app_label, new_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNotExists(table_name, ['pink', 'weight']) self.assertUniqueConstraintExists(table_name, ['pink', 'weight']) @skipUnlessDBFeature('supports_table_check_constraints') def test_add_constraint(self): project_state = self.set_up_test_model("test_addconstraint") gt_check = models.Q(pink__gt=2) gt_constraint = models.CheckConstraint(check=gt_check, name="test_add_constraint_pony_pink_gt_2") gt_operation = migrations.AddConstraint("Pony", gt_constraint) self.assertEqual( gt_operation.describe(), "Create constraint test_add_constraint_pony_pink_gt_2 on model Pony" ) self.assertEqual( gt_operation.migration_name_fragment, 'pony_test_add_constraint_pony_pink_gt_2', ) # Test the state alteration new_state = project_state.clone() gt_operation.state_forwards("test_addconstraint", new_state) self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 1) Pony = new_state.apps.get_model("test_addconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 1) # Test the database alteration with connection.schema_editor() as editor: gt_operation.database_forwards("test_addconstraint", editor, project_state, new_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=1.0) # Add another one. lt_check = models.Q(pink__lt=100) lt_constraint = models.CheckConstraint(check=lt_check, name="test_add_constraint_pony_pink_lt_100") lt_operation = migrations.AddConstraint("Pony", lt_constraint) lt_operation.state_forwards("test_addconstraint", new_state) self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 2) Pony = new_state.apps.get_model("test_addconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 2) with connection.schema_editor() as editor: lt_operation.database_forwards("test_addconstraint", editor, project_state, new_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=100, weight=1.0) # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards("test_addconstraint", editor, new_state, project_state) Pony.objects.create(pink=1, weight=1.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], "AddConstraint") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'constraint': gt_constraint}) @skipUnlessDBFeature('supports_table_check_constraints') def test_add_constraint_percent_escaping(self): app_label = 'add_constraint_string_quoting' operations = [ migrations.CreateModel( 'Author', fields=[ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=100)), ('surname', models.CharField(max_length=100, default='')), ('rebate', models.CharField(max_length=100)), ], ), ] from_state = self.apply_operations(app_label, ProjectState(), operations) # "%" generated in startswith lookup should be escaped in a way that is # considered a leading wildcard. 
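        # (The schema editor executes the constraint DDL without query
        # parameters, so any "%" baked into the SQL must not be left in a form
        # the driver could mistake for a placeholder.)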
        check = models.Q(name__startswith='Albert')
        constraint = models.CheckConstraint(check=check, name='name_constraint')
        operation = migrations.AddConstraint('Author', constraint)
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Author.objects.create(name='Artur')
        # Literal "%" should be escaped in a way that is not considered a
        # wildcard.
        check = models.Q(rebate__endswith='%')
        constraint = models.CheckConstraint(check=check, name='rebate_constraint')
        operation = migrations.AddConstraint('Author', constraint)
        from_state = to_state
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Author.objects.create(name='Albert', rebate='10$')
        author = Author.objects.create(name='Albert', rebate='10%')
        self.assertEqual(Author.objects.get(), author)
        # Right-hand-side baked "%" literals should not be used for parameter
        # interpolation.
        check = ~models.Q(surname__startswith=models.F('name'))
        constraint = models.CheckConstraint(check=check, name='name_constraint_rhs')
        operation = migrations.AddConstraint('Author', constraint)
        from_state = to_state
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Author.objects.create(name='Albert', surname='Alberto')

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_add_or_constraint(self):
        app_label = 'test_addorconstraint'
        constraint_name = 'add_constraint_or'
        from_state = self.set_up_test_model(app_label)
        check = models.Q(pink__gt=2, weight__gt=2) | models.Q(weight__lt=0)
        constraint = models.CheckConstraint(check=check, name=constraint_name)
        operation = migrations.AddConstraint('Pony', constraint)
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Pony = to_state.apps.get_model(app_label, 'Pony')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=2, weight=3.0)
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=3, weight=1.0)
        Pony.objects.bulk_create([
            Pony(pink=3, weight=-1.0),
            Pony(pink=1, weight=-1.0),
            Pony(pink=3, weight=3.0),
        ])

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_add_constraint_combinable(self):
        app_label = 'test_addconstraint_combinable'
        operations = [
            migrations.CreateModel(
                'Book',
                fields=[
                    ('id', models.AutoField(primary_key=True)),
                    ('read', models.PositiveIntegerField()),
                    ('unread', models.PositiveIntegerField()),
                ],
            ),
        ]
        from_state = self.apply_operations(app_label, ProjectState(), operations)
        constraint = models.CheckConstraint(
            check=models.Q(read=(100 - models.F('unread'))),
            name='test_addconstraint_combinable_sum_100',
        )
        operation = migrations.AddConstraint('Book', constraint)
        to_state =
from_state.clone() operation.state_forwards(app_label, to_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Book = to_state.apps.get_model(app_label, 'Book') with self.assertRaises(IntegrityError), transaction.atomic(): Book.objects.create(read=70, unread=10) Book.objects.create(read=70, unread=30) @skipUnlessDBFeature('supports_table_check_constraints') def test_remove_constraint(self): project_state = self.set_up_test_model("test_removeconstraint", constraints=[ models.CheckConstraint(check=models.Q(pink__gt=2), name="test_remove_constraint_pony_pink_gt_2"), models.CheckConstraint(check=models.Q(pink__lt=100), name="test_remove_constraint_pony_pink_lt_100"), ]) gt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_gt_2") self.assertEqual( gt_operation.describe(), "Remove constraint test_remove_constraint_pony_pink_gt_2 from model Pony" ) self.assertEqual( gt_operation.migration_name_fragment, 'remove_pony_test_remove_constraint_pony_pink_gt_2', ) # Test state alteration new_state = project_state.clone() gt_operation.state_forwards("test_removeconstraint", new_state) self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 1) Pony = new_state.apps.get_model("test_removeconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 1) # Test database alteration with connection.schema_editor() as editor: gt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state) Pony.objects.create(pink=1, weight=1.0).delete() with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=100, weight=1.0) # Remove the other one. lt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_lt_100") lt_operation.state_forwards("test_removeconstraint", new_state) self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 0) Pony = new_state.apps.get_model("test_removeconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor: lt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state) Pony.objects.create(pink=100, weight=1.0).delete() # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards("test_removeconstraint", editor, new_state, project_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=1.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], "RemoveConstraint") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': "test_remove_constraint_pony_pink_gt_2"}) def test_add_partial_unique_constraint(self): project_state = self.set_up_test_model('test_addpartialuniqueconstraint') partial_unique_constraint = models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ) operation = migrations.AddConstraint('Pony', partial_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint test_constraint_pony_pink_for_weight_gt_5_uniq ' 'on model Pony' ) # Test the state alteration new_state = project_state.clone() operation.state_forwards('test_addpartialuniqueconstraint', new_state) self.assertEqual(len(new_state.models['test_addpartialuniqueconstraint', 'pony'].options['constraints']), 1) Pony = 
new_state.apps.get_model('test_addpartialuniqueconstraint', 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards('test_addpartialuniqueconstraint', editor, project_state, new_state) # Test constraint works Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test reversal with connection.schema_editor() as editor: operation.database_backwards('test_addpartialuniqueconstraint', editor, new_state, project_state) # Test constraint doesn't work Pony.objects.create(pink=1, weight=7.0) # Test deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'constraint': partial_unique_constraint}) def test_remove_partial_unique_constraint(self): project_state = self.set_up_test_model('test_removepartialuniqueconstraint', constraints=[ models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ), ]) gt_operation = migrations.RemoveConstraint('Pony', 'test_constraint_pony_pink_for_weight_gt_5_uniq') self.assertEqual( gt_operation.describe(), 'Remove constraint test_constraint_pony_pink_for_weight_gt_5_uniq from model Pony' ) # Test state alteration new_state = project_state.clone() gt_operation.state_forwards('test_removepartialuniqueconstraint', new_state) self.assertEqual(len(new_state.models['test_removepartialuniqueconstraint', 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model('test_removepartialuniqueconstraint', 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) # Test database alteration with connection.schema_editor() as editor: gt_operation.database_forwards('test_removepartialuniqueconstraint', editor, project_state, new_state) # Test constraint doesn't work Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) Pony.objects.create(pink=1, weight=7.0).delete() # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards('test_removepartialuniqueconstraint', editor, new_state, project_state) # Test constraint works if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'test_constraint_pony_pink_for_weight_gt_5_uniq', }) def test_add_deferred_unique_constraint(self): app_label = 'test_adddeferred_uc' project_state = self.set_up_test_model(app_label) deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferred_pink_constraint_add', deferrable=models.Deferrable.DEFERRED, ) operation = migrations.AddConstraint('Pony', deferred_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint deferred_pink_constraint_add on model Pony', ) # Add constraint. 
new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) with connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx: operation.database_forwards(app_label, editor, project_state, new_state) Pony.objects.create(pink=1, weight=4.0) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 3 obj.save() else: self.assertEqual(len(ctx), 0) Pony.objects.create(pink=1, weight=4.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual( definition[2], {'model_name': 'Pony', 'constraint': deferred_unique_constraint}, ) def test_remove_deferred_unique_constraint(self): app_label = 'test_removedeferred_uc' deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferred_pink_constraint_rm', deferrable=models.Deferrable.DEFERRED, ) project_state = self.set_up_test_model(app_label, constraints=[deferred_unique_constraint]) operation = migrations.RemoveConstraint('Pony', deferred_unique_constraint.name) self.assertEqual( operation.describe(), 'Remove constraint deferred_pink_constraint_rm from model Pony', ) # Remove constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx: operation.database_forwards(app_label, editor, project_state, new_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0).delete() if not connection.features.supports_deferrable_unique_constraints: self.assertEqual(len(ctx), 0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 3 obj.save() else: Pony.objects.create(pink=1, weight=4.0) # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'deferred_pink_constraint_rm', }) def test_add_covering_unique_constraint(self): app_label = 'test_addcovering_uc' project_state = self.set_up_test_model(app_label) covering_unique_constraint = models.UniqueConstraint( fields=['pink'], name='covering_pink_constraint_add', include=['weight'], ) operation = migrations.AddConstraint('Pony', covering_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint covering_pink_constraint_add on model Pony', ) # Add constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) with connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx: operation.database_forwards(app_label, editor, project_state, new_state) Pony.objects.create(pink=1, weight=4.0) if connection.features.supports_covering_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=4.0) else: self.assertEqual(len(ctx), 0) Pony.objects.create(pink=1, weight=4.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual( definition[2], {'model_name': 'Pony', 'constraint': covering_unique_constraint}, ) def test_remove_covering_unique_constraint(self): app_label = 'test_removecovering_uc' covering_unique_constraint = models.UniqueConstraint( fields=['pink'], name='covering_pink_constraint_rm', include=['weight'], ) project_state = self.set_up_test_model(app_label, constraints=[covering_unique_constraint]) operation = migrations.RemoveConstraint('Pony', covering_unique_constraint.name) self.assertEqual( operation.describe(), 'Remove constraint covering_pink_constraint_rm from model Pony', ) # Remove constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx: operation.database_forwards(app_label, editor, project_state, new_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0).delete() if not connection.features.supports_covering_indexes: self.assertEqual(len(ctx), 0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_covering_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=4.0) else: Pony.objects.create(pink=1, weight=4.0) # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'covering_pink_constraint_rm', }) def test_add_func_unique_constraint(self): app_label = 'test_adfuncuc' constraint_name = f'{app_label}_pony_abs_uq' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label) constraint = models.UniqueConstraint(Abs('weight'), name=constraint_name) operation = migrations.AddConstraint('Pony', constraint) self.assertEqual( operation.describe(), 'Create constraint test_adfuncuc_pony_abs_uq on model Pony', ) self.assertEqual( operation.migration_name_fragment, 'pony_test_adfuncuc_pony_abs_uq', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1) self.assertIndexNameNotExists(table_name, constraint_name) # Add constraint. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) Pony = new_state.apps.get_model(app_label, 'Pony') Pony.objects.create(weight=4.0) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) with self.assertRaises(IntegrityError): Pony.objects.create(weight=-4.0) else: self.assertIndexNameNotExists(table_name, constraint_name) Pony.objects.create(weight=-4.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertIndexNameNotExists(table_name, constraint_name) # Constraint doesn't work. Pony.objects.create(weight=-4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual( definition[2], {'model_name': 'Pony', 'constraint': constraint}, ) def test_remove_func_unique_constraint(self): app_label = 'test_rmfuncuc' constraint_name = f'{app_label}_pony_abs_uq' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label, constraints=[ models.UniqueConstraint(Abs('weight'), name=constraint_name), ]) self.assertTableExists(table_name) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) operation = migrations.RemoveConstraint('Pony', constraint_name) self.assertEqual( operation.describe(), 'Remove constraint test_rmfuncuc_pony_abs_uq from model Pony', ) self.assertEqual( operation.migration_name_fragment, 'remove_pony_test_rmfuncuc_pony_abs_uq', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) # Remove constraint. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNameNotExists(table_name, constraint_name) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=-4.0).delete() # Reversal. 
with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) with self.assertRaises(IntegrityError): Pony.objects.create(weight=-4.0) else: self.assertIndexNameNotExists(table_name, constraint_name) Pony.objects.create(weight=-4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'name': constraint_name}) def test_alter_model_options(self): """ Tests the AlterModelOptions operation. """ project_state = self.set_up_test_model("test_almoop") # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {"permissions": [("can_groom", "Can groom")]}) self.assertEqual(operation.describe(), "Change Meta options on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_options') new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(new_state.models["test_almoop", "pony"].options["permissions"][0][0], "can_groom") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {"permissions": [("can_groom", "Can groom")]}}) def test_alter_model_options_emptying(self): """ The AlterModelOptions operation removes keys from the dict (#23121) """ project_state = self.set_up_test_model("test_almoop", options=True) # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {}) self.assertEqual(operation.describe(), "Change Meta options on Pony") new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {}}) def test_alter_order_with_respect_to(self): """ Tests the AlterOrderWithRespectTo operation. 
""" project_state = self.set_up_test_model("test_alorwrtto", related_model=True) # Test the state alteration operation = migrations.AlterOrderWithRespectTo("Rider", "pony") self.assertEqual(operation.describe(), "Set order_with_respect_to on Rider to pony") self.assertEqual( operation.migration_name_fragment, 'alter_rider_order_with_respect_to', ) new_state = project_state.clone() operation.state_forwards("test_alorwrtto", new_state) self.assertIsNone( project_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None) ) self.assertEqual( new_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None), "pony" ) # Make sure there's no matching index self.assertColumnNotExists("test_alorwrtto_rider", "_order") # Create some rows before alteration rendered_state = project_state.apps pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(weight=50) rider1 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony) rider1.friend = rider1 rider1.save() rider2 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony) rider2.friend = rider2 rider2.save() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alorwrtto", editor, project_state, new_state) self.assertColumnExists("test_alorwrtto_rider", "_order") # Check for correct value in rows updated_riders = new_state.apps.get_model("test_alorwrtto", "Rider").objects.all() self.assertEqual(updated_riders[0]._order, 0) self.assertEqual(updated_riders[1]._order, 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alorwrtto", editor, new_state, project_state) self.assertColumnNotExists("test_alorwrtto_rider", "_order") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterOrderWithRespectTo") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Rider", 'order_with_respect_to': "pony"}) def test_alter_model_managers(self): """ The managers on a model are set. 
""" project_state = self.set_up_test_model("test_almoma") # Test the state alteration operation = migrations.AlterModelManagers( "Pony", managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Change managers on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_managers') managers = project_state.models["test_almoma", "pony"].managers self.assertEqual(managers, []) new_state = project_state.clone() operation.state_forwards("test_almoma", new_state) self.assertIn(("test_almoma", "pony"), new_state.models) managers = new_state.models["test_almoma", "pony"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) rendered_state = new_state.apps model = rendered_state.get_model('test_almoma', 'pony') self.assertIsInstance(model.food_qs, models.Manager) self.assertIsInstance(model.food_mgr, FoodManager) self.assertIsInstance(model.food_mgr_kwargs, FoodManager) def test_alter_model_managers_emptying(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_almomae", manager_model=True) # Test the state alteration operation = migrations.AlterModelManagers("Food", managers=[]) self.assertEqual(operation.describe(), "Change managers on Food") self.assertIn(("test_almomae", "food"), project_state.models) managers = project_state.models["test_almomae", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) new_state = project_state.clone() operation.state_forwards("test_almomae", new_state) managers = new_state.models["test_almomae", "food"].managers self.assertEqual(managers, []) def test_alter_fk(self): """ Creating and then altering an FK works correctly and deals with the pending SQL (#23091) """ project_state = self.set_up_test_model("test_alfk") # Test adding and then altering the FK in one go create_operation = migrations.CreateModel( name="Rider", fields=[ ("id", models.AutoField(primary_key=True)), ("pony", models.ForeignKey("Pony", models.CASCADE)), ], ) create_state = project_state.clone() create_operation.state_forwards("test_alfk", create_state) alter_operation = migrations.AlterField( model_name='Rider', name='pony', field=models.ForeignKey("Pony", models.CASCADE, editable=False), ) alter_state = create_state.clone() alter_operation.state_forwards("test_alfk", alter_state) with connection.schema_editor() as editor: create_operation.database_forwards("test_alfk", editor, project_state, create_state) alter_operation.database_forwards("test_alfk", editor, create_state, alter_state) def test_alter_fk_non_fk(self): """ Altering an FK to a non-FK works (#23244) """ # Test the state alteration operation = migrations.AlterField( model_name="Rider", name="pony", field=models.FloatField(), ) 
project_state, new_state = self.make_test_state("test_afknfk", operation, related_model=True) # Test the database alteration self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") with connection.schema_editor() as editor: operation.database_forwards("test_afknfk", editor, project_state, new_state) self.assertColumnExists("test_afknfk_rider", "pony") self.assertColumnNotExists("test_afknfk_rider", "pony_id") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_afknfk", editor, new_state, project_state) self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") def test_run_sql(self): """ Tests the RunSQL operation. """ project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( # Use a multi-line string with a comment to test splitting on SQLite and MySQL respectively "CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'i love ponies'); -- this is magic!\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (2, 'i love django');\n" "UPDATE i_love_ponies SET special_thing = 'Ponies' WHERE special_thing LIKE '%%ponies';" "UPDATE i_love_ponies SET special_thing = 'Django' WHERE special_thing LIKE '%django';", # Run delete queries to test for parameter substitution failure # reported in #23426 "DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';" "DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';" "DROP TABLE i_love_ponies", state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])], ) self.assertEqual(operation.describe(), "Raw SQL operation") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_runsql", new_state) self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test SQL collection with connection.schema_editor(collect_sql=True) as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql)) operation.database_backwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql)) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 2) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'") self.assertEqual(cursor.fetchall()[0][0], 1) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'") self.assertEqual(cursor.fetchall()[0][0], 1) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunSQL") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["reverse_sql", "sql", "state_operations"]) # And elidable reduction 
self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunSQL('SELECT 1 FROM void;', elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_sql_params(self): """ #23426 - RunSQL should accept parameters. """ project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( ["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"], ["DROP TABLE i_love_ponies"], ) param_operation = migrations.RunSQL( # forwards ( "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');", ["INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);", ['Ponies']], ("INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);", (3, 'Python',)), ), # backwards [ "DELETE FROM i_love_ponies WHERE special_thing = 'Django';", ["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None], ("DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;", [3, 'Python']), ] ) # Make sure there's no table self.assertTableNotExists("i_love_ponies") new_state = project_state.clone() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) # Test parameter passing with connection.schema_editor() as editor: param_operation.database_forwards("test_runsql", editor, project_state, new_state) # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 3) with connection.schema_editor() as editor: param_operation.database_backwards("test_runsql", editor, new_state, project_state) with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") def test_run_sql_params_invalid(self): """ #23426 - RunSQL should fail when a list of statements with an incorrect number of tuples is given. """ project_state = self.set_up_test_model("test_runsql") new_state = project_state.clone() operation = migrations.RunSQL( # forwards [ ["INSERT INTO foo (bar) VALUES ('buz');"] ], # backwards ( ("DELETE FROM foo WHERE bar = 'buz';", 'invalid', 'parameter count'), ), ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"): operation.database_forwards("test_runsql", editor, project_state, new_state) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"): operation.database_backwards("test_runsql", editor, new_state, project_state) def test_run_sql_noop(self): """ #24098 - Tests no-op RunSQL operations. 
""" operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, None, None) operation.database_backwards("test_runsql", editor, None, None) def test_run_sql_add_missing_semicolon_on_collect_sql(self): project_state = self.set_up_test_model('test_runsql') new_state = project_state.clone() tests = [ 'INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1);\n', 'INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1)\n', ] for sql in tests: with self.subTest(sql=sql): operation = migrations.RunSQL(sql, migrations.RunPython.noop) with connection.schema_editor(collect_sql=True) as editor: operation.database_forwards('test_runsql', editor, project_state, new_state) collected_sql = '\n'.join(editor.collected_sql) self.assertEqual(collected_sql.count(';'), 1) def test_run_python(self): """ Tests the RunPython operation """ project_state = self.set_up_test_model("test_runpython", mti_model=True) # Create the operation def inner_method(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.create(pink=1, weight=3.55) Pony.objects.create(weight=5) def inner_method_reverse(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.filter(pink=1, weight=3.55).delete() Pony.objects.filter(weight=5).delete() operation = migrations.RunPython(inner_method, reverse_code=inner_method_reverse) self.assertEqual(operation.describe(), "Raw Python operation") # Test the state alteration does nothing new_state = project_state.clone() operation.state_forwards("test_runpython", new_state) self.assertEqual(new_state, project_state) # Test the database alteration self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) # Now test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) # Now test we can't use a string with self.assertRaisesMessage(ValueError, 'RunPython must be supplied with a callable'): migrations.RunPython("print 'ahahaha'") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code", "reverse_code"]) # Also test reversal fails, with an operation identical to above but without reverse_code set no_reverse_operation = migrations.RunPython(inner_method) self.assertFalse(no_reverse_operation.reversible) with connection.schema_editor() as editor: no_reverse_operation.database_forwards("test_runpython", editor, project_state, new_state) with self.assertRaises(NotImplementedError): no_reverse_operation.database_backwards("test_runpython", editor, new_state, project_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) def create_ponies(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") pony1 = Pony.objects.create(pink=1, weight=3.55) self.assertIsNot(pony1.pk, None) pony2 = Pony.objects.create(weight=5) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = 
migrations.RunPython(create_ponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code"]) def create_shetlandponies(models, schema_editor): ShetlandPony = models.get_model("test_runpython", "ShetlandPony") pony1 = ShetlandPony.objects.create(weight=4.0) self.assertIsNot(pony1.pk, None) pony2 = ShetlandPony.objects.create(weight=5.0) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = migrations.RunPython(create_shetlandponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6) self.assertEqual(project_state.apps.get_model("test_runpython", "ShetlandPony").objects.count(), 2) # And elidable reduction self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunPython(inner_method, elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_python_atomic(self): """ Tests the RunPython operation correctly handles the "atomic" keyword """ project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True) def inner_method(models, schema_editor): Pony = models.get_model("test_runpythonatomic", "Pony") Pony.objects.create(pink=1, weight=3.55) raise ValueError("Adrian hates ponies.") # Verify atomicity when applying. atomic_migration = Migration("test", "test_runpythonatomic") atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method)] non_atomic_migration = Migration("test", "test_runpythonatomic") non_atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method, atomic=False)] # If we're a fully-transactional database, both versions should rollback if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation should leave a row there else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Reset object count to zero and verify atomicity when unapplying. 
project_state.apps.get_model("test_runpythonatomic", "Pony").objects.all().delete() # On a fully-transactional database, both versions rollback. if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation leaves a row there. else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Verify deconstruction. definition = non_atomic_migration.operations[0].deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["atomic", "code", "reverse_code"]) def test_run_python_related_assignment(self): """ #24282 - Model changes to a FK reverse side update the model on the FK side as well. """ def inner_method(models, schema_editor): Author = models.get_model("test_authors", "Author") Book = models.get_model("test_books", "Book") author = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author) create_author = migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey("test_authors.Author", models.CASCADE)) ], options={}, ) add_hometown = migrations.AddField( "Author", "hometown", models.CharField(max_length=100), ) create_old_man = migrations.RunPython(inner_method, inner_method) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_authors", new_state) create_author.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_books", new_state) create_book.database_forwards("test_books", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: add_hometown.state_forwards("test_authors", new_state) add_hometown.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_old_man.state_forwards("test_books", new_state) create_old_man.database_forwards("test_books", editor, project_state, new_state) def test_model_with_bigautofield(self): """ A model with BigAutoField can be 
created. """ def create_data(models, schema_editor): Author = models.get_model("test_author", "Author") Book = models.get_model("test_book", "Book") author1 = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author1) Book.objects.create(id=2 ** 33, title="A farewell to arms", author=author1) author2 = Author.objects.create(id=2 ** 33, name="Remarque") Book.objects.create(title="All quiet on the western front", author=author2) Book.objects.create(title="Arc de Triomphe", author=author2) create_author = migrations.CreateModel( "Author", [ ("id", models.BigAutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.BigAutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey(to="test_author.Author", on_delete=models.CASCADE)) ], options={}, ) fill_data = migrations.RunPython(create_data) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_author", new_state) create_author.database_forwards("test_author", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_book", new_state) create_book.database_forwards("test_book", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_data.state_forwards("fill_data", new_state) fill_data.database_forwards("fill_data", editor, project_state, new_state) def _test_autofield_foreignfield_growth(self, source_field, target_field, target_value): """ A field may be migrated in the following ways: - AutoField to BigAutoField - SmallAutoField to AutoField - SmallAutoField to BigAutoField """ def create_initial_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog = Blog.objects.create(name="web development done right") Article.objects.create(name="Frameworks", blog=blog) Article.objects.create(name="Programming Languages", blog=blog) def create_big_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog2 = Blog.objects.create(name="Frameworks", id=target_value) Article.objects.create(name="Django", blog=blog2) Article.objects.create(id=target_value, name="Django2", blog=blog2) create_blog = migrations.CreateModel( "Blog", [ ("id", source_field(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_article = migrations.CreateModel( "Article", [ ("id", source_field(primary_key=True)), ("blog", models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE)), ("name", models.CharField(max_length=100)), ("data", models.TextField(default="")), ], options={}, ) fill_initial_data = migrations.RunPython(create_initial_data, create_initial_data) fill_big_data = migrations.RunPython(create_big_data, create_big_data) grow_article_id = migrations.AlterField('Article', 'id', target_field(primary_key=True)) grow_blog_id = migrations.AlterField('Blog', 'id', target_field(primary_key=True)) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_blog.state_forwards("test_blog", new_state) create_blog.database_forwards("test_blog", editor, project_state, new_state) 
project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_article.state_forwards("test_article", new_state) create_article.database_forwards("test_article", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_initial_data.state_forwards("fill_initial_data", new_state) fill_initial_data.database_forwards("fill_initial_data", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_article_id.state_forwards("test_article", new_state) grow_article_id.database_forwards("test_article", editor, project_state, new_state) state = new_state.clone() article = state.apps.get_model("test_article.Article") self.assertIsInstance(article._meta.pk, target_field) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_blog_id.state_forwards("test_blog", new_state) grow_blog_id.database_forwards("test_blog", editor, project_state, new_state) state = new_state.clone() blog = state.apps.get_model("test_blog.Blog") self.assertIsInstance(blog._meta.pk, target_field) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_big_data.state_forwards("fill_big_data", new_state) fill_big_data.database_forwards("fill_big_data", editor, project_state, new_state) def test_autofield__bigautofield_foreignfield_growth(self): """A field may be migrated from AutoField to BigAutoField.""" self._test_autofield_foreignfield_growth( models.AutoField, models.BigAutoField, 2 ** 33, ) def test_smallfield_autofield_foreignfield_growth(self): """A field may be migrated from SmallAutoField to AutoField.""" self._test_autofield_foreignfield_growth( models.SmallAutoField, models.AutoField, 2 ** 22, ) def test_smallfield_bigautofield_foreignfield_growth(self): """A field may be migrated from SmallAutoField to BigAutoField.""" self._test_autofield_foreignfield_growth( models.SmallAutoField, models.BigAutoField, 2 ** 33, ) def test_run_python_noop(self): """ #24098 - Tests no-op RunPython operations. """ project_state = ProjectState() new_state = project_state.clone() operation = migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) operation.database_backwards("test_runpython", editor, new_state, project_state) def test_separate_database_and_state(self): """ Tests the SeparateDatabaseAndState operation. 
""" project_state = self.set_up_test_model("test_separatedatabaseandstate") # Create the operation database_operation = migrations.RunSQL( "CREATE TABLE i_love_ponies (id int, special_thing int);", "DROP TABLE i_love_ponies;" ) state_operation = migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))]) operation = migrations.SeparateDatabaseAndState( state_operations=[state_operation], database_operations=[database_operation] ) self.assertEqual(operation.describe(), "Custom state/database change combination") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_separatedatabaseandstate", new_state) self.assertEqual(len(new_state.models["test_separatedatabaseandstate", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_separatedatabaseandstate", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_separatedatabaseandstate", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "SeparateDatabaseAndState") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["database_operations", "state_operations"]) def test_separate_database_and_state2(self): """ A complex SeparateDatabaseAndState operation: Multiple operations both for state and database. Verify the state dependencies within each list and that state ops don't affect the database. """ app_label = "test_separatedatabaseandstate2" project_state = self.set_up_test_model(app_label) # Create the operation database_operations = [ migrations.CreateModel( "ILovePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveponies"}, ), migrations.CreateModel( "ILoveMorePonies", # We use IntegerField and not AutoField because # the model is going to be deleted immediately # and with an AutoField this fails on Oracle [("id", models.IntegerField(primary_key=True))], options={"db_table": "ilovemoreponies"}, ), migrations.DeleteModel("ILoveMorePonies"), migrations.CreateModel( "ILoveEvenMorePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveevenmoreponies"}, ), ] state_operations = [ migrations.CreateModel( "SomethingElse", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingelse"}, ), migrations.DeleteModel("SomethingElse"), migrations.CreateModel( "SomethingCompletelyDifferent", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingcompletelydifferent"}, ), ] operation = migrations.SeparateDatabaseAndState( state_operations=state_operations, database_operations=database_operations, ) # Test the state alteration new_state = project_state.clone() operation.state_forwards(app_label, new_state) def assertModelsAndTables(after_db): # Tables and models exist, or don't, as they should: self.assertNotIn((app_label, "somethingelse"), new_state.models) self.assertEqual(len(new_state.models[app_label, "somethingcompletelydifferent"].fields), 1) self.assertNotIn((app_label, "iloveponiesonies"), new_state.models) self.assertNotIn((app_label, "ilovemoreponies"), new_state.models) self.assertNotIn((app_label, "iloveevenmoreponies"), new_state.models) 
self.assertTableNotExists("somethingelse") self.assertTableNotExists("somethingcompletelydifferent") self.assertTableNotExists("ilovemoreponies") if after_db: self.assertTableExists("iloveponies") self.assertTableExists("iloveevenmoreponies") else: self.assertTableNotExists("iloveponies") self.assertTableNotExists("iloveevenmoreponies") assertModelsAndTables(after_db=False) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) assertModelsAndTables(after_db=True) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) assertModelsAndTables(after_db=False) class SwappableOperationTests(OperationTestBase): """ Key operations ignore swappable models (we don't want to replicate all of them here, as the functionality is in a common base class anyway) """ available_apps = ['migrations'] @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_create_ignore_swapped(self): """ The CreateTable operation ignores swapped models. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], options={ "swappable": "TEST_SWAP_MODEL", }, ) # Test the state alteration (it should still be there!) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crigsw", new_state) self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crigsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crigsw", editor, project_state, new_state) self.assertTableNotExists("test_crigsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crigsw", editor, new_state, project_state) self.assertTableNotExists("test_crigsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_delete_ignore_swapped(self): """ Tests the DeleteModel operation ignores swapped models. """ operation = migrations.DeleteModel("Pony") project_state, new_state = self.make_test_state("test_dligsw", operation) # Test the database alteration self.assertTableNotExists("test_dligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dligsw", editor, project_state, new_state) self.assertTableNotExists("test_dligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dligsw", editor, new_state, project_state) self.assertTableNotExists("test_dligsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_add_field_ignore_swapped(self): """ Tests the AddField operation. 
""" # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=5), ) project_state, new_state = self.make_test_state("test_adfligsw", operation) # Test the database alteration self.assertTableNotExists("test_adfligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_adfligsw", editor, project_state, new_state) self.assertTableNotExists("test_adfligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adfligsw", editor, new_state, project_state) self.assertTableNotExists("test_adfligsw_pony") @override_settings(TEST_SWAP_MODEL='migrations.SomeFakeModel') def test_indexes_ignore_swapped(self): """ Add/RemoveIndex operations ignore swapped models. """ operation = migrations.AddIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state('test_adinigsw', operation) with connection.schema_editor() as editor: # No database queries should be run for swapped models operation.database_forwards('test_adinigsw', editor, project_state, new_state) operation.database_backwards('test_adinigsw', editor, new_state, project_state) operation = migrations.RemoveIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state("test_rminigsw", operation) with connection.schema_editor() as editor: operation.database_forwards('test_rminigsw', editor, project_state, new_state) operation.database_backwards('test_rminigsw', editor, new_state, project_state) class TestCreateModel(SimpleTestCase): def test_references_model_mixin(self): migrations.CreateModel( 'name', fields=[], bases=(Mixin, models.Model), ).references_model('other_model', 'migrations') class FieldOperationTests(SimpleTestCase): def test_references_model(self): operation = FieldOperation('MoDel', 'field', models.ForeignKey('Other', models.CASCADE)) # Model name match. self.assertIs(operation.references_model('mOdEl', 'migrations'), True) # Referenced field. self.assertIs(operation.references_model('oTher', 'migrations'), True) # Doesn't reference. 
self.assertIs(operation.references_model('Whatever', 'migrations'), False) def test_references_field_by_name(self): operation = FieldOperation('MoDel', 'field', models.BooleanField(default=False)) self.assertIs(operation.references_field('model', 'field', 'migrations'), True) def test_references_field_by_remote_field_model(self): operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE)) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_references_field_by_from_fields(self): operation = FieldOperation( 'Model', 'field', models.fields.related.ForeignObject('Other', models.CASCADE, ['from'], ['to']) ) self.assertIs(operation.references_field('Model', 'from', 'migrations'), True) self.assertIs(operation.references_field('Model', 'to', 'migrations'), False) self.assertIs(operation.references_field('Other', 'from', 'migrations'), False) self.assertIs(operation.references_field('Model', 'to', 'migrations'), False) def test_references_field_by_to_fields(self): operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE, to_field='field')) self.assertIs(operation.references_field('Other', 'field', 'migrations'), True) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), False) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_references_field_by_through(self): operation = FieldOperation('Model', 'field', models.ManyToManyField('Other', through='Through')) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_reference_field_by_through_fields(self): operation = FieldOperation( 'Model', 'field', models.ManyToManyField('Other', through='Through', through_fields=('first', 'second')) ) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), False) self.assertIs(operation.references_field('Through', 'first', 'migrations'), True) self.assertIs(operation.references_field('Through', 'second', 'migrations'), True)
5133a1da08d436c61c53d7d35e598a1811ac6036e7625807771cfb467d406363
import datetime import re from unittest import mock from django.contrib.auth.forms import ( AdminPasswordChangeForm, AuthenticationForm, PasswordChangeForm, PasswordResetForm, ReadOnlyPasswordHashField, ReadOnlyPasswordHashWidget, SetPasswordForm, UserChangeForm, UserCreationForm, ) from django.contrib.auth.models import User from django.contrib.auth.signals import user_login_failed from django.contrib.sites.models import Site from django.core import mail from django.core.exceptions import ValidationError from django.core.mail import EmailMultiAlternatives from django.forms import forms from django.forms.fields import CharField, Field, IntegerField from django.test import SimpleTestCase, TestCase, override_settings from django.utils import translation from django.utils.text import capfirst from django.utils.translation import gettext as _ from .models.custom_user import ( CustomUser, CustomUserWithoutIsActiveField, ExtensionUser, ) from .models.with_custom_email_field import CustomEmailField from .models.with_integer_username import IntegerUsernameUser from .settings import AUTH_TEMPLATES class TestDataMixin: @classmethod def setUpTestData(cls): cls.u1 = User.objects.create_user(username='testclient', password='password', email='[email protected]') cls.u2 = User.objects.create_user(username='inactive', password='password', is_active=False) cls.u3 = User.objects.create_user(username='staff', password='password') cls.u4 = User.objects.create(username='empty_password', password='') cls.u5 = User.objects.create(username='unmanageable_password', password='$') cls.u6 = User.objects.create(username='unknown_password', password='foo$bar') class UserCreationFormTest(TestDataMixin, TestCase): def test_user_already_exists(self): data = { 'username': 'testclient', 'password1': 'test123', 'password2': 'test123', } form = UserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form["username"].errors, [str(User._meta.get_field('username').error_messages['unique'])]) def test_invalid_data(self): data = { 'username': 'jsmith!', 'password1': 'test123', 'password2': 'test123', } form = UserCreationForm(data) self.assertFalse(form.is_valid()) validator = next(v for v in User._meta.get_field('username').validators if v.code == 'invalid') self.assertEqual(form["username"].errors, [str(validator.message)]) def test_password_verification(self): # The verification password is incorrect. data = { 'username': 'jsmith', 'password1': 'test123', 'password2': 'test', } form = UserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form["password2"].errors, [str(form.error_messages['password_mismatch'])]) def test_both_passwords(self): # One (or both) passwords weren't given data = {'username': 'jsmith'} form = UserCreationForm(data) required_error = [str(Field.default_error_messages['required'])] self.assertFalse(form.is_valid()) self.assertEqual(form['password1'].errors, required_error) self.assertEqual(form['password2'].errors, required_error) data['password2'] = 'test123' form = UserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form['password1'].errors, required_error) self.assertEqual(form['password2'].errors, []) @mock.patch('django.contrib.auth.password_validation.password_changed') def test_success(self, password_changed): # The success case. 
data = { 'username': '[email protected]', 'password1': 'test123', 'password2': 'test123', } form = UserCreationForm(data) self.assertTrue(form.is_valid()) form.save(commit=False) self.assertEqual(password_changed.call_count, 0) u = form.save() self.assertEqual(password_changed.call_count, 1) self.assertEqual(repr(u), '<User: [email protected]>') def test_unicode_username(self): data = { 'username': '宝', 'password1': 'test123', 'password2': 'test123', } form = UserCreationForm(data) self.assertTrue(form.is_valid()) u = form.save() self.assertEqual(u.username, '宝') def test_normalize_username(self): # The normalization happens in AbstractBaseUser.clean() and ModelForm # validation calls Model.clean(). ohm_username = 'testΩ' # U+2126 OHM SIGN data = { 'username': ohm_username, 'password1': 'pwd2', 'password2': 'pwd2', } form = UserCreationForm(data) self.assertTrue(form.is_valid()) user = form.save() self.assertNotEqual(user.username, ohm_username) self.assertEqual(user.username, 'testΩ') # U+03A9 GREEK CAPITAL LETTER OMEGA def test_duplicate_normalized_unicode(self): """ To prevent almost identical usernames, visually identical but differing by their unicode code points only, Unicode NFKC normalization should make appear them equal to Django. """ omega_username = 'iamtheΩ' # U+03A9 GREEK CAPITAL LETTER OMEGA ohm_username = 'iamtheΩ' # U+2126 OHM SIGN self.assertNotEqual(omega_username, ohm_username) User.objects.create_user(username=omega_username, password='pwd') data = { 'username': ohm_username, 'password1': 'pwd2', 'password2': 'pwd2', } form = UserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual( form.errors['username'], ["A user with that username already exists."] ) @override_settings(AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'}, {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': { 'min_length': 12, }}, ]) def test_validates_password(self): data = { 'username': 'testclient', 'password1': 'testclient', 'password2': 'testclient', } form = UserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual(len(form['password2'].errors), 2) self.assertIn('The password is too similar to the username.', form['password2'].errors) self.assertIn( 'This password is too short. 
It must contain at least 12 characters.', form['password2'].errors ) def test_custom_form(self): class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model = ExtensionUser fields = UserCreationForm.Meta.fields + ('date_of_birth',) data = { 'username': 'testclient', 'password1': 'testclient', 'password2': 'testclient', 'date_of_birth': '1988-02-24', } form = CustomUserCreationForm(data) self.assertTrue(form.is_valid()) def test_custom_form_with_different_username_field(self): class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model = CustomUser fields = ('email', 'date_of_birth') data = { 'email': '[email protected]', 'password1': 'testclient', 'password2': 'testclient', 'date_of_birth': '1988-02-24', } form = CustomUserCreationForm(data) self.assertTrue(form.is_valid()) def test_custom_form_hidden_username_field(self): class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model = CustomUserWithoutIsActiveField fields = ('email',) # without USERNAME_FIELD data = { 'email': '[email protected]', 'password1': 'testclient', 'password2': 'testclient', } form = CustomUserCreationForm(data) self.assertTrue(form.is_valid()) def test_password_whitespace_not_stripped(self): data = { 'username': 'testuser', 'password1': ' testpassword ', 'password2': ' testpassword ', } form = UserCreationForm(data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['password1'], data['password1']) self.assertEqual(form.cleaned_data['password2'], data['password2']) @override_settings(AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'}, ]) def test_password_help_text(self): form = UserCreationForm() self.assertEqual( form.fields['password1'].help_text, '<ul><li>Your password can’t be too similar to your other personal information.</li></ul>' ) @override_settings(AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'}, ]) def test_user_create_form_validates_password_with_all_data(self): """UserCreationForm password validation uses all of the form's data.""" class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model = User fields = ('username', 'email', 'first_name', 'last_name') form = CustomUserCreationForm({ 'username': 'testuser', 'password1': 'testpassword', 'password2': 'testpassword', 'first_name': 'testpassword', 'last_name': 'lastname', }) self.assertFalse(form.is_valid()) self.assertEqual( form.errors['password2'], ['The password is too similar to the first name.'], ) def test_username_field_autocapitalize_none(self): form = UserCreationForm() self.assertEqual(form.fields['username'].widget.attrs.get('autocapitalize'), 'none') def test_html_autocomplete_attributes(self): form = UserCreationForm() tests = ( ('username', 'username'), ('password1', 'new-password'), ('password2', 'new-password'), ) for field_name, autocomplete in tests: with self.subTest(field_name=field_name, autocomplete=autocomplete): self.assertEqual(form.fields[field_name].widget.attrs['autocomplete'], autocomplete) # To verify that the login form rejects inactive users, use an authentication # backend that allows them. @override_settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend']) class AuthenticationFormTest(TestDataMixin, TestCase): def test_invalid_username(self): # The user submits an invalid username. 
data = { 'username': 'jsmith_does_not_exist', 'password': 'test123', } form = AuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual( form.non_field_errors(), [ form.error_messages['invalid_login'] % { 'username': User._meta.get_field('username').verbose_name } ] ) def test_inactive_user(self): # The user is inactive. data = { 'username': 'inactive', 'password': 'password', } form = AuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual(form.non_field_errors(), [str(form.error_messages['inactive'])]) # Use an authentication backend that rejects inactive users. @override_settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.ModelBackend']) def test_inactive_user_incorrect_password(self): """An invalid login doesn't leak the inactive status of a user.""" data = { 'username': 'inactive', 'password': 'incorrect', } form = AuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual( form.non_field_errors(), [ form.error_messages['invalid_login'] % { 'username': User._meta.get_field('username').verbose_name } ] ) def test_login_failed(self): signal_calls = [] def signal_handler(**kwargs): signal_calls.append(kwargs) user_login_failed.connect(signal_handler) fake_request = object() try: form = AuthenticationForm(fake_request, { 'username': 'testclient', 'password': 'incorrect', }) self.assertFalse(form.is_valid()) self.assertIs(signal_calls[0]['request'], fake_request) finally: user_login_failed.disconnect(signal_handler) def test_inactive_user_i18n(self): with self.settings(USE_I18N=True), translation.override('pt-br', deactivate=True): # The user is inactive. data = { 'username': 'inactive', 'password': 'password', } form = AuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual(form.non_field_errors(), [str(form.error_messages['inactive'])]) # Use an authentication backend that allows inactive users. @override_settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend']) def test_custom_login_allowed_policy(self): # The user is inactive, but our custom form policy allows them to log in. data = { 'username': 'inactive', 'password': 'password', } class AuthenticationFormWithInactiveUsersOkay(AuthenticationForm): def confirm_login_allowed(self, user): pass form = AuthenticationFormWithInactiveUsersOkay(None, data) self.assertTrue(form.is_valid()) # Raise a ValidationError in the form to disallow some logins according # to custom logic. 
class PickyAuthenticationForm(AuthenticationForm): def confirm_login_allowed(self, user): if user.username == "inactive": raise ValidationError("This user is disallowed.") raise ValidationError("Sorry, nobody's allowed in.") form = PickyAuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual(form.non_field_errors(), ['This user is disallowed.']) data = { 'username': 'testclient', 'password': 'password', } form = PickyAuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual(form.non_field_errors(), ["Sorry, nobody's allowed in."]) def test_success(self): # The success case data = { 'username': 'testclient', 'password': 'password', } form = AuthenticationForm(None, data) self.assertTrue(form.is_valid()) self.assertEqual(form.non_field_errors(), []) def test_unicode_username(self): User.objects.create_user(username='Σαρα', password='pwd') data = { 'username': 'Σαρα', 'password': 'pwd', } form = AuthenticationForm(None, data) self.assertTrue(form.is_valid()) self.assertEqual(form.non_field_errors(), []) @override_settings(AUTH_USER_MODEL='auth_tests.CustomEmailField') def test_username_field_max_length_matches_user_model(self): self.assertEqual(CustomEmailField._meta.get_field('username').max_length, 255) data = { 'username': 'u' * 255, 'password': 'pwd', 'email': '[email protected]', } CustomEmailField.objects.create_user(**data) form = AuthenticationForm(None, data) self.assertEqual(form.fields['username'].max_length, 255) self.assertEqual(form.fields['username'].widget.attrs.get('maxlength'), 255) self.assertEqual(form.errors, {}) @override_settings(AUTH_USER_MODEL='auth_tests.IntegerUsernameUser') def test_username_field_max_length_defaults_to_254(self): self.assertIsNone(IntegerUsernameUser._meta.get_field('username').max_length) data = { 'username': '0123456', 'password': 'password', } IntegerUsernameUser.objects.create_user(**data) form = AuthenticationForm(None, data) self.assertEqual(form.fields['username'].max_length, 254) self.assertEqual(form.fields['username'].widget.attrs.get('maxlength'), 254) self.assertEqual(form.errors, {}) def test_username_field_label(self): class CustomAuthenticationForm(AuthenticationForm): username = CharField(label="Name", max_length=75) form = CustomAuthenticationForm() self.assertEqual(form['username'].label, "Name") def test_username_field_label_not_set(self): class CustomAuthenticationForm(AuthenticationForm): username = CharField() form = CustomAuthenticationForm() username_field = User._meta.get_field(User.USERNAME_FIELD) self.assertEqual(form.fields['username'].label, capfirst(username_field.verbose_name)) def test_username_field_autocapitalize_none(self): form = AuthenticationForm() self.assertEqual(form.fields['username'].widget.attrs.get('autocapitalize'), 'none') def test_username_field_label_empty_string(self): class CustomAuthenticationForm(AuthenticationForm): username = CharField(label='') form = CustomAuthenticationForm() self.assertEqual(form.fields['username'].label, "") def test_password_whitespace_not_stripped(self): data = { 'username': 'testuser', 'password': ' pass ', } form = AuthenticationForm(None, data) form.is_valid() # Not necessary to have valid credentials for the test.
self.assertEqual(form.cleaned_data['password'], data['password']) @override_settings(AUTH_USER_MODEL='auth_tests.IntegerUsernameUser') def test_integer_username(self): class CustomAuthenticationForm(AuthenticationForm): username = IntegerField() user = IntegerUsernameUser.objects.create_user(username=0, password='pwd') data = { 'username': 0, 'password': 'pwd', } form = CustomAuthenticationForm(None, data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['username'], data['username']) self.assertEqual(form.cleaned_data['password'], data['password']) self.assertEqual(form.errors, {}) self.assertEqual(form.user_cache, user) def test_get_invalid_login_error(self): error = AuthenticationForm().get_invalid_login_error() self.assertIsInstance(error, ValidationError) self.assertEqual( error.message, 'Please enter a correct %(username)s and password. Note that both ' 'fields may be case-sensitive.', ) self.assertEqual(error.code, 'invalid_login') self.assertEqual(error.params, {'username': 'username'}) def test_html_autocomplete_attributes(self): form = AuthenticationForm() tests = ( ('username', 'username'), ('password', 'current-password'), ) for field_name, autocomplete in tests: with self.subTest(field_name=field_name, autocomplete=autocomplete): self.assertEqual(form.fields[field_name].widget.attrs['autocomplete'], autocomplete) class SetPasswordFormTest(TestDataMixin, TestCase): def test_password_verification(self): # The two new passwords do not match. user = User.objects.get(username='testclient') data = { 'new_password1': 'abc123', 'new_password2': 'abc', } form = SetPasswordForm(user, data) self.assertFalse(form.is_valid()) self.assertEqual( form["new_password2"].errors, [str(form.error_messages['password_mismatch'])] ) @mock.patch('django.contrib.auth.password_validation.password_changed') def test_success(self, password_changed): user = User.objects.get(username='testclient') data = { 'new_password1': 'abc123', 'new_password2': 'abc123', } form = SetPasswordForm(user, data) self.assertTrue(form.is_valid()) form.save(commit=False) self.assertEqual(password_changed.call_count, 0) form.save() self.assertEqual(password_changed.call_count, 1) @override_settings(AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'}, {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': { 'min_length': 12, }}, ]) def test_validates_password(self): user = User.objects.get(username='testclient') data = { 'new_password1': 'testclient', 'new_password2': 'testclient', } form = SetPasswordForm(user, data) self.assertFalse(form.is_valid()) self.assertEqual(len(form["new_password2"].errors), 2) self.assertIn('The password is too similar to the username.', form["new_password2"].errors) self.assertIn( 'This password is too short. 
It must contain at least 12 characters.', form["new_password2"].errors ) def test_password_whitespace_not_stripped(self): user = User.objects.get(username='testclient') data = { 'new_password1': ' password ', 'new_password2': ' password ', } form = SetPasswordForm(user, data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['new_password1'], data['new_password1']) self.assertEqual(form.cleaned_data['new_password2'], data['new_password2']) @override_settings(AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'}, {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': { 'min_length': 12, }}, ]) def test_help_text_translation(self): french_help_texts = [ 'Votre mot de passe ne peut pas trop ressembler à vos autres informations personnelles.', 'Votre mot de passe doit contenir au minimum 12 caractères.', ] form = SetPasswordForm(self.u1) with translation.override('fr'): html = form.as_p() for french_text in french_help_texts: self.assertIn(french_text, html) def test_html_autocomplete_attributes(self): form = SetPasswordForm(self.u1) tests = ( ('new_password1', 'new-password'), ('new_password2', 'new-password'), ) for field_name, autocomplete in tests: with self.subTest(field_name=field_name, autocomplete=autocomplete): self.assertEqual(form.fields[field_name].widget.attrs['autocomplete'], autocomplete) class PasswordChangeFormTest(TestDataMixin, TestCase): def test_incorrect_password(self): user = User.objects.get(username='testclient') data = { 'old_password': 'test', 'new_password1': 'abc123', 'new_password2': 'abc123', } form = PasswordChangeForm(user, data) self.assertFalse(form.is_valid()) self.assertEqual(form["old_password"].errors, [str(form.error_messages['password_incorrect'])]) def test_password_verification(self): # The two new passwords do not match. user = User.objects.get(username='testclient') data = { 'old_password': 'password', 'new_password1': 'abc123', 'new_password2': 'abc', } form = PasswordChangeForm(user, data) self.assertFalse(form.is_valid()) self.assertEqual(form["new_password2"].errors, [str(form.error_messages['password_mismatch'])]) @mock.patch('django.contrib.auth.password_validation.password_changed') def test_success(self, password_changed): # The success case. 
user = User.objects.get(username='testclient') data = { 'old_password': 'password', 'new_password1': 'abc123', 'new_password2': 'abc123', } form = PasswordChangeForm(user, data) self.assertTrue(form.is_valid()) form.save(commit=False) self.assertEqual(password_changed.call_count, 0) form.save() self.assertEqual(password_changed.call_count, 1) def test_field_order(self): # Regression test - check the order of fields: user = User.objects.get(username='testclient') self.assertEqual(list(PasswordChangeForm(user, {}).fields), ['old_password', 'new_password1', 'new_password2']) def test_password_whitespace_not_stripped(self): user = User.objects.get(username='testclient') user.set_password(' oldpassword ') data = { 'old_password': ' oldpassword ', 'new_password1': ' pass ', 'new_password2': ' pass ', } form = PasswordChangeForm(user, data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['old_password'], data['old_password']) self.assertEqual(form.cleaned_data['new_password1'], data['new_password1']) self.assertEqual(form.cleaned_data['new_password2'], data['new_password2']) def test_html_autocomplete_attributes(self): user = User.objects.get(username='testclient') form = PasswordChangeForm(user) self.assertEqual(form.fields['old_password'].widget.attrs['autocomplete'], 'current-password') class UserChangeFormTest(TestDataMixin, TestCase): def test_username_validity(self): user = User.objects.get(username='testclient') data = {'username': 'not valid'} form = UserChangeForm(data, instance=user) self.assertFalse(form.is_valid()) validator = next(v for v in User._meta.get_field('username').validators if v.code == 'invalid') self.assertEqual(form["username"].errors, [str(validator.message)]) def test_bug_14242(self): # A regression test, introduced by adding an optimization for the # UserChangeForm. class MyUserForm(UserChangeForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['groups'].help_text = 'These groups give users different permissions' class Meta(UserChangeForm.Meta): fields = ('groups',) # Just check we can create it MyUserForm({}) def test_unusable_password(self): user = User.objects.get(username='empty_password') user.set_unusable_password() user.save() form = UserChangeForm(instance=user) self.assertIn(_("No password set."), form.as_table()) def test_bug_17944_empty_password(self): user = User.objects.get(username='empty_password') form = UserChangeForm(instance=user) self.assertIn(_("No password set."), form.as_table()) def test_bug_17944_unmanageable_password(self): user = User.objects.get(username='unmanageable_password') form = UserChangeForm(instance=user) self.assertIn(_("Invalid password format or unknown hashing algorithm."), form.as_table()) def test_bug_17944_unknown_password_algorithm(self): user = User.objects.get(username='unknown_password') form = UserChangeForm(instance=user) self.assertIn(_("Invalid password format or unknown hashing algorithm."), form.as_table()) def test_bug_19133(self): "The change form does not return the password value" # Use the form to construct the POST data user = User.objects.get(username='testclient') form_for_data = UserChangeForm(instance=user) post_data = form_for_data.initial # The password field should be readonly, so anything # posted here should be ignored; the form will be # valid, and give back the 'initial' value for the # password field.
post_data['password'] = 'new password' form = UserChangeForm(instance=user, data=post_data) self.assertTrue(form.is_valid()) # original hashed password contains $ self.assertIn('$', form.cleaned_data['password']) def test_bug_19349_bound_password_field(self): user = User.objects.get(username='testclient') form = UserChangeForm(data={}, instance=user) # When rendering the bound password field, # ReadOnlyPasswordHashWidget needs the initial # value to render correctly self.assertEqual(form.initial['password'], form['password'].value()) def test_custom_form(self): class CustomUserChangeForm(UserChangeForm): class Meta(UserChangeForm.Meta): model = ExtensionUser fields = ('username', 'password', 'date_of_birth',) user = User.objects.get(username='testclient') data = { 'username': 'testclient', 'password': 'testclient', 'date_of_birth': '1998-02-24', } form = CustomUserChangeForm(data, instance=user) self.assertTrue(form.is_valid()) form.save() self.assertEqual(form.cleaned_data['username'], 'testclient') self.assertEqual(form.cleaned_data['date_of_birth'], datetime.date(1998, 2, 24)) def test_password_excluded(self): class UserChangeFormWithoutPassword(UserChangeForm): password = None class Meta: model = User exclude = ['password'] form = UserChangeFormWithoutPassword() self.assertNotIn('password', form.fields) def test_username_field_autocapitalize_none(self): form = UserChangeForm() self.assertEqual(form.fields['username'].widget.attrs.get('autocapitalize'), 'none') @override_settings(TEMPLATES=AUTH_TEMPLATES) class PasswordResetFormTest(TestDataMixin, TestCase): @classmethod def setUpClass(cls): super().setUpClass() # This cleanup is necessary because contrib.sites cache # makes tests interfere with each other, see #11505 Site.objects.clear_cache() def create_dummy_user(self): """ Create a user and return a tuple (user_object, username, email). 
""" username = 'jsmith' email = '[email protected]' user = User.objects.create_user(username, email, 'test123') return (user, username, email) def test_invalid_email(self): data = {'email': 'not valid'} form = PasswordResetForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form['email'].errors, [_('Enter a valid email address.')]) def test_user_email_unicode_collision(self): User.objects.create_user('mike123', '[email protected]', 'test123') User.objects.create_user('mike456', 'mı[email protected]', 'test123') data = {'email': 'mı[email protected]'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, ['mı[email protected]']) def test_user_email_domain_unicode_collision(self): User.objects.create_user('mike123', '[email protected]', 'test123') User.objects.create_user('mike456', 'mike@ıxample.org', 'test123') data = {'email': 'mike@ıxample.org'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, ['mike@ıxample.org']) def test_user_email_unicode_collision_nonexistent(self): User.objects.create_user('mike123', '[email protected]', 'test123') data = {'email': 'mı[email protected]'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 0) def test_user_email_domain_unicode_collision_nonexistent(self): User.objects.create_user('mike123', '[email protected]', 'test123') data = {'email': 'mike@ıxample.org'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 0) def test_nonexistent_email(self): """ Test nonexistent email address. This should not fail because it would expose information about registered users. """ data = {'email': '[email protected]'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) self.assertEqual(len(mail.outbox), 0) def test_cleaned_data(self): (user, username, email) = self.create_dummy_user() data = {'email': email} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) form.save(domain_override='example.com') self.assertEqual(form.cleaned_data['email'], email) self.assertEqual(len(mail.outbox), 1) def test_custom_email_subject(self): data = {'email': '[email protected]'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) # Since we're not providing a request object, we must provide a # domain_override to prevent the save operation from failing in the # potential case where contrib.sites is not installed. Refs #16412. 
form.save(domain_override='example.com') self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Custom password reset on example.com') def test_custom_email_constructor(self): data = {'email': '[email protected]'} class CustomEmailPasswordResetForm(PasswordResetForm): def send_mail(self, subject_template_name, email_template_name, context, from_email, to_email, html_email_template_name=None): EmailMultiAlternatives( "Forgot your password?", "Sorry to hear you forgot your password.", None, [to_email], ['[email protected]'], headers={'Reply-To': '[email protected]'}, alternatives=[ ("Really sorry to hear you forgot your password.", "text/html") ], ).send() form = CustomEmailPasswordResetForm(data) self.assertTrue(form.is_valid()) # Since we're not providing a request object, we must provide a # domain_override to prevent the save operation from failing in the # potential case where contrib.sites is not installed. Refs #16412. form.save(domain_override='example.com') self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Forgot your password?') self.assertEqual(mail.outbox[0].bcc, ['[email protected]']) self.assertEqual(mail.outbox[0].content_subtype, "plain") def test_preserve_username_case(self): """ Preserve the case of the user name (before the @ in the email address) when creating a user (#5605). """ user = User.objects.create_user('forms_test2', '[email protected]', 'test') self.assertEqual(user.email, '[email protected]') user = User.objects.create_user('forms_test3', 'tesT', 'test') self.assertEqual(user.email, 'tesT') def test_inactive_user(self): """ Inactive user cannot receive password reset email. """ (user, username, email) = self.create_dummy_user() user.is_active = False user.save() form = PasswordResetForm({'email': email}) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 0) def test_unusable_password(self): user = User.objects.create_user('testuser', '[email protected]', 'test') data = {"email": "[email protected]"} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) user.set_unusable_password() user.save() form = PasswordResetForm(data) # The form itself is valid, but no email is sent self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 0) def test_save_plaintext_email(self): """ Test the PasswordResetForm.save() method with no html_email_template_name parameter passed in. Test to ensure original behavior is unchanged after the parameter was added. """ (user, username, email) = self.create_dummy_user() form = PasswordResetForm({"email": email}) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 1) message = mail.outbox[0].message() self.assertFalse(message.is_multipart()) self.assertEqual(message.get_content_type(), 'text/plain') self.assertEqual(message.get('subject'), 'Custom password reset on example.com') self.assertEqual(len(mail.outbox[0].alternatives), 0) self.assertEqual(message.get_all('to'), [email]) self.assertTrue(re.match(r'^http://example.com/reset/[\w+/-]', message.get_payload())) def test_save_html_email_template_name(self): """ Test the PasswordResetForm.save() method with html_email_template_name parameter specified. Test to ensure that a multipart email is sent with both text/plain and text/html parts. 
""" (user, username, email) = self.create_dummy_user() form = PasswordResetForm({"email": email}) self.assertTrue(form.is_valid()) form.save(html_email_template_name='registration/html_password_reset_email.html') self.assertEqual(len(mail.outbox), 1) self.assertEqual(len(mail.outbox[0].alternatives), 1) message = mail.outbox[0].message() self.assertEqual(message.get('subject'), 'Custom password reset on example.com') self.assertEqual(len(message.get_payload()), 2) self.assertTrue(message.is_multipart()) self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain') self.assertEqual(message.get_payload(1).get_content_type(), 'text/html') self.assertEqual(message.get_all('to'), [email]) self.assertTrue(re.match(r'^http://example.com/reset/[\w/-]+', message.get_payload(0).get_payload())) self.assertTrue(re.match( r'^<html><a href="http://example.com/reset/[\w/-]+/">Link</a></html>$', message.get_payload(1).get_payload() )) @override_settings(AUTH_USER_MODEL='auth_tests.CustomEmailField') def test_custom_email_field(self): email = '[email protected]' CustomEmailField.objects.create_user('test name', 'test password', email) form = PasswordResetForm({'email': email}) self.assertTrue(form.is_valid()) form.save() self.assertEqual(form.cleaned_data['email'], email) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [email]) def test_html_autocomplete_attributes(self): form = PasswordResetForm() self.assertEqual(form.fields['email'].widget.attrs['autocomplete'], 'email') class ReadOnlyPasswordHashTest(SimpleTestCase): def test_bug_19349_render_with_none_value(self): # Rendering the widget with value set to None # mustn't raise an exception. widget = ReadOnlyPasswordHashWidget() html = widget.render(name='password', value=None, attrs={}) self.assertIn(_("No password set."), html) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.PBKDF2PasswordHasher']) def test_render(self): widget = ReadOnlyPasswordHashWidget() value = 'pbkdf2_sha256$100000$a6Pucb1qSFcD$WmCkn9Hqidj48NVe5x0FEM6A9YiOqQcl/83m2Z5udm0=' self.assertHTMLEqual( widget.render('name', value, {'id': 'id_password'}), """ <div id="id_password"> <strong>algorithm</strong>: pbkdf2_sha256 <strong>iterations</strong>: 100000 <strong>salt</strong>: a6Pucb****** <strong>hash</strong>: WmCkn9************************************** </div> """ ) def test_readonly_field_has_changed(self): field = ReadOnlyPasswordHashField() self.assertIs(field.disabled, True) self.assertFalse(field.has_changed('aaa', 'bbb')) def test_label(self): """ ReadOnlyPasswordHashWidget doesn't contain a for attribute in the <label> because it doesn't have any labelable elements. 
""" class TestForm(forms.Form): hash_field = ReadOnlyPasswordHashField() bound_field = TestForm()['hash_field'] self.assertEqual(bound_field.field.widget.id_for_label('id'), None) self.assertEqual(bound_field.label_tag(), '<label>Hash field:</label>') class AdminPasswordChangeFormTest(TestDataMixin, TestCase): @mock.patch('django.contrib.auth.password_validation.password_changed') def test_success(self, password_changed): user = User.objects.get(username='testclient') data = { 'password1': 'test123', 'password2': 'test123', } form = AdminPasswordChangeForm(user, data) self.assertTrue(form.is_valid()) form.save(commit=False) self.assertEqual(password_changed.call_count, 0) form.save() self.assertEqual(password_changed.call_count, 1) def test_password_whitespace_not_stripped(self): user = User.objects.get(username='testclient') data = { 'password1': ' pass ', 'password2': ' pass ', } form = AdminPasswordChangeForm(user, data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['password1'], data['password1']) self.assertEqual(form.cleaned_data['password2'], data['password2']) def test_non_matching_passwords(self): user = User.objects.get(username='testclient') data = {'password1': 'password1', 'password2': 'password2'} form = AdminPasswordChangeForm(user, data) self.assertEqual(form.errors['password2'], [form.error_messages['password_mismatch']]) def test_missing_passwords(self): user = User.objects.get(username='testclient') data = {'password1': '', 'password2': ''} form = AdminPasswordChangeForm(user, data) required_error = [Field.default_error_messages['required']] self.assertEqual(form.errors['password1'], required_error) self.assertEqual(form.errors['password2'], required_error) def test_one_password(self): user = User.objects.get(username='testclient') form1 = AdminPasswordChangeForm(user, {'password1': '', 'password2': 'test'}) required_error = [Field.default_error_messages['required']] self.assertEqual(form1.errors['password1'], required_error) self.assertNotIn('password2', form1.errors) form2 = AdminPasswordChangeForm(user, {'password1': 'test', 'password2': ''}) self.assertEqual(form2.errors['password2'], required_error) self.assertNotIn('password1', form2.errors) def test_html_autocomplete_attributes(self): user = User.objects.get(username='testclient') form = AdminPasswordChangeForm(user) tests = ( ('password1', 'new-password'), ('password2', 'new-password'), ) for field_name, autocomplete in tests: with self.subTest(field_name=field_name, autocomplete=autocomplete): self.assertEqual(form.fields[field_name].widget.attrs['autocomplete'], autocomplete)
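# ---------------------------------------------------------------------------
# Hedged illustration (not part of Django's test suite): the form tests above
# override AUTH_PASSWORD_VALIDATORS and exercise SetPasswordForm,
# PasswordChangeForm and AdminPasswordChangeForm against it. This is a minimal
# sketch of the same validator configuration used directly, assuming the
# 12-character minimum from the overridden setting; the names
# SAMPLE_PASSWORD_VALIDATORS and check_candidate_password are hypothetical.

from django.contrib.auth import password_validation
from django.core.exceptions import ValidationError

SAMPLE_PASSWORD_VALIDATORS = [
    {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
    {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
     'OPTIONS': {'min_length': 12}},
]


def check_candidate_password(candidate, user=None):
    """Return a list of validation error messages for a candidate password.

    Uses get_password_validators() so the sample configuration above is
    applied explicitly instead of reading settings.AUTH_PASSWORD_VALIDATORS.
    """
    validators = password_validation.get_password_validators(SAMPLE_PASSWORD_VALIDATORS)
    try:
        password_validation.validate_password(candidate, user=user, password_validators=validators)
    except ValidationError as errors:
        return list(errors.messages)
    return []


# Usage: a short password fails the MinimumLengthValidator configured above,
# e.g. check_candidate_password('abc123') would include the "too short" message.
# ---------------------------------------------------------------------------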
import datetime import os import re import unittest from unittest import mock from urllib.parse import parse_qsl, urljoin, urlparse import pytz try: import zoneinfo except ImportError: try: from backports import zoneinfo except ImportError: zoneinfo = None from django.contrib import admin from django.contrib.admin import AdminSite, ModelAdmin from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME from django.contrib.admin.models import ADDITION, DELETION, LogEntry from django.contrib.admin.options import TO_FIELD_VAR from django.contrib.admin.templatetags.admin_urls import add_preserved_filters from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.admin.utils import quote from django.contrib.admin.views.main import IS_POPUP_VAR from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename from django.contrib.auth.models import Group, Permission, User from django.contrib.contenttypes.models import ContentType from django.core import mail from django.core.checks import Error from django.core.files import temp as tempfile from django.forms.utils import ErrorList from django.template.response import TemplateResponse from django.test import ( TestCase, modify_settings, override_settings, skipUnlessDBFeature, ) from django.test.utils import override_script_prefix from django.urls import NoReverseMatch, resolve, reverse from django.utils import formats, translation from django.utils.cache import get_max_age from django.utils.encoding import iri_to_uri from django.utils.html import escape from django.utils.http import urlencode from . import customadmin from .admin import CityAdmin, site, site2 from .models import ( Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField, AdminOrderedModelMethod, Album, Answer, Answer2, Article, BarAccount, Book, Bookmark, Category, Chapter, ChapterXtra1, ChapterXtra2, Character, Child, Choice, City, Collector, Color, ComplexSortedPerson, CoverLetter, CustomArticle, CyclicOne, CyclicTwo, DooHickey, Employee, EmptyModel, Fabric, FancyDoodad, FieldOverridePost, FilteredManager, FooAccount, FoodDelivery, FunkyTag, Gallery, Grommet, Inquisition, Language, Link, MainPrepopulated, Media, ModelWithStringPrimaryKey, OtherStory, Paper, Parent, ParentWithDependentChildren, ParentWithUUIDPK, Person, Persona, Picture, Pizza, Plot, PlotDetails, PluggableSearchPerson, Podcast, Post, PrePopulatedPost, Promo, Question, ReadablePizza, ReadOnlyPizza, ReadOnlyRelatedField, Recommendation, Recommender, RelatedPrepopulated, RelatedWithUUIDPKModel, Report, Restaurant, RowLevelChangePermissionModel, SecretHideout, Section, ShortMessage, Simple, Song, State, Story, SuperSecretHideout, SuperVillain, Telegram, TitleTranslation, Topping, UnchangeableObject, UndeletableObject, UnorderedObject, UserProxy, Villain, Vodcast, Whatsit, Widget, Worker, WorkHour, ) ERROR_MESSAGE = "Please enter the correct username and password \ for a staff account. Note that both fields may be case-sensitive." MULTIPART_ENCTYPE = 'enctype="multipart/form-data"' def make_aware_datetimes(dt, iana_key): """Makes one aware datetime for each supported time zone provider.""" yield pytz.timezone(iana_key).localize(dt, is_dst=None) if zoneinfo is not None: yield dt.replace(tzinfo=zoneinfo.ZoneInfo(iana_key)) class AdminFieldExtractionMixin: """ Helper methods for extracting data from AdminForm. """ def get_admin_form_fields(self, response): """ Return a list of AdminFields for the AdminForm in the response. 
""" fields = [] for fieldset in response.context['adminform']: for field_line in fieldset: fields.extend(field_line) return fields def get_admin_readonly_fields(self, response): """ Return the readonly fields for the response's AdminForm. """ return [f for f in self.get_admin_form_fields(response) if f.is_readonly] def get_admin_readonly_field(self, response, field_name): """ Return the readonly field for the given field_name. """ admin_readonly_fields = self.get_admin_readonly_fields(response) for field in admin_readonly_fields: if field.field['name'] == field_name: return field @override_settings(ROOT_URLCONF='admin_views.urls', USE_I18N=True, USE_L10N=False, LANGUAGE_CODE='en') class AdminViewBasicTestCase(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1, title='Article 1', ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1, title='Article 2', ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.color1 = Color.objects.create(value='Red', warm=True) cls.color2 = Color.objects.create(value='Orange', warm=True) cls.color3 = Color.objects.create(value='Blue', warm=False) cls.color4 = Color.objects.create(value='Green', warm=False) cls.fab1 = Fabric.objects.create(surface='x') cls.fab2 = Fabric.objects.create(surface='y') cls.fab3 = Fabric.objects.create(surface='plain') cls.b1 = Book.objects.create(name='Book 1') cls.b2 = Book.objects.create(name='Book 2') cls.pro1 = Promo.objects.create(name='Promo 1', book=cls.b1) cls.pro1 = Promo.objects.create(name='Promo 2', book=cls.b2) cls.chap1 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b1) cls.chap2 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b1) cls.chap3 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b2) cls.chap4 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b2) cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='ChapterXtra1 1') cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='ChapterXtra1 2') Actor.objects.create(name='Palin', age=27) # Post data for edit inline cls.inline_post_data = { "name": "Test section", # inline data "article_set-TOTAL_FORMS": "6", "article_set-INITIAL_FORMS": "3", "article_set-MAX_NUM_FORMS": "0", "article_set-0-id": cls.a1.pk, # there is no title in database, give one here or formset will fail. 
"article_set-0-title": "Norske bostaver æøå skaper problemer", "article_set-0-content": "&lt;p&gt;Middle content&lt;/p&gt;", "article_set-0-date_0": "2008-03-18", "article_set-0-date_1": "11:54:58", "article_set-0-section": cls.s1.pk, "article_set-1-id": cls.a2.pk, "article_set-1-title": "Need a title.", "article_set-1-content": "&lt;p&gt;Oldest content&lt;/p&gt;", "article_set-1-date_0": "2000-03-18", "article_set-1-date_1": "11:54:58", "article_set-2-id": cls.a3.pk, "article_set-2-title": "Need a title.", "article_set-2-content": "&lt;p&gt;Newest content&lt;/p&gt;", "article_set-2-date_0": "2009-03-18", "article_set-2-date_1": "11:54:58", "article_set-3-id": "", "article_set-3-title": "", "article_set-3-content": "", "article_set-3-date_0": "", "article_set-3-date_1": "", "article_set-4-id": "", "article_set-4-title": "", "article_set-4-content": "", "article_set-4-date_0": "", "article_set-4-date_1": "", "article_set-5-id": "", "article_set-5-title": "", "article_set-5-content": "", "article_set-5-date_0": "", "article_set-5-date_1": "", } def setUp(self): self.client.force_login(self.superuser) def assertContentBefore(self, response, text1, text2, failing_msg=None): """ Testing utility asserting that text1 appears before text2 in response content. """ self.assertEqual(response.status_code, 200) self.assertLess( response.content.index(text1.encode()), response.content.index(text2.encode()), (failing_msg or '') + '\nResponse:\n' + response.content.decode(response.charset) ) class AdminViewBasicTest(AdminViewBasicTestCase): def test_trailing_slash_required(self): """ If you leave off the trailing slash, app should redirect and add it. """ add_url = reverse('admin:admin_views_article_add') response = self.client.get(add_url[:-1]) self.assertRedirects(response, add_url, status_code=301) def test_basic_add_GET(self): """ A smoke test to ensure GET on the add_view works. """ response = self.client.get(reverse('admin:admin_views_section_add')) self.assertIsInstance(response, TemplateResponse) self.assertEqual(response.status_code, 200) def test_add_with_GET_args(self): response = self.client.get(reverse('admin:admin_views_section_add'), {'name': 'My Section'}) self.assertContains( response, 'value="My Section"', msg_prefix="Couldn't find an input with the right value in the response" ) def test_basic_edit_GET(self): """ A smoke test to ensure GET on the change_view works. """ response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertIsInstance(response, TemplateResponse) self.assertEqual(response.status_code, 200) def test_basic_edit_GET_string_PK(self): """ GET on the change_view (when passing a string as the PK argument for a model with an integer PK field) redirects to the index page with a message saying the object doesn't exist. """ response = self.client.get(reverse('admin:admin_views_section_change', args=(quote("abc/<b>"),)), follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['section with ID “abc/<b>” doesn’t exist. Perhaps it was deleted?'] ) def test_basic_edit_GET_old_url_redirect(self): """ The change URL changed in Django 1.9, but the old one still redirects. 
""" response = self.client.get( reverse('admin:admin_views_section_change', args=(self.s1.pk,)).replace('change/', '') ) self.assertRedirects(response, reverse('admin:admin_views_section_change', args=(self.s1.pk,))) def test_basic_inheritance_GET_string_PK(self): """ GET on the change_view (for inherited models) redirects to the index page with a message saying the object doesn't exist. """ response = self.client.get(reverse('admin:admin_views_supervillain_change', args=('abc',)), follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['super villain with ID “abc” doesn’t exist. Perhaps it was deleted?'] ) def test_basic_add_POST(self): """ A smoke test to ensure POST on add_view works. """ post_data = { "name": "Another Section", # inline data "article_set-TOTAL_FORMS": "3", "article_set-INITIAL_FORMS": "0", "article_set-MAX_NUM_FORMS": "0", } response = self.client.post(reverse('admin:admin_views_section_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_popup_add_POST(self): """ Ensure http response from a popup is properly escaped. """ post_data = { IS_POPUP_VAR: '1', 'title': 'title with a new\nline', 'content': 'some content', 'date_0': '2010-09-10', 'date_1': '14:55:39', } response = self.client.post(reverse('admin:admin_views_article_add'), post_data) self.assertContains(response, 'title with a new\\nline') def test_basic_edit_POST(self): """ A smoke test to ensure POST on edit_view works. """ url = reverse('admin:admin_views_section_change', args=(self.s1.pk,)) response = self.client.post(url, self.inline_post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_edit_save_as(self): """ Test "save as". """ post_data = self.inline_post_data.copy() post_data.update({ '_saveasnew': 'Save+as+new', "article_set-1-section": "1", "article_set-2-section": "1", "article_set-3-section": "1", "article_set-4-section": "1", "article_set-5-section": "1", }) response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_edit_save_as_delete_inline(self): """ Should be able to "Save as new" while also deleting an inline. """ post_data = self.inline_post_data.copy() post_data.update({ '_saveasnew': 'Save+as+new', "article_set-1-section": "1", "article_set-2-section": "1", "article_set-2-DELETE": "1", "article_set-3-section": "1", }) response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data) self.assertEqual(response.status_code, 302) # started with 3 articles, one was deleted. self.assertEqual(Section.objects.latest('id').article_set.count(), 2) def test_change_list_column_field_classes(self): response = self.client.get(reverse('admin:admin_views_article_changelist')) # callables display the callable name. self.assertContains(response, 'column-callable_year') self.assertContains(response, 'field-callable_year') # lambdas display as "lambda" + index that they appear in list_display. 
self.assertContains(response, 'column-lambda8') self.assertContains(response, 'field-lambda8') def test_change_list_sorting_callable(self): """ Ensure we can sort on a list_display field that is a callable (column 2 is callable_year in ArticleAdmin) """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 2}) self.assertContentBefore( response, 'Oldest content', 'Middle content', "Results of sorting on callable are out of order." ) self.assertContentBefore( response, 'Middle content', 'Newest content', "Results of sorting on callable are out of order." ) def test_change_list_sorting_property(self): """ Sort on a list_display field that is a property (column 10 is a property in Article model). """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 10}) self.assertContentBefore( response, 'Oldest content', 'Middle content', 'Results of sorting on property are out of order.', ) self.assertContentBefore( response, 'Middle content', 'Newest content', 'Results of sorting on property are out of order.', ) def test_change_list_sorting_callable_query_expression(self): """Query expressions may be used for admin_order_field.""" tests = [ ('order_by_expression', 9), ('order_by_f_expression', 12), ('order_by_orderby_expression', 13), ] for admin_order_field, index in tests: with self.subTest(admin_order_field): response = self.client.get( reverse('admin:admin_views_article_changelist'), {'o': index}, ) self.assertContentBefore( response, 'Oldest content', 'Middle content', 'Results of sorting on callable are out of order.' ) self.assertContentBefore( response, 'Middle content', 'Newest content', 'Results of sorting on callable are out of order.' ) def test_change_list_sorting_callable_query_expression_reverse(self): tests = [ ('order_by_expression', -9), ('order_by_f_expression', -12), ('order_by_orderby_expression', -13), ] for admin_order_field, index in tests: with self.subTest(admin_order_field): response = self.client.get( reverse('admin:admin_views_article_changelist'), {'o': index}, ) self.assertContentBefore( response, 'Middle content', 'Oldest content', 'Results of sorting on callable are out of order.' ) self.assertContentBefore( response, 'Newest content', 'Middle content', 'Results of sorting on callable are out of order.' ) def test_change_list_sorting_model(self): """ Ensure we can sort on a list_display field that is a Model method (column 3 is 'model_year' in ArticleAdmin) """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-3'}) self.assertContentBefore( response, 'Newest content', 'Middle content', "Results of sorting on Model method are out of order." ) self.assertContentBefore( response, 'Middle content', 'Oldest content', "Results of sorting on Model method are out of order." ) def test_change_list_sorting_model_admin(self): """ Ensure we can sort on a list_display field that is a ModelAdmin method (column 4 is 'modeladmin_year' in ArticleAdmin) """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '4'}) self.assertContentBefore( response, 'Oldest content', 'Middle content', "Results of sorting on ModelAdmin method are out of order." ) self.assertContentBefore( response, 'Middle content', 'Newest content', "Results of sorting on ModelAdmin method are out of order." ) def test_change_list_sorting_model_admin_reverse(self): """ Ensure we can sort on a list_display field that is a ModelAdmin method in reverse order (i.e. 
admin_order_field uses the '-' prefix) (column 6 is 'model_year_reverse' in ArticleAdmin) """ td = '<td class="field-model_property_year">%s</td>' td_2000, td_2008, td_2009 = td % 2000, td % 2008, td % 2009 response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '6'}) self.assertContentBefore( response, td_2009, td_2008, "Results of sorting on ModelAdmin method are out of order." ) self.assertContentBefore( response, td_2008, td_2000, "Results of sorting on ModelAdmin method are out of order." ) # Let's make sure the ordering is right and that we don't get a # FieldError when we change to descending order response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-6'}) self.assertContentBefore( response, td_2000, td_2008, "Results of sorting on ModelAdmin method are out of order." ) self.assertContentBefore( response, td_2008, td_2009, "Results of sorting on ModelAdmin method are out of order." ) def test_change_list_sorting_multiple(self): p1 = Person.objects.create(name="Chris", gender=1, alive=True) p2 = Person.objects.create(name="Chris", gender=2, alive=True) p3 = Person.objects.create(name="Bob", gender=1, alive=True) link1 = reverse('admin:admin_views_person_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_person_change', args=(p2.pk,)) link3 = reverse('admin:admin_views_person_change', args=(p3.pk,)) # Sort by name, gender response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '1.2'}) self.assertContentBefore(response, link3, link1) self.assertContentBefore(response, link1, link2) # Sort by gender descending, name response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '-2.1'}) self.assertContentBefore(response, link2, link3) self.assertContentBefore(response, link3, link1) def test_change_list_sorting_preserve_queryset_ordering(self): """ If no ordering is defined in `ModelAdmin.ordering` or in the query string, then the underlying order of the queryset should not be changed, even if it is defined in `Modeladmin.get_queryset()`. Refs #11868, #7309. 
""" p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80) p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70) p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60) link1 = reverse('admin:admin_views_person_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_person_change', args=(p2.pk,)) link3 = reverse('admin:admin_views_person_change', args=(p3.pk,)) response = self.client.get(reverse('admin:admin_views_person_changelist'), {}) self.assertContentBefore(response, link3, link2) self.assertContentBefore(response, link2, link1) def test_change_list_sorting_model_meta(self): # Test ordering on Model Meta is respected l1 = Language.objects.create(iso='ur', name='Urdu') l2 = Language.objects.create(iso='ar', name='Arabic') link1 = reverse('admin:admin_views_language_change', args=(quote(l1.pk),)) link2 = reverse('admin:admin_views_language_change', args=(quote(l2.pk),)) response = self.client.get(reverse('admin:admin_views_language_changelist'), {}) self.assertContentBefore(response, link2, link1) # Test we can override with query string response = self.client.get(reverse('admin:admin_views_language_changelist'), {'o': '-1'}) self.assertContentBefore(response, link1, link2) def test_change_list_sorting_override_model_admin(self): # Test ordering on Model Admin is respected, and overrides Model Meta dt = datetime.datetime.now() p1 = Podcast.objects.create(name="A", release_date=dt) p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10)) link1 = reverse('admin:admin_views_podcast_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_podcast_change', args=(p2.pk,)) response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {}) self.assertContentBefore(response, link1, link2) def test_multiple_sort_same_field(self): # The changelist displays the correct columns if two columns correspond # to the same ordering field. dt = datetime.datetime.now() p1 = Podcast.objects.create(name="A", release_date=dt) p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10)) link1 = reverse('admin:admin_views_podcast_change', args=(quote(p1.pk),)) link2 = reverse('admin:admin_views_podcast_change', args=(quote(p2.pk),)) response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {}) self.assertContentBefore(response, link1, link2) p1 = ComplexSortedPerson.objects.create(name="Bob", age=10) p2 = ComplexSortedPerson.objects.create(name="Amy", age=20) link1 = reverse('admin:admin_views_complexsortedperson_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_complexsortedperson_change', args=(p2.pk,)) response = self.client.get(reverse('admin:admin_views_complexsortedperson_changelist'), {}) # Should have 5 columns (including action checkbox col) self.assertContains(response, '<th scope="col"', count=5) self.assertContains(response, 'Name') self.assertContains(response, 'Colored name') # Check order self.assertContentBefore(response, 'Name', 'Colored name') # Check sorting - should be by name self.assertContentBefore(response, link2, link1) def test_sort_indicators_admin_order(self): """ The admin shows default sort indicators for all kinds of 'ordering' fields: field names, method on the model admin and model itself, and other callables. See #17252. 
""" models = [(AdminOrderedField, 'adminorderedfield'), (AdminOrderedModelMethod, 'adminorderedmodelmethod'), (AdminOrderedAdminMethod, 'adminorderedadminmethod'), (AdminOrderedCallable, 'adminorderedcallable')] for model, url in models: model.objects.create(stuff='The Last Item', order=3) model.objects.create(stuff='The First Item', order=1) model.objects.create(stuff='The Middle Item', order=2) response = self.client.get(reverse('admin:admin_views_%s_changelist' % url), {}) # Should have 3 columns including action checkbox col. self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url) # Check if the correct column was selected. 2 is the index of the # 'order' column in the model admin's 'list_display' with 0 being # the implicit 'action_checkbox' and 1 being the column 'stuff'. self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'}) # Check order of records. self.assertContentBefore(response, 'The First Item', 'The Middle Item') self.assertContentBefore(response, 'The Middle Item', 'The Last Item') def test_has_related_field_in_list_display_fk(self): """Joins shouldn't be performed for <FK>_id fields in list display.""" state = State.objects.create(name='Karnataka') City.objects.create(state=state, name='Bangalore') response = self.client.get(reverse('admin:admin_views_city_changelist'), {}) response.context['cl'].list_display = ['id', 'name', 'state'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), True) response.context['cl'].list_display = ['id', 'name', 'state_id'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), False) def test_has_related_field_in_list_display_o2o(self): """Joins shouldn't be performed for <O2O>_id fields in list display.""" media = Media.objects.create(name='Foo') Vodcast.objects.create(media=media) response = self.client.get(reverse('admin:admin_views_vodcast_changelist'), {}) response.context['cl'].list_display = ['media'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), True) response.context['cl'].list_display = ['media_id'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), False) def test_limited_filter(self): """Ensure admin changelist filters do not contain objects excluded via limit_choices_to. This also tests relation-spanning filters (e.g. 'color__value'). 
""" response = self.client.get(reverse('admin:admin_views_thing_changelist')) self.assertContains( response, '<div id="changelist-filter">', msg_prefix="Expected filter not found in changelist view" ) self.assertNotContains( response, '<a href="?color__id__exact=3">Blue</a>', msg_prefix="Changelist filter not correctly limited by limit_choices_to" ) def test_relation_spanning_filters(self): changelist_url = reverse('admin:admin_views_chapterxtra1_changelist') response = self.client.get(changelist_url) self.assertContains(response, '<div id="changelist-filter">') filters = { 'chap__id__exact': { 'values': [c.id for c in Chapter.objects.all()], 'test': lambda obj, value: obj.chap.id == value, }, 'chap__title': { 'values': [c.title for c in Chapter.objects.all()], 'test': lambda obj, value: obj.chap.title == value, }, 'chap__book__id__exact': { 'values': [b.id for b in Book.objects.all()], 'test': lambda obj, value: obj.chap.book.id == value, }, 'chap__book__name': { 'values': [b.name for b in Book.objects.all()], 'test': lambda obj, value: obj.chap.book.name == value, }, 'chap__book__promo__id__exact': { 'values': [p.id for p in Promo.objects.all()], 'test': lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists(), }, 'chap__book__promo__name': { 'values': [p.name for p in Promo.objects.all()], 'test': lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists(), }, # A forward relation (book) after a reverse relation (promo). 'guest_author__promo__book__id__exact': { 'values': [p.id for p in Book.objects.all()], 'test': lambda obj, value: obj.guest_author.promo_set.filter(book=value).exists(), }, } for filter_path, params in filters.items(): for value in params['values']: query_string = urlencode({filter_path: value}) # ensure filter link exists self.assertContains(response, '<a href="?%s"' % query_string) # ensure link works filtered_response = self.client.get('%s?%s' % (changelist_url, query_string)) self.assertEqual(filtered_response.status_code, 200) # ensure changelist contains only valid objects for obj in filtered_response.context['cl'].queryset.all(): self.assertTrue(params['test'](obj, value)) def test_incorrect_lookup_parameters(self): """Ensure incorrect lookup parameters are handled gracefully.""" changelist_url = reverse('admin:admin_views_thing_changelist') response = self.client.get(changelist_url, {'notarealfield': '5'}) self.assertRedirects(response, '%s?e=1' % changelist_url) # Spanning relationships through a nonexistent related object (Refs #16716) response = self.client.get(changelist_url, {'notarealfield__whatever': '5'}) self.assertRedirects(response, '%s?e=1' % changelist_url) response = self.client.get(changelist_url, {'color__id__exact': 'StringNotInteger!'}) self.assertRedirects(response, '%s?e=1' % changelist_url) # Regression test for #18530 response = self.client.get(changelist_url, {'pub_date__gte': 'foo'}) self.assertRedirects(response, '%s?e=1' % changelist_url) def test_isnull_lookups(self): """Ensure is_null is handled correctly.""" Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now()) changelist_url = reverse('admin:admin_views_article_changelist') response = self.client.get(changelist_url) self.assertContains(response, '4 articles') response = self.client.get(changelist_url, {'section__isnull': 'false'}) self.assertContains(response, '3 articles') response = self.client.get(changelist_url, {'section__isnull': '0'}) self.assertContains(response, '3 articles') response = 
self.client.get(changelist_url, {'section__isnull': 'true'}) self.assertContains(response, '1 article') response = self.client.get(changelist_url, {'section__isnull': '1'}) self.assertContains(response, '1 article') def test_logout_and_password_change_URLs(self): response = self.client.get(reverse('admin:admin_views_article_changelist')) self.assertContains(response, '<a href="%s">' % reverse('admin:logout')) self.assertContains(response, '<a href="%s">' % reverse('admin:password_change')) def test_named_group_field_choices_change_list(self): """ Ensures the admin changelist shows correct values in the relevant column for rows corresponding to instances of a model in which a named group has been used in the choices option of a field. """ link1 = reverse('admin:admin_views_fabric_change', args=(self.fab1.pk,)) link2 = reverse('admin:admin_views_fabric_change', args=(self.fab2.pk,)) response = self.client.get(reverse('admin:admin_views_fabric_changelist')) fail_msg = ( "Changelist table isn't showing the right human-readable values " "set by a model field 'choices' option named group." ) self.assertContains(response, '<a href="%s">Horizontal</a>' % link1, msg_prefix=fail_msg, html=True) self.assertContains(response, '<a href="%s">Vertical</a>' % link2, msg_prefix=fail_msg, html=True) def test_named_group_field_choices_filter(self): """ Ensures the filter UI shows correctly when at least one named group has been used in the choices option of a model field. """ response = self.client.get(reverse('admin:admin_views_fabric_changelist')) fail_msg = ( "Changelist filter isn't showing options contained inside a model " "field 'choices' option named group." ) self.assertContains(response, '<div id="changelist-filter">') self.assertContains( response, '<a href="?surface__exact=x" title="Horizontal">Horizontal</a>', msg_prefix=fail_msg, html=True ) self.assertContains( response, '<a href="?surface__exact=y" title="Vertical">Vertical</a>', msg_prefix=fail_msg, html=True ) def test_change_list_null_boolean_display(self): Post.objects.create(public=None) response = self.client.get(reverse('admin:admin_views_post_changelist')) self.assertContains(response, 'icon-unknown.svg') def test_display_decorator_with_boolean_and_empty_value(self): msg = ( 'The boolean and empty_value arguments to the @display decorator ' 'are mutually exclusive.' ) with self.assertRaisesMessage(ValueError, msg): class BookAdmin(admin.ModelAdmin): @admin.display(boolean=True, empty_value='(Missing)') def is_published(self, obj): return obj.publish_date is not None def test_i18n_language_non_english_default(self): """ Check if the JavaScript i18n view returns an empty language catalog if the default language is non-English but the selected language is English. See #13388 and #3594 for more details. """ with self.settings(LANGUAGE_CODE='fr'), translation.override('en-us'): response = self.client.get(reverse('admin:jsi18n')) self.assertNotContains(response, 'Choisir une heure') def test_i18n_language_non_english_fallback(self): """ Makes sure that the fallback language is still working properly in cases where the selected language cannot be found. 
""" with self.settings(LANGUAGE_CODE='fr'), translation.override('none'): response = self.client.get(reverse('admin:jsi18n')) self.assertContains(response, 'Choisir une heure') def test_jsi18n_with_context(self): response = self.client.get(reverse('admin-extra-context:jsi18n')) self.assertEqual(response.status_code, 200) def test_L10N_deactivated(self): """ Check if L10N is deactivated, the JavaScript i18n view doesn't return localized date/time formats. Refs #14824. """ with self.settings(LANGUAGE_CODE='ru', USE_L10N=False), translation.override('none'): response = self.client.get(reverse('admin:jsi18n')) self.assertNotContains(response, '%d.%m.%Y %H:%M:%S') self.assertContains(response, '%Y-%m-%d %H:%M:%S') def test_disallowed_filtering(self): with self.assertLogs('django.security.DisallowedModelAdminLookup', 'ERROR'): response = self.client.get( "%s?owner__email__startswith=fuzzy" % reverse('admin:admin_views_album_changelist') ) self.assertEqual(response.status_code, 400) # Filters are allowed if explicitly included in list_filter response = self.client.get("%s?color__value__startswith=red" % reverse('admin:admin_views_thing_changelist')) self.assertEqual(response.status_code, 200) response = self.client.get("%s?color__value=red" % reverse('admin:admin_views_thing_changelist')) self.assertEqual(response.status_code, 200) # Filters should be allowed if they involve a local field without the # need to allow them in list_filter or date_hierarchy. response = self.client.get("%s?age__gt=30" % reverse('admin:admin_views_person_changelist')) self.assertEqual(response.status_code, 200) e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123') e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124') WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1) WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2) response = self.client.get(reverse('admin:admin_views_workhour_changelist')) self.assertContains(response, 'employee__person_ptr__exact') response = self.client.get("%s?employee__person_ptr__exact=%d" % ( reverse('admin:admin_views_workhour_changelist'), e1.pk) ) self.assertEqual(response.status_code, 200) def test_disallowed_to_field(self): url = reverse('admin:admin_views_section_changelist') with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.get(url, {TO_FIELD_VAR: 'missing_field'}) self.assertEqual(response.status_code, 400) # Specifying a field that is not referred by any other model registered # to this admin site should raise an exception. with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) # #23839 - Primary key should always be allowed, even if the referenced model isn't registered. response = self.client.get(reverse('admin:admin_views_notreferenced_changelist'), {TO_FIELD_VAR: 'id'}) self.assertEqual(response.status_code, 200) # #23915 - Specifying a field referenced by another model though a m2m should be allowed. response = self.client.get(reverse('admin:admin_views_recipe_changelist'), {TO_FIELD_VAR: 'rname'}) self.assertEqual(response.status_code, 200) # #23604, #23915 - Specifying a field referenced through a reverse m2m relationship should be allowed. 
response = self.client.get(reverse('admin:admin_views_ingredient_changelist'), {TO_FIELD_VAR: 'iname'}) self.assertEqual(response.status_code, 200) # #23329 - Specifying a field that is not referred by any other model directly registered # to this admin site but registered through inheritance should be allowed. response = self.client.get(reverse('admin:admin_views_referencedbyparent_changelist'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 200) # #23431 - Specifying a field that is only referred to by a inline of a registered # model should be allowed. response = self.client.get(reverse('admin:admin_views_referencedbyinline_changelist'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 200) # #25622 - Specifying a field of a model only referred by a generic # relation should raise DisallowedModelAdminToField. url = reverse('admin:admin_views_referencedbygenrel_changelist') with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.get(url, {TO_FIELD_VAR: 'object_id'}) self.assertEqual(response.status_code, 400) # We also want to prevent the add, change, and delete views from # leaking a disallowed field value. with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.post(reverse('admin:admin_views_section_add'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) section = Section.objects.create() url = reverse('admin:admin_views_section_change', args=(section.pk,)) with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.post(url, {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) url = reverse('admin:admin_views_section_delete', args=(section.pk,)) with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.post(url, {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) def test_allowed_filtering_15103(self): """ Regressions test for ticket 15103 - filtering on fields defined in a ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields can break. """ # Filters should be allowed if they are defined on a ForeignKey pointing to this model url = "%s?leader__name=Palin&leader__age=27" % reverse('admin:admin_views_inquisition_changelist') response = self.client.get(url) self.assertEqual(response.status_code, 200) def test_popup_dismiss_related(self): """ Regression test for ticket 20664 - ensure the pk is properly quoted. """ actor = Actor.objects.create(name="Palin", age=27) response = self.client.get("%s?%s" % (reverse('admin:admin_views_actor_changelist'), IS_POPUP_VAR)) self.assertContains(response, 'data-popup-opener="%s"' % actor.pk) def test_hide_change_password(self): """ Tests if the "change password" link in the admin is hidden if the User does not have a usable password set. (against 9bea85795705d015cdadc82c68b99196a8554f5c) """ user = User.objects.get(username='super') user.set_unusable_password() user.save() self.client.force_login(user) response = self.client.get(reverse('admin:index')) self.assertNotContains( response, reverse('admin:password_change'), msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.' ) def test_change_view_with_show_delete_extra_context(self): """ The 'show_delete' context variable in the admin's change view controls the display of the delete button. 
""" instance = UndeletableObject.objects.create(name='foo') response = self.client.get(reverse('admin:admin_views_undeletableobject_change', args=(instance.pk,))) self.assertNotContains(response, 'deletelink') def test_change_view_logs_m2m_field_changes(self): """Changes to ManyToManyFields are included in the object's history.""" pizza = ReadablePizza.objects.create(name='Cheese') cheese = Topping.objects.create(name='cheese') post_data = {'name': pizza.name, 'toppings': [cheese.pk]} response = self.client.post(reverse('admin:admin_views_readablepizza_change', args=(pizza.pk,)), post_data) self.assertRedirects(response, reverse('admin:admin_views_readablepizza_changelist')) pizza_ctype = ContentType.objects.get_for_model(ReadablePizza, for_concrete_model=False) log = LogEntry.objects.filter(content_type=pizza_ctype, object_id=pizza.pk).first() self.assertEqual(log.get_change_message(), 'Changed Toppings.') def test_allows_attributeerror_to_bubble_up(self): """ AttributeErrors are allowed to bubble when raised inside a change list view. Requires a model to be created so there's something to display. Refs: #16655, #18593, and #18747 """ Simple.objects.create() with self.assertRaises(AttributeError): self.client.get(reverse('admin:admin_views_simple_changelist')) def test_changelist_with_no_change_url(self): """ ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url for change_view is removed from get_urls (#20934). """ o = UnchangeableObject.objects.create() response = self.client.get(reverse('admin:admin_views_unchangeableobject_changelist')) # Check the format of the shown object -- shouldn't contain a change link self.assertContains(response, '<th class="field-__str__">%s</th>' % o, html=True) def test_invalid_appindex_url(self): """ #21056 -- URL reversing shouldn't work for nonexistent apps. """ good_url = '/test_admin/admin/admin_views/' confirm_good_url = reverse('admin:app_list', kwargs={'app_label': 'admin_views'}) self.assertEqual(good_url, confirm_good_url) with self.assertRaises(NoReverseMatch): reverse('admin:app_list', kwargs={'app_label': 'this_should_fail'}) with self.assertRaises(NoReverseMatch): reverse('admin:app_list', args=('admin_views2',)) def test_resolve_admin_views(self): index_match = resolve('/test_admin/admin4/') list_match = resolve('/test_admin/admin4/auth/user/') self.assertIs(index_match.func.admin_site, customadmin.simple_site) self.assertIsInstance(list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin) def test_adminsite_display_site_url(self): """ #13749 - Admin should display link to front-end site 'View site' """ url = reverse('admin:index') response = self.client.get(url) self.assertEqual(response.context['site_url'], '/my-site-url/') self.assertContains(response, '<a href="/my-site-url/">View site</a>') def test_date_hierarchy_empty_queryset(self): self.assertIs(Question.objects.exists(), False) response = self.client.get(reverse('admin:admin_views_answer2_changelist')) self.assertEqual(response.status_code, 200) @override_settings(TIME_ZONE='America/Sao_Paulo', USE_TZ=True) def test_date_hierarchy_timezone_dst(self): # This datetime doesn't exist in this timezone due to DST. 
for date in make_aware_datetimes(datetime.datetime(2016, 10, 16, 15), 'America/Sao_Paulo'): with self.subTest(repr(date.tzinfo)): q = Question.objects.create(question='Why?', expires=date) Answer2.objects.create(question=q, answer='Because.') response = self.client.get(reverse('admin:admin_views_answer2_changelist')) self.assertContains(response, 'question__expires__day=16') self.assertContains(response, 'question__expires__month=10') self.assertContains(response, 'question__expires__year=2016') @override_settings(TIME_ZONE='America/Los_Angeles', USE_TZ=True) def test_date_hierarchy_local_date_differ_from_utc(self): # This datetime is 2017-01-01 in UTC. for date in make_aware_datetimes(datetime.datetime(2016, 12, 31, 16), 'America/Los_Angeles'): with self.subTest(repr(date.tzinfo)): q = Question.objects.create(question='Why?', expires=date) Answer2.objects.create(question=q, answer='Because.') response = self.client.get(reverse('admin:admin_views_answer2_changelist')) self.assertContains(response, 'question__expires__day=31') self.assertContains(response, 'question__expires__month=12') self.assertContains(response, 'question__expires__year=2016') def test_sortable_by_columns_subset(self): expected_sortable_fields = ('date', 'callable_year') expected_not_sortable_fields = ( 'content', 'model_year', 'modeladmin_year', 'model_year_reversed', 'section', ) response = self.client.get(reverse('admin6:admin_views_article_changelist')) for field_name in expected_sortable_fields: self.assertContains(response, '<th scope="col" class="sortable column-%s">' % field_name) for field_name in expected_not_sortable_fields: self.assertContains(response, '<th scope="col" class="column-%s">' % field_name) def test_get_sortable_by_columns_subset(self): response = self.client.get(reverse('admin6:admin_views_actor_changelist')) self.assertContains(response, '<th scope="col" class="sortable column-age">') self.assertContains(response, '<th scope="col" class="column-name">') def test_sortable_by_no_column(self): expected_not_sortable_fields = ('title', 'book') response = self.client.get(reverse('admin6:admin_views_chapter_changelist')) for field_name in expected_not_sortable_fields: self.assertContains(response, '<th scope="col" class="column-%s">' % field_name) self.assertNotContains(response, '<th scope="col" class="sortable column') def test_get_sortable_by_no_column(self): response = self.client.get(reverse('admin6:admin_views_color_changelist')) self.assertContains(response, '<th scope="col" class="column-value">') self.assertNotContains(response, '<th scope="col" class="sortable column') def test_app_index_context(self): response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertContains( response, '<title>Admin_Views administration | Django site admin</title>', ) self.assertEqual(response.context['title'], 'Admin_Views administration') self.assertEqual(response.context['app_label'], 'admin_views') def test_change_view_subtitle_per_object(self): response = self.client.get( reverse('admin:admin_views_article_change', args=(self.a1.pk,)), ) self.assertContains( response, '<title>Article 1 | Change article | Django site admin</title>', ) self.assertContains(response, '<h1>Change article</h1>') self.assertContains(response, '<h2>Article 1</h2>') response = self.client.get( reverse('admin:admin_views_article_change', args=(self.a2.pk,)), ) self.assertContains( response, '<title>Article 2 | Change article | Django site admin</title>', ) self.assertContains(response, '<h1>Change 
article</h1>') self.assertContains(response, '<h2>Article 2</h2>') def test_view_subtitle_per_object(self): viewuser = User.objects.create_user( username='viewuser', password='secret', is_staff=True, ) viewuser.user_permissions.add( get_perm(Article, get_permission_codename('view', Article._meta)), ) self.client.force_login(viewuser) response = self.client.get( reverse('admin:admin_views_article_change', args=(self.a1.pk,)), ) self.assertContains( response, '<title>Article 1 | View article | Django site admin</title>', ) self.assertContains(response, '<h1>View article</h1>') self.assertContains(response, '<h2>Article 1</h2>') response = self.client.get( reverse('admin:admin_views_article_change', args=(self.a2.pk,)), ) self.assertContains( response, '<title>Article 2 | View article | Django site admin</title>', ) self.assertContains(response, '<h1>View article</h1>') self.assertContains(response, '<h2>Article 2</h2>') def test_formset_kwargs_can_be_overridden(self): response = self.client.get(reverse('admin:admin_views_city_add')) self.assertContains(response, 'overridden_name') def test_render_views_no_subtitle(self): tests = [ reverse('admin:index'), reverse('admin:app_list', args=('admin_views',)), reverse('admin:admin_views_article_delete', args=(self.a1.pk,)), reverse('admin:admin_views_article_history', args=(self.a1.pk,)), ] for url in tests: with self.subTest(url=url): with self.assertNoLogs('django.template', 'DEBUG'): self.client.get(url) @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', # Put this app's and the shared tests templates dirs in DIRS to take precedence # over the admin's templates dir. 'DIRS': [ os.path.join(os.path.dirname(__file__), 'templates'), os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates'), ], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }]) class AdminCustomTemplateTests(AdminViewBasicTestCase): def test_custom_model_admin_templates(self): # Test custom change list template with custom extra context response = self.client.get(reverse('admin:admin_views_customarticle_changelist')) self.assertContains(response, "var hello = 'Hello!';") self.assertTemplateUsed(response, 'custom_admin/change_list.html') # Test custom add form template response = self.client.get(reverse('admin:admin_views_customarticle_add')) self.assertTemplateUsed(response, 'custom_admin/add_form.html') # Add an article so we can test delete, change, and history views post = self.client.post(reverse('admin:admin_views_customarticle_add'), { 'content': '<p>great article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39' }) self.assertRedirects(post, reverse('admin:admin_views_customarticle_changelist')) self.assertEqual(CustomArticle.objects.all().count(), 1) article_pk = CustomArticle.objects.all()[0].pk # Test custom delete, change, and object history templates # Test custom change form template response = self.client.get(reverse('admin:admin_views_customarticle_change', args=(article_pk,))) self.assertTemplateUsed(response, 'custom_admin/change_form.html') response = self.client.get(reverse('admin:admin_views_customarticle_delete', args=(article_pk,))) self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html') response = self.client.post(reverse('admin:admin_views_customarticle_changelist'), data={ 'index': 
0, 'action': ['delete_selected'], '_selected_action': ['1'], }) self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html') response = self.client.get(reverse('admin:admin_views_customarticle_history', args=(article_pk,))) self.assertTemplateUsed(response, 'custom_admin/object_history.html') # A custom popup response template may be specified by # ModelAdmin.popup_response_template. response = self.client.post(reverse('admin:admin_views_customarticle_add') + '?%s=1' % IS_POPUP_VAR, { 'content': '<p>great article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', IS_POPUP_VAR: '1' }) self.assertEqual(response.template_name, 'custom_admin/popup_response.html') def test_extended_bodyclass_template_change_form(self): """ The admin/change_form.html template uses block.super in the bodyclass block. """ response = self.client.get(reverse('admin:admin_views_section_add')) self.assertContains(response, 'bodyclass_consistency_check ') def test_change_password_template(self): user = User.objects.get(username='super') response = self.client.get(reverse('admin:auth_user_password_change', args=(user.id,))) # The auth/user/change_password.html template uses super in the # bodyclass block. self.assertContains(response, 'bodyclass_consistency_check ') # When a site has multiple passwords in the browser's password manager, # a browser pop up asks which user the new password is for. To prevent # this, the username is added to the change password form. self.assertContains(response, '<input type="text" name="username" value="super" class="hidden">') def test_extended_bodyclass_template_index(self): """ The admin/index.html template uses block.super in the bodyclass block. """ response = self.client.get(reverse('admin:index')) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_change_list(self): """ The admin/change_list.html' template uses block.super in the bodyclass block. """ response = self.client.get(reverse('admin:admin_views_article_changelist')) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_template_login(self): """ The admin/login.html template uses block.super in the bodyclass block. """ self.client.logout() response = self.client.get(reverse('admin:login')) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_template_delete_confirmation(self): """ The admin/delete_confirmation.html template uses block.super in the bodyclass block. """ group = Group.objects.create(name="foogroup") response = self.client.get(reverse('admin:auth_group_delete', args=(group.id,))) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_template_delete_selected_confirmation(self): """ The admin/delete_selected_confirmation.html template uses block.super in bodyclass block. """ group = Group.objects.create(name="foogroup") post_data = { 'action': 'delete_selected', 'selected_across': '0', 'index': '0', '_selected_action': group.id } response = self.client.post(reverse('admin:auth_group_changelist'), post_data) self.assertEqual(response.context['site_header'], 'Django administration') self.assertContains(response, 'bodyclass_consistency_check ') def test_filter_with_custom_template(self): """ A custom template can be used to render an admin filter. 
""" response = self.client.get(reverse('admin:admin_views_color2_changelist')) self.assertTemplateUsed(response, 'custom_filter_template.html') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewFormUrlTest(TestCase): current_app = "admin3" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def setUp(self): self.client.force_login(self.superuser) def test_change_form_URL_has_correct_value(self): """ change_view has form_url in response.context """ response = self.client.get( reverse('admin:admin_views_section_change', args=(self.s1.pk,), current_app=self.current_app) ) self.assertIn('form_url', response.context, msg='form_url not present in response.context') self.assertEqual(response.context['form_url'], 'pony') def test_initial_data_can_be_overridden(self): """ The behavior for setting initial form data can be overridden in the ModelAdmin class. Usually, the initial value is set via the GET params. """ response = self.client.get( reverse('admin:admin_views_restaurant_add', current_app=self.current_app), {'name': 'test_value'} ) # this would be the usual behaviour self.assertNotContains(response, 'value="test_value"') # this is the overridden behaviour self.assertContains(response, 'value="overridden_value"') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminJavaScriptTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_js_minified_only_if_debug_is_false(self): """ The minified versions of the JS files are only used when DEBUG is False. 
""" with override_settings(DEBUG=False): response = self.client.get(reverse('admin:admin_views_section_add')) self.assertNotContains(response, 'vendor/jquery/jquery.js') self.assertContains(response, 'vendor/jquery/jquery.min.js') self.assertContains(response, 'prepopulate.js') self.assertContains(response, 'actions.js') self.assertContains(response, 'collapse.js') self.assertContains(response, 'inlines.js') with override_settings(DEBUG=True): response = self.client.get(reverse('admin:admin_views_section_add')) self.assertContains(response, 'vendor/jquery/jquery.js') self.assertNotContains(response, 'vendor/jquery/jquery.min.js') self.assertContains(response, 'prepopulate.js') self.assertContains(response, 'actions.js') self.assertContains(response, 'collapse.js') self.assertContains(response, 'inlines.js') @override_settings(ROOT_URLCONF='admin_views.urls') class SaveAsTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) def setUp(self): self.client.force_login(self.superuser) def test_save_as_duplication(self): """'save as' creates a new person""" post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42} response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), post_data) self.assertEqual(len(Person.objects.filter(name='John M')), 1) self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1) new_person = Person.objects.latest('id') self.assertRedirects(response, reverse('admin:admin_views_person_change', args=(new_person.pk,))) def test_save_as_continue_false(self): """ Saving a new object using "Save as new" redirects to the changelist instead of the change view when ModelAdmin.save_as_continue=False. """ post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42} url = reverse('admin:admin_views_person_change', args=(self.per1.pk,), current_app=site2.name) response = self.client.post(url, post_data) self.assertEqual(len(Person.objects.filter(name='John M')), 1) self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1) self.assertRedirects(response, reverse('admin:admin_views_person_changelist', current_app=site2.name)) def test_save_as_new_with_validation_errors(self): """ When you click "Save as new" and have a validation error, you only see the "Save as new" button and not the other save buttons, and that only the "Save as" button is visible. 
""" response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), { '_saveasnew': '', 'gender': 'invalid', '_addanother': 'fail', }) self.assertContains(response, 'Please correct the errors below.') self.assertFalse(response.context['show_save_and_add_another']) self.assertFalse(response.context['show_save_and_continue']) self.assertTrue(response.context['show_save_as_new']) def test_save_as_new_with_validation_errors_with_inlines(self): parent = Parent.objects.create(name='Father') child = Child.objects.create(parent=parent, name='Child') response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), { '_saveasnew': 'Save as new', 'child_set-0-parent': parent.pk, 'child_set-0-id': child.pk, 'child_set-0-name': 'Child', 'child_set-INITIAL_FORMS': 1, 'child_set-MAX_NUM_FORMS': 1000, 'child_set-MIN_NUM_FORMS': 0, 'child_set-TOTAL_FORMS': 4, 'name': '_invalid', }) self.assertContains(response, 'Please correct the error below.') self.assertFalse(response.context['show_save_and_add_another']) self.assertFalse(response.context['show_save_and_continue']) self.assertTrue(response.context['show_save_as_new']) def test_save_as_new_with_inlines_with_validation_errors(self): parent = Parent.objects.create(name='Father') child = Child.objects.create(parent=parent, name='Child') response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), { '_saveasnew': 'Save as new', 'child_set-0-parent': parent.pk, 'child_set-0-id': child.pk, 'child_set-0-name': '_invalid', 'child_set-INITIAL_FORMS': 1, 'child_set-MAX_NUM_FORMS': 1000, 'child_set-MIN_NUM_FORMS': 0, 'child_set-TOTAL_FORMS': 4, 'name': 'Father', }) self.assertContains(response, 'Please correct the error below.') self.assertFalse(response.context['show_save_and_add_another']) self.assertFalse(response.context['show_save_and_continue']) self.assertTrue(response.context['show_save_as_new']) @override_settings(ROOT_URLCONF='admin_views.urls') class CustomModelAdminTest(AdminViewBasicTestCase): def test_custom_admin_site_login_form(self): self.client.logout() response = self.client.get(reverse('admin2:index'), follow=True) self.assertIsInstance(response, TemplateResponse) self.assertEqual(response.status_code, 200) login = self.client.post(reverse('admin2:login'), { REDIRECT_FIELD_NAME: reverse('admin2:index'), 'username': 'customform', 'password': 'secret', }, follow=True) self.assertIsInstance(login, TemplateResponse) self.assertContains(login, 'custom form error') self.assertContains(login, 'path/to/media.css') def test_custom_admin_site_login_template(self): self.client.logout() response = self.client.get(reverse('admin2:index'), follow=True) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/login.html') self.assertContains(response, 'Hello from a custom login template') def test_custom_admin_site_logout_template(self): response = self.client.get(reverse('admin2:logout')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/logout.html') self.assertContains(response, 'Hello from a custom logout template') def test_custom_admin_site_index_view_and_template(self): response = self.client.get(reverse('admin2:index')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/index.html') self.assertContains(response, 'Hello from a custom index template *bar*') def test_custom_admin_site_app_index_view_and_template(self): response = 
self.client.get(reverse('admin2:app_list', args=('admin_views',))) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/app_index.html') self.assertContains(response, 'Hello from a custom app_index template') def test_custom_admin_site_password_change_template(self): response = self.client.get(reverse('admin2:password_change')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/password_change_form.html') self.assertContains(response, 'Hello from a custom password change form template') def test_custom_admin_site_password_change_with_extra_context(self): response = self.client.get(reverse('admin2:password_change')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/password_change_form.html') self.assertContains(response, 'eggs') def test_custom_admin_site_password_change_done_template(self): response = self.client.get(reverse('admin2:password_change_done')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/password_change_done.html') self.assertContains(response, 'Hello from a custom password change done template') def test_custom_admin_site_view(self): self.client.force_login(self.superuser) response = self.client.get(reverse('admin2:my_view')) self.assertEqual(response.content, b"Django is a magical pony!") def test_pwd_change_custom_template(self): self.client.force_login(self.superuser) su = User.objects.get(username='super') response = self.client.get(reverse('admin4:auth_user_password_change', args=(su.pk,))) self.assertEqual(response.status_code, 200) def get_perm(Model, codename): """Return the permission object, for the Model""" ct = ContentType.objects.get_for_model(Model, for_concrete_model=False) return Permission.objects.get(content_type=ct, codename=codename) @override_settings( ROOT_URLCONF='admin_views.urls', # Test with the admin's documented list of required context processors. 
TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }], ) class AdminViewPermissionsTest(TestCase): """Tests for Admin Views Permissions.""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.viewuser = User.objects.create_user(username='viewuser', password='secret', is_staff=True) cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True) cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True) cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret') cls.nostaffuser = User.objects.create_user(username='nostaff', password='secret') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1, another_section=cls.s1, ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') # Setup permissions, for our users who can add, change, and delete. opts = Article._meta # User who can view Articles cls.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('view', opts))) # User who can add Articles cls.adduser.user_permissions.add(get_perm(Article, get_permission_codename('add', opts))) # User who can change Articles cls.changeuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts))) cls.nostaffuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts))) # User who can delete Articles cls.deleteuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', opts))) cls.deleteuser.user_permissions.add(get_perm(Section, get_permission_codename('delete', Section._meta))) # login POST dicts cls.index_url = reverse('admin:index') cls.super_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'super', 'password': 'secret', } cls.super_email_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': '[email protected]', 'password': 'secret', } cls.super_email_bad_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': '[email protected]', 'password': 'notsecret', } cls.adduser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'adduser', 'password': 'secret', } cls.changeuser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'changeuser', 'password': 'secret', } cls.deleteuser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'deleteuser', 'password': 'secret', } cls.nostaff_login = { REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'), 'username': 'nostaff', 'password': 'secret', } cls.joepublic_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'joepublic', 'password': 'secret', } cls.viewuser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'viewuser', 'password': 'secret', } cls.no_username_login = { REDIRECT_FIELD_NAME: 
cls.index_url, 'password': 'secret', } def test_login(self): """ Make sure only staff members can log in. Successful posts to the login page will redirect to the original url. Unsuccessful attempts will continue to render the login page with a 200 status code. """ login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index')) # Super User response = self.client.get(self.index_url) self.assertRedirects(response, login_url) login = self.client.post(login_url, self.super_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Test if user enters email address response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.super_email_login) self.assertContains(login, ERROR_MESSAGE) # only correct passwords get a username hint login = self.client.post(login_url, self.super_email_bad_login) self.assertContains(login, ERROR_MESSAGE) new_user = User(username='jondoe', password='secret', email='[email protected]') new_user.save() # check to ensure if there are multiple email addresses a user doesn't get a 500 login = self.client.post(login_url, self.super_email_login) self.assertContains(login, ERROR_MESSAGE) # View User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.viewuser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Add User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.adduser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Change User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.changeuser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Delete User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.deleteuser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Regular User should not be able to login. response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.joepublic_login) self.assertContains(login, ERROR_MESSAGE) # Requests without username should not return 500 errors. response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.no_username_login) self.assertEqual(login.status_code, 200) self.assertFormError(login, 'form', 'username', ['This field is required.']) def test_login_redirect_for_direct_get(self): """ Login redirect should be to the admin index page when going directly to /admin/login/. """ response = self.client.get(reverse('admin:login')) self.assertEqual(response.status_code, 200) self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse('admin:index')) def test_login_has_permission(self): # Regular User should not be able to login. 
response = self.client.get(reverse('has_permission_admin:index')) self.assertEqual(response.status_code, 302) login = self.client.post(reverse('has_permission_admin:login'), self.joepublic_login) self.assertContains(login, 'permission denied') # User with permissions should be able to login. response = self.client.get(reverse('has_permission_admin:index')) self.assertEqual(response.status_code, 302) login = self.client.post(reverse('has_permission_admin:login'), self.nostaff_login) self.assertRedirects(login, reverse('has_permission_admin:index')) self.assertFalse(login.context) self.client.get(reverse('has_permission_admin:logout')) # Staff should be able to login. response = self.client.get(reverse('has_permission_admin:index')) self.assertEqual(response.status_code, 302) login = self.client.post(reverse('has_permission_admin:login'), { REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'), 'username': 'deleteuser', 'password': 'secret', }) self.assertRedirects(login, reverse('has_permission_admin:index')) self.assertFalse(login.context) self.client.get(reverse('has_permission_admin:logout')) def test_login_successfully_redirects_to_original_URL(self): response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) query_string = 'the-answer=42' redirect_url = '%s?%s' % (self.index_url, query_string) new_next = {REDIRECT_FIELD_NAME: redirect_url} post_data = self.super_login.copy() post_data.pop(REDIRECT_FIELD_NAME) login = self.client.post( '%s?%s' % (reverse('admin:login'), urlencode(new_next)), post_data) self.assertRedirects(login, redirect_url) def test_double_login_is_not_allowed(self): """Regression test for #19327""" login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index')) response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) # Establish a valid admin session login = self.client.post(login_url, self.super_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) # Logging in with non-admin user fails login = self.client.post(login_url, self.joepublic_login) self.assertContains(login, ERROR_MESSAGE) # Establish a valid admin session login = self.client.post(login_url, self.super_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) # Logging in with admin user while already logged in login = self.client.post(login_url, self.super_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) def test_login_page_notice_for_non_staff_users(self): """ A logged-in non-staff user trying to access the admin index should be presented with the login page and a hint indicating that the current user doesn't have access to it. 
""" hint_template = 'You are authenticated as {}' # Anonymous user should not be shown the hint response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'login-form') self.assertNotContains(response, hint_template.format(''), status_code=200) # Non-staff user should be shown the hint self.client.force_login(self.nostaffuser) response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'login-form') self.assertContains(response, hint_template.format(self.nostaffuser.username), status_code=200) def test_add_view(self): """Test add view restricts access and actually adds items.""" add_dict = { 'title': 'Døm ikke', 'content': '<p>great article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', 'section': self.s1.pk, } # Change User should not have access to add articles self.client.force_login(self.changeuser) # make sure the view removes test cookie self.assertIs(self.client.session.test_cookie_worked(), False) response = self.client.get(reverse('admin:admin_views_article_add')) self.assertEqual(response.status_code, 403) # Try POST just to make sure post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) self.client.get(reverse('admin:logout')) # View User should not have access to add articles self.client.force_login(self.viewuser) response = self.client.get(reverse('admin:admin_views_article_add')) self.assertEqual(response.status_code, 403) # Try POST just to make sure post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) # Now give the user permission to add but not change. self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('add', Article._meta))) response = self.client.get(reverse('admin:admin_views_article_add')) self.assertEqual(response.context['title'], 'Add article') self.assertContains(response, '<title>Add article | Django site admin</title>') self.assertContains(response, '<input type="submit" value="Save and view" name="_continue">') post = self.client.post(reverse('admin:admin_views_article_add'), add_dict, follow=False) self.assertEqual(post.status_code, 302) self.assertEqual(Article.objects.count(), 4) article = Article.objects.latest('pk') response = self.client.get(reverse('admin:admin_views_article_change', args=(article.pk,))) self.assertContains(response, '<li class="success">The article “Døm ikke” was added successfully.</li>') article.delete() self.client.get(reverse('admin:logout')) # Add user may login and POST to add view, then redirect to admin root self.client.force_login(self.adduser) addpage = self.client.get(reverse('admin:admin_views_article_add')) change_list_link = '&rsaquo; <a href="%s">Articles</a>' % reverse('admin:admin_views_article_changelist') self.assertNotContains( addpage, change_list_link, msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.' 
) post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertRedirects(post, self.index_url) self.assertEqual(Article.objects.count(), 4) self.assertEqual(len(mail.outbox), 2) self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object') self.client.get(reverse('admin:logout')) # The addition was logged correctly addition_log = LogEntry.objects.all()[0] new_article = Article.objects.last() article_ct = ContentType.objects.get_for_model(Article) self.assertEqual(addition_log.user_id, self.adduser.pk) self.assertEqual(addition_log.content_type_id, article_ct.pk) self.assertEqual(addition_log.object_id, str(new_article.pk)) self.assertEqual(addition_log.object_repr, "Døm ikke") self.assertEqual(addition_log.action_flag, ADDITION) self.assertEqual(addition_log.get_change_message(), "Added.") # Super can add too, but is redirected to the change list view self.client.force_login(self.superuser) addpage = self.client.get(reverse('admin:admin_views_article_add')) self.assertContains( addpage, change_list_link, msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.' ) post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertRedirects(post, reverse('admin:admin_views_article_changelist')) self.assertEqual(Article.objects.count(), 5) self.client.get(reverse('admin:logout')) # 8509 - if a normal user is already logged in, it is possible # to change user into the superuser without error self.client.force_login(self.joepublicuser) # Check and make sure that if user expires, data still persists self.client.force_login(self.superuser) # make sure the view removes test cookie self.assertIs(self.client.session.test_cookie_worked(), False) @mock.patch('django.contrib.admin.options.InlineModelAdmin.has_change_permission') def test_add_view_with_view_only_inlines(self, has_change_permission): """User with add permission to a section but view-only for inlines.""" self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('add', Section._meta))) self.client.force_login(self.viewuser) # Valid POST creates a new section. data = { 'name': 'New obj', 'article_set-TOTAL_FORMS': 0, 'article_set-INITIAL_FORMS': 0, } response = self.client.post(reverse('admin:admin_views_section_add'), data) self.assertRedirects(response, reverse('admin:index')) self.assertEqual(Section.objects.latest('id').name, data['name']) # InlineModelAdmin.has_change_permission()'s obj argument is always # None during object add. self.assertEqual([obj for (request, obj), _ in has_change_permission.call_args_list], [None, None]) def test_change_view(self): """Change view should restrict access and allow users to edit items.""" change_dict = { 'title': 'Ikke fordømt', 'content': '<p>edited article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', 'section': self.s1.pk, } article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,)) article_changelist_url = reverse('admin:admin_views_article_changelist') # add user should not be able to view the list of article or change any of them self.client.force_login(self.adduser) response = self.client.get(article_changelist_url) self.assertEqual(response.status_code, 403) response = self.client.get(article_change_url) self.assertEqual(response.status_code, 403) post = self.client.post(article_change_url, change_dict) self.assertEqual(post.status_code, 403) self.client.get(reverse('admin:logout')) # view user can view articles but not make changes. 
        self.client.force_login(self.viewuser)
        response = self.client.get(article_changelist_url)
        self.assertContains(
            response,
            '<title>Select article to view | Django site admin</title>',
        )
        self.assertContains(response, '<h1>Select article to view</h1>')
        self.assertEqual(response.context['title'], 'Select article to view')
        response = self.client.get(article_change_url)
        self.assertContains(response, '<title>View article | Django site admin</title>')
        self.assertContains(response, '<h1>View article</h1>')
        self.assertContains(response, '<label>Extra form field:</label>')
        self.assertContains(response, '<a href="/test_admin/admin/admin_views/article/" class="closelink">Close</a>')
        self.assertEqual(response.context['title'], 'View article')
        post = self.client.post(article_change_url, change_dict)
        self.assertEqual(post.status_code, 403)
        self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>Middle content</p>')
        self.client.get(reverse('admin:logout'))

        # change user can view all items and edit them
        self.client.force_login(self.changeuser)
        response = self.client.get(article_changelist_url)
        self.assertEqual(response.context['title'], 'Select article to change')
        self.assertContains(
            response,
            '<title>Select article to change | Django site admin</title>',
        )
        self.assertContains(response, '<h1>Select article to change</h1>')
        response = self.client.get(article_change_url)
        self.assertEqual(response.context['title'], 'Change article')
        self.assertContains(
            response,
            '<title>Change article | Django site admin</title>',
        )
        self.assertContains(response, '<h1>Change article</h1>')
        post = self.client.post(article_change_url, change_dict)
        self.assertRedirects(post, article_changelist_url)
        self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>edited article</p>')

        # one error in form should produce singular error message, multiple errors plural
        change_dict['title'] = ''
        post = self.client.post(article_change_url, change_dict)
        self.assertContains(
            post, 'Please correct the error below.',
            msg_prefix='Singular error message not found in response to post with one error'
        )
        change_dict['content'] = ''
        post = self.client.post(article_change_url, change_dict)
        self.assertContains(
            post, 'Please correct the errors below.',
            msg_prefix='Plural error message not found in response to post with multiple errors'
        )
        self.client.get(reverse('admin:logout'))

        # Test redirection when using row-level change permissions. Refs #11513.
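        # Illustrative sketch only: the row-level behaviour exercised below comes
        # from the ModelAdmin registered for RowLevelChangePermissionModel in the
        # companion admin_views admin module, which overrides the per-object
        # permission hooks along roughly these lines (names and exact conditions
        # here are assumptions, not a verbatim copy):
        #
        #     class RowLevelChangePermissionModelAdmin(admin.ModelAdmin):
        #         def has_change_permission(self, request, obj=None):
        #             # Only objects with an even id may be changed.
        #             return obj is not None and obj.id % 2 == 0
        #
        #         def has_view_permission(self, request, obj=None):
        #             # Objects whose id is a multiple of 3 may be viewed.
        #             return obj is not None and obj.id % 3 == 0
        #
        # With hooks like these, odd-id objects return 403, even-id objects are
        # editable, and id=3 is viewable (GET 200) but still rejects POSTs.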
r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id") r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id") r3 = RowLevelChangePermissionModel.objects.create(id=3, name='odd id mult 3') r6 = RowLevelChangePermissionModel.objects.create(id=6, name='even id mult 3') change_url_1 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r1.pk,)) change_url_2 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r2.pk,)) change_url_3 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r3.pk,)) change_url_6 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r6.pk,)) logins = [self.superuser, self.viewuser, self.adduser, self.changeuser, self.deleteuser] for login_user in logins: with self.subTest(login_user.username): self.client.force_login(login_user) response = self.client.get(change_url_1) self.assertEqual(response.status_code, 403) response = self.client.post(change_url_1, {'name': 'changed'}) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id') self.assertEqual(response.status_code, 403) response = self.client.get(change_url_2) self.assertEqual(response.status_code, 200) response = self.client.post(change_url_2, {'name': 'changed'}) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed') self.assertRedirects(response, self.index_url) response = self.client.get(change_url_3) self.assertEqual(response.status_code, 200) response = self.client.post(change_url_3, {'name': 'changed'}) self.assertEqual(response.status_code, 403) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=3).name, 'odd id mult 3') response = self.client.get(change_url_6) self.assertEqual(response.status_code, 200) response = self.client.post(change_url_6, {'name': 'changed'}) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=6).name, 'changed') self.assertRedirects(response, self.index_url) self.client.get(reverse('admin:logout')) for login_user in [self.joepublicuser, self.nostaffuser]: with self.subTest(login_user.username): self.client.force_login(login_user) response = self.client.get(change_url_1, follow=True) self.assertContains(response, 'login-form') response = self.client.post(change_url_1, {'name': 'changed'}, follow=True) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id') self.assertContains(response, 'login-form') response = self.client.get(change_url_2, follow=True) self.assertContains(response, 'login-form') response = self.client.post(change_url_2, {'name': 'changed again'}, follow=True) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed') self.assertContains(response, 'login-form') self.client.get(reverse('admin:logout')) def test_change_view_without_object_change_permission(self): """ The object should be read-only if the user has permission to view it and change objects of that type but not to change the current object. 
""" change_url = reverse('admin9:admin_views_article_change', args=(self.a1.pk,)) self.client.force_login(self.viewuser) response = self.client.get(change_url) self.assertEqual(response.context['title'], 'View article') self.assertContains(response, '<title>View article | Django site admin</title>') self.assertContains(response, '<h1>View article</h1>') self.assertContains(response, '<a href="/test_admin/admin9/admin_views/article/" class="closelink">Close</a>') def test_change_view_save_as_new(self): """ 'Save as new' should raise PermissionDenied for users without the 'add' permission. """ change_dict_save_as_new = { '_saveasnew': 'Save as new', 'title': 'Ikke fordømt', 'content': '<p>edited article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', 'section': self.s1.pk, } article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,)) # Add user can perform "Save as new". article_count = Article.objects.count() self.client.force_login(self.adduser) post = self.client.post(article_change_url, change_dict_save_as_new) self.assertRedirects(post, self.index_url) self.assertEqual(Article.objects.count(), article_count + 1) self.client.logout() # Change user cannot perform "Save as new" (no 'add' permission). article_count = Article.objects.count() self.client.force_login(self.changeuser) post = self.client.post(article_change_url, change_dict_save_as_new) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), article_count) # User with both add and change permissions should be redirected to the # change page for the newly created object. article_count = Article.objects.count() self.client.force_login(self.superuser) post = self.client.post(article_change_url, change_dict_save_as_new) self.assertEqual(Article.objects.count(), article_count + 1) new_article = Article.objects.latest('id') self.assertRedirects(post, reverse('admin:admin_views_article_change', args=(new_article.pk,))) def test_change_view_with_view_only_inlines(self): """ User with change permission to a section but view-only for inlines. """ self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta))) self.client.force_login(self.viewuser) # GET shows inlines. response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertEqual(len(response.context['inline_admin_formsets']), 1) formset = response.context['inline_admin_formsets'][0] self.assertEqual(len(formset.forms), 3) # Valid POST changes the name. data = { 'name': 'Can edit name with view-only inlines', 'article_set-TOTAL_FORMS': 3, 'article_set-INITIAL_FORMS': 3 } response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data['name']) # Invalid POST reshows inlines. 
        del data['name']
        response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.context['inline_admin_formsets']), 1)
        formset = response.context['inline_admin_formsets'][0]
        self.assertEqual(len(formset.forms), 3)

    def test_change_view_with_view_and_add_inlines(self):
        """User has view and add permissions on the inline model."""
        self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta)))
        self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('add', Article._meta)))
        self.client.force_login(self.viewuser)
        # GET shows inlines.
        response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
        self.assertEqual(len(response.context['inline_admin_formsets']), 1)
        formset = response.context['inline_admin_formsets'][0]
        self.assertEqual(len(formset.forms), 6)
        # Valid POST creates a new article.
        data = {
            'name': 'Can edit name with view-only inlines',
            'article_set-TOTAL_FORMS': 6,
            'article_set-INITIAL_FORMS': 3,
            'article_set-3-id': [''],
            'article_set-3-title': ['A title'],
            'article_set-3-content': ['Added content'],
            'article_set-3-date_0': ['2008-3-18'],
            'article_set-3-date_1': ['11:54:58'],
            'article_set-3-section': [str(self.s1.pk)],
        }
        response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
        self.assertRedirects(response, reverse('admin:admin_views_section_changelist'))
        self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data['name'])
        self.assertEqual(Article.objects.count(), 4)
        # Invalid POST reshows inlines.
        del data['name']
        response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.context['inline_admin_formsets']), 1)
        formset = response.context['inline_admin_formsets'][0]
        self.assertEqual(len(formset.forms), 6)

    def test_change_view_with_view_and_delete_inlines(self):
        """User has view and delete permissions on the inline model."""
        self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta)))
        self.client.force_login(self.viewuser)
        data = {
            'name': 'Name is required.',
            'article_set-TOTAL_FORMS': 6,
            'article_set-INITIAL_FORMS': 3,
            'article_set-0-id': [str(self.a1.pk)],
            'article_set-0-DELETE': ['on'],
        }
        # Inline POST details are ignored without delete permission.
        response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
        self.assertRedirects(response, reverse('admin:admin_views_section_changelist'))
        self.assertEqual(Article.objects.count(), 3)
        # Deletion successful when delete permission is added.
self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', Article._meta))) data = { 'name': 'Name is required.', 'article_set-TOTAL_FORMS': 6, 'article_set-INITIAL_FORMS': 3, 'article_set-0-id': [str(self.a1.pk)], 'article_set-0-DELETE': ['on'], } response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Article.objects.count(), 2) def test_delete_view(self): """Delete view should restrict access and actually delete items.""" delete_dict = {'post': 'yes'} delete_url = reverse('admin:admin_views_article_delete', args=(self.a1.pk,)) # add user should not be able to delete articles self.client.force_login(self.adduser) response = self.client.get(delete_url) self.assertEqual(response.status_code, 403) post = self.client.post(delete_url, delete_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) self.client.logout() # view user should not be able to delete articles self.client.force_login(self.viewuser) response = self.client.get(delete_url) self.assertEqual(response.status_code, 403) post = self.client.post(delete_url, delete_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) self.client.logout() # Delete user can delete self.client.force_login(self.deleteuser) response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))) self.assertContains(response, "<h2>Summary</h2>") self.assertContains(response, "<li>Articles: 3</li>") # test response contains link to related Article self.assertContains(response, "admin_views/article/%s/" % self.a1.pk) response = self.client.get(delete_url) self.assertContains(response, "admin_views/article/%s/" % self.a1.pk) self.assertContains(response, "<h2>Summary</h2>") self.assertContains(response, "<li>Articles: 1</li>") post = self.client.post(delete_url, delete_dict) self.assertRedirects(post, self.index_url) self.assertEqual(Article.objects.count(), 2) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object') article_ct = ContentType.objects.get_for_model(Article) logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION) self.assertEqual(logged.object_id, str(self.a1.pk)) def test_delete_view_with_no_default_permissions(self): """ The delete view allows users to delete collected objects without a 'delete' permission (ReadOnlyPizza.Meta.default_permissions is empty). """ pizza = ReadOnlyPizza.objects.create(name='Double Cheese') delete_url = reverse('admin:admin_views_readonlypizza_delete', args=(pizza.pk,)) self.client.force_login(self.adduser) response = self.client.get(delete_url) self.assertContains(response, 'admin_views/readonlypizza/%s/' % pizza.pk) self.assertContains(response, '<h2>Summary</h2>') self.assertContains(response, '<li>Read only pizzas: 1</li>') post = self.client.post(delete_url, {'post': 'yes'}) self.assertRedirects(post, reverse('admin:admin_views_readonlypizza_changelist')) self.assertEqual(ReadOnlyPizza.objects.count(), 0) def test_delete_view_nonexistent_obj(self): self.client.force_login(self.deleteuser) url = reverse('admin:admin_views_article_delete', args=('nonexistent',)) response = self.client.get(url, follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['article with ID “nonexistent” doesn’t exist. 
Perhaps it was deleted?'] ) def test_history_view(self): """History view should restrict access.""" # add user should not be able to view the list of article or change any of them self.client.force_login(self.adduser) response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,))) self.assertEqual(response.status_code, 403) self.client.get(reverse('admin:logout')) # view user can view all items self.client.force_login(self.viewuser) response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,))) self.assertEqual(response.status_code, 200) self.client.get(reverse('admin:logout')) # change user can view all items and edit them self.client.force_login(self.changeuser) response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,))) self.assertEqual(response.status_code, 200) # Test redirection when using row-level change permissions. Refs #11513. rl1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id") rl2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id") logins = [self.superuser, self.viewuser, self.adduser, self.changeuser, self.deleteuser] for login_user in logins: with self.subTest(login_user.username): self.client.force_login(login_user) url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,)) response = self.client.get(url) self.assertEqual(response.status_code, 403) url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,)) response = self.client.get(url) self.assertEqual(response.status_code, 200) self.client.get(reverse('admin:logout')) for login_user in [self.joepublicuser, self.nostaffuser]: with self.subTest(login_user.username): self.client.force_login(login_user) url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,)) response = self.client.get(url, follow=True) self.assertContains(response, 'login-form') url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,)) response = self.client.get(url, follow=True) self.assertContains(response, 'login-form') self.client.get(reverse('admin:logout')) def test_history_view_bad_url(self): self.client.force_login(self.changeuser) response = self.client.get(reverse('admin:admin_views_article_history', args=('foo',)), follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['article with ID “foo” doesn’t exist. Perhaps it was deleted?'] ) def test_conditionally_show_add_section_link(self): """ The foreign key widget should only show the "add related" button if the user has permission to add that related item. """ self.client.force_login(self.adduser) # The user can't add sections yet, so they shouldn't see the "add section" link. url = reverse('admin:admin_views_article_add') add_link_text = 'add_id_section' response = self.client.get(url) self.assertNotContains(response, add_link_text) # Allow the user to add sections too. Now they can see the "add section" link. user = User.objects.get(username='adduser') perm = get_perm(Section, get_permission_codename('add', Section._meta)) user.user_permissions.add(perm) response = self.client.get(url) self.assertContains(response, add_link_text) def test_conditionally_show_change_section_link(self): """ The foreign key widget should only show the "change related" button if the user has permission to change that related item. 
""" def get_change_related(response): return response.context['adminform'].form.fields['section'].widget.can_change_related self.client.force_login(self.adduser) # The user can't change sections yet, so they shouldn't see the "change section" link. url = reverse('admin:admin_views_article_add') change_link_text = 'change_id_section' response = self.client.get(url) self.assertFalse(get_change_related(response)) self.assertNotContains(response, change_link_text) # Allow the user to change sections too. Now they can see the "change section" link. user = User.objects.get(username='adduser') perm = get_perm(Section, get_permission_codename('change', Section._meta)) user.user_permissions.add(perm) response = self.client.get(url) self.assertTrue(get_change_related(response)) self.assertContains(response, change_link_text) def test_conditionally_show_delete_section_link(self): """ The foreign key widget should only show the "delete related" button if the user has permission to delete that related item. """ def get_delete_related(response): return response.context['adminform'].form.fields['sub_section'].widget.can_delete_related self.client.force_login(self.adduser) # The user can't delete sections yet, so they shouldn't see the "delete section" link. url = reverse('admin:admin_views_article_add') delete_link_text = 'delete_id_sub_section' response = self.client.get(url) self.assertFalse(get_delete_related(response)) self.assertNotContains(response, delete_link_text) # Allow the user to delete sections too. Now they can see the "delete section" link. user = User.objects.get(username='adduser') perm = get_perm(Section, get_permission_codename('delete', Section._meta)) user.user_permissions.add(perm) response = self.client.get(url) self.assertTrue(get_delete_related(response)) self.assertContains(response, delete_link_text) def test_disabled_permissions_when_logged_in(self): self.client.force_login(self.superuser) superuser = User.objects.get(username='super') superuser.is_active = False superuser.save() response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'id="login-form"') self.assertNotContains(response, 'Log out') response = self.client.get(reverse('secure_view'), follow=True) self.assertContains(response, 'id="login-form"') def test_disabled_staff_permissions_when_logged_in(self): self.client.force_login(self.superuser) superuser = User.objects.get(username='super') superuser.is_staff = False superuser.save() response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'id="login-form"') self.assertNotContains(response, 'Log out') response = self.client.get(reverse('secure_view'), follow=True) self.assertContains(response, 'id="login-form"') def test_app_list_permissions(self): """ If a user has no module perms, the app list returns a 404. 
""" opts = Article._meta change_user = User.objects.get(username='changeuser') permission = get_perm(Article, get_permission_codename('change', opts)) self.client.force_login(self.changeuser) # the user has no module permissions change_user.user_permissions.remove(permission) response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertEqual(response.status_code, 404) # the user now has module permissions change_user.user_permissions.add(permission) response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertEqual(response.status_code, 200) def test_shortcut_view_only_available_to_staff(self): """ Only admin users should be able to use the admin shortcut view. """ model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey) obj = ModelWithStringPrimaryKey.objects.create(string_pk='foo') shortcut_url = reverse('admin:view_on_site', args=(model_ctype.pk, obj.pk)) # Not logged in: we should see the login page. response = self.client.get(shortcut_url, follow=True) self.assertTemplateUsed(response, 'admin/login.html') # Logged in? Redirect. self.client.force_login(self.superuser) response = self.client.get(shortcut_url, follow=False) # Can't use self.assertRedirects() because User.get_absolute_url() is silly. self.assertEqual(response.status_code, 302) # Domain may depend on contrib.sites tests also run self.assertRegex(response.url, 'http://(testserver|example.com)/dummy/foo/') def test_has_module_permission(self): """ has_module_permission() returns True for all users who have any permission for that module (add, change, or delete), so that the module is displayed on the admin index page. """ self.client.force_login(self.superuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.viewuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.adduser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.changeuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.deleteuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') def test_overriding_has_module_permission(self): """ If has_module_permission() always returns False, the module shouldn't be displayed on the admin index page for any users. 
""" articles = Article._meta.verbose_name_plural.title() sections = Section._meta.verbose_name_plural.title() index_url = reverse('admin7:index') self.client.force_login(self.superuser) response = self.client.get(index_url) self.assertContains(response, sections) self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.viewuser) response = self.client.get(index_url) self.assertNotContains(response, 'admin_views') self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.adduser) response = self.client.get(index_url) self.assertNotContains(response, 'admin_views') self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.changeuser) response = self.client.get(index_url) self.assertNotContains(response, 'admin_views') self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.deleteuser) response = self.client.get(index_url) self.assertNotContains(response, articles) # The app list displays Sections but not Articles as the latter has # ModelAdmin.has_module_permission() = False. self.client.force_login(self.superuser) response = self.client.get(reverse('admin7:app_list', args=('admin_views',))) self.assertContains(response, sections) self.assertNotContains(response, articles) def test_post_save_message_no_forbidden_links_visible(self): """ Post-save message shouldn't contain a link to the change form if the user doesn't have the change permission. """ self.client.force_login(self.adduser) # Emulate Article creation for user with add-only permission. post_data = { "title": "Fun & games", "content": "Some content", "date_0": "2015-10-31", "date_1": "16:35:00", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_article_add'), post_data, follow=True) self.assertContains( response, '<li class="success">The article “Fun &amp; games” was added successfully.</li>', html=True ) @override_settings( ROOT_URLCONF='admin_views.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }], ) class AdminViewProxyModelPermissionsTests(TestCase): """Tests for proxy models permissions in the admin.""" @classmethod def setUpTestData(cls): cls.viewuser = User.objects.create_user(username='viewuser', password='secret', is_staff=True) cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True) cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True) # Setup permissions. opts = UserProxy._meta cls.viewuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('view', opts))) cls.adduser.user_permissions.add(get_perm(UserProxy, get_permission_codename('add', opts))) cls.changeuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('change', opts))) cls.deleteuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('delete', opts))) # UserProxy instances. 
cls.user_proxy = UserProxy.objects.create(username='user_proxy', password='secret') def test_add(self): self.client.force_login(self.adduser) url = reverse('admin:admin_views_userproxy_add') data = { 'username': 'can_add', 'password': 'secret', 'date_joined_0': '2019-01-15', 'date_joined_1': '16:59:10', } response = self.client.post(url, data, follow=True) self.assertEqual(response.status_code, 200) self.assertTrue(UserProxy.objects.filter(username='can_add').exists()) def test_view(self): self.client.force_login(self.viewuser) response = self.client.get(reverse('admin:admin_views_userproxy_changelist')) self.assertContains(response, '<h1>Select user proxy to view</h1>') response = self.client.get(reverse('admin:admin_views_userproxy_change', args=(self.user_proxy.pk,))) self.assertContains(response, '<h1>View user proxy</h1>') self.assertContains(response, '<div class="readonly">user_proxy</div>') def test_change(self): self.client.force_login(self.changeuser) data = { 'password': self.user_proxy.password, 'username': self.user_proxy.username, 'date_joined_0': self.user_proxy.date_joined.strftime('%Y-%m-%d'), 'date_joined_1': self.user_proxy.date_joined.strftime('%H:%M:%S'), 'first_name': 'first_name', } url = reverse('admin:admin_views_userproxy_change', args=(self.user_proxy.pk,)) response = self.client.post(url, data) self.assertRedirects(response, reverse('admin:admin_views_userproxy_changelist')) self.assertEqual(UserProxy.objects.get(pk=self.user_proxy.pk).first_name, 'first_name') def test_delete(self): self.client.force_login(self.deleteuser) url = reverse('admin:admin_views_userproxy_delete', args=(self.user_proxy.pk,)) response = self.client.post(url, {'post': 'yes'}, follow=True) self.assertEqual(response.status_code, 200) self.assertFalse(UserProxy.objects.filter(pk=self.user_proxy.pk).exists()) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewsNoUrlTest(TestCase): """Regression test for #17333""" @classmethod def setUpTestData(cls): # User who can change Reports cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.changeuser.user_permissions.add(get_perm(Report, get_permission_codename('change', Report._meta))) def test_no_standard_modeladmin_urls(self): """Admin index views don't break when user's ModelAdmin removes standard urls""" self.client.force_login(self.changeuser) r = self.client.get(reverse('admin:index')) # we shouldn't get a 500 error caused by a NoReverseMatch self.assertEqual(r.status_code, 200) self.client.get(reverse('admin:logout')) @skipUnlessDBFeature('can_defer_constraint_checks') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewDeletedObjectsTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True) cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.v1 = 
Villain.objects.create(name='Adam') cls.v2 = Villain.objects.create(name='Sue') cls.sv1 = SuperVillain.objects.create(name='Bob') cls.pl1 = Plot.objects.create(name='World Domination', team_leader=cls.v1, contact=cls.v2) cls.pl2 = Plot.objects.create(name='World Peace', team_leader=cls.v2, contact=cls.v2) cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1) cls.pd1 = PlotDetails.objects.create(details='almost finished', plot=cls.pl1) cls.sh1 = SecretHideout.objects.create(location='underground bunker', villain=cls.v1) cls.sh2 = SecretHideout.objects.create(location='floating castle', villain=cls.sv1) cls.ssh1 = SuperSecretHideout.objects.create(location='super floating castle!', supervillain=cls.sv1) cls.cy1 = CyclicOne.objects.create(name='I am recursive', two_id=1) cls.cy2 = CyclicTwo.objects.create(name='I am recursive too', one_id=1) def setUp(self): self.client.force_login(self.superuser) def test_nesting(self): """ Objects should be nested to display the relationships that cause them to be scheduled for deletion. """ pattern = re.compile( r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*' r'<li>Plot details: <a href="%s">almost finished</a>' % ( reverse('admin:admin_views_plot_change', args=(self.pl1.pk,)), reverse('admin:admin_views_plotdetails_change', args=(self.pd1.pk,)), ) ) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,))) self.assertRegex(response.content.decode(), pattern) def test_cyclic(self): """ Cyclic relationships should still cause each object to only be listed once. """ one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % ( reverse('admin:admin_views_cyclicone_change', args=(self.cy1.pk,)), ) two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % ( reverse('admin:admin_views_cyclictwo_change', args=(self.cy2.pk,)), ) response = self.client.get(reverse('admin:admin_views_cyclicone_delete', args=(self.cy1.pk,))) self.assertContains(response, one, 1) self.assertContains(response, two, 1) def test_perms_needed(self): self.client.logout() delete_user = User.objects.get(username='deleteuser') delete_user.user_permissions.add(get_perm(Plot, get_permission_codename('delete', Plot._meta))) self.client.force_login(self.deleteuser) response = self.client.get(reverse('admin:admin_views_plot_delete', args=(self.pl1.pk,))) self.assertContains(response, "your account doesn't have permission to delete the following types of objects") self.assertContains(response, "<li>plot details</li>") def test_protected(self): q = Question.objects.create(question="Why?") a1 = Answer.objects.create(question=q, answer="Because.") a2 = Answer.objects.create(question=q, answer="Yes.") response = self.client.get(reverse('admin:admin_views_question_delete', args=(q.pk,))) self.assertContains(response, "would require deleting the following protected related objects") self.assertContains( response, '<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,)) ) self.assertContains( response, '<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,)) ) def test_post_delete_protected(self): """ A POST request to delete protected objects should display the page which says the deletion is prohibited. 
""" q = Question.objects.create(question='Why?') Answer.objects.create(question=q, answer='Because.') response = self.client.post(reverse('admin:admin_views_question_delete', args=(q.pk,)), {'post': 'yes'}) self.assertEqual(Question.objects.count(), 1) self.assertContains(response, "would require deleting the following protected related objects") def test_restricted(self): album = Album.objects.create(title='Amaryllis') song = Song.objects.create(album=album, name='Unity') response = self.client.get(reverse('admin:admin_views_album_delete', args=(album.pk,))) self.assertContains( response, 'would require deleting the following protected related objects', ) self.assertContains( response, '<li>Song: <a href="%s">Unity</a></li>' % reverse('admin:admin_views_song_change', args=(song.pk,)) ) def test_post_delete_restricted(self): album = Album.objects.create(title='Amaryllis') Song.objects.create(album=album, name='Unity') response = self.client.post( reverse('admin:admin_views_album_delete', args=(album.pk,)), {'post': 'yes'}, ) self.assertEqual(Album.objects.count(), 1) self.assertContains( response, 'would require deleting the following protected related objects', ) def test_not_registered(self): should_contain = """<li>Secret hideout: underground bunker""" response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,))) self.assertContains(response, should_contain, 1) def test_multiple_fkeys_to_same_model(self): """ If a deleted object has two relationships from another model, both of those should be followed in looking for related objects to delete. """ should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse( 'admin:admin_views_plot_change', args=(self.pl1.pk,) ) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,))) self.assertContains(response, should_contain) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,))) self.assertContains(response, should_contain) def test_multiple_fkeys_to_same_instance(self): """ If a deleted object has two relationships pointing to it from another object, the other object should still only be listed once. """ should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse( 'admin:admin_views_plot_change', args=(self.pl2.pk,) ) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,))) self.assertContains(response, should_contain, 1) def test_inheritance(self): """ In the case of an inherited model, if either the child or parent-model instance is deleted, both instances are listed for deletion, as well as any relationships they have. """ should_contain = [ '<li>Villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_villain_change', args=(self.sv1.pk,)), '<li>Super villain: <a href="%s">Bob</a>' % reverse( 'admin:admin_views_supervillain_change', args=(self.sv1.pk,) ), '<li>Secret hideout: floating castle', '<li>Super secret hideout: super floating castle!', ] response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.sv1.pk,))) for should in should_contain: self.assertContains(response, should, 1) response = self.client.get(reverse('admin:admin_views_supervillain_delete', args=(self.sv1.pk,))) for should in should_contain: self.assertContains(response, should, 1) def test_generic_relations(self): """ If a deleted object has GenericForeignKeys pointing to it, those objects should be listed for deletion. 
""" plot = self.pl3 tag = FunkyTag.objects.create(content_object=plot, name='hott') should_contain = '<li>Funky tag: <a href="%s">hott' % reverse( 'admin:admin_views_funkytag_change', args=(tag.id,)) response = self.client.get(reverse('admin:admin_views_plot_delete', args=(plot.pk,))) self.assertContains(response, should_contain) def test_generic_relations_with_related_query_name(self): """ If a deleted object has GenericForeignKey with GenericRelation(related_query_name='...') pointing to it, those objects should be listed for deletion. """ bookmark = Bookmark.objects.create(name='djangoproject') tag = FunkyTag.objects.create(content_object=bookmark, name='django') tag_url = reverse('admin:admin_views_funkytag_change', args=(tag.id,)) should_contain = '<li>Funky tag: <a href="%s">django' % tag_url response = self.client.get(reverse('admin:admin_views_bookmark_delete', args=(bookmark.pk,))) self.assertContains(response, should_contain) def test_delete_view_uses_get_deleted_objects(self): """The delete view uses ModelAdmin.get_deleted_objects().""" book = Book.objects.create(name='Test Book') response = self.client.get(reverse('admin2:admin_views_book_delete', args=(book.pk,))) # BookAdmin.get_deleted_objects() returns custom text. self.assertContains(response, 'a deletable object') @override_settings(ROOT_URLCONF='admin_views.urls') class TestGenericRelations(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.v1 = Villain.objects.create(name='Adam') cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1) def setUp(self): self.client.force_login(self.superuser) def test_generic_content_object_in_list_display(self): FunkyTag.objects.create(content_object=self.pl3, name='hott') response = self.client.get(reverse('admin:admin_views_funkytag_changelist')) self.assertContains(response, "%s</td>" % self.pl3) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewStringPrimaryKeyTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.pk = ( "abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 " r"""-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`""" ) cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk) content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk user_pk = cls.superuser.pk LogEntry.objects.log_action(user_pk, content_type_pk, cls.pk, cls.pk, 2, change_message='Changed something') def setUp(self): self.client.force_login(self.superuser) def test_get_history_view(self): """ Retrieving the history for an object using urlencoded form of primary key should work. Refs #12349, #18550. 
""" response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_history', args=(self.pk,))) self.assertContains(response, escape(self.pk)) self.assertContains(response, 'Changed something') def test_get_change_view(self): "Retrieving the object using urlencoded form of primary key should work" response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_change', args=(self.pk,))) self.assertContains(response, escape(self.pk)) def test_changelist_to_changeform_link(self): "Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072" response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_changelist')) # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding pk_final_url = escape(iri_to_uri(quote(self.pk))) change_url = reverse( 'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',) ).replace('__fk__', pk_final_url) should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (change_url, escape(self.pk)) self.assertContains(response, should_contain) def test_recentactions_link(self): "The link from the recent actions list referring to the changeform of the object should be quoted" response = self.client.get(reverse('admin:index')) link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),)) should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk)) self.assertContains(response, should_contain) def test_deleteconfirmation_link(self): "The link from the delete confirmation page referring back to the changeform of the object should be quoted" url = reverse('admin:admin_views_modelwithstringprimarykey_delete', args=(quote(self.pk),)) response = self.client.get(url) # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding change_url = reverse( 'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',) ).replace('__fk__', escape(iri_to_uri(quote(self.pk)))) should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk)) self.assertContains(response, should_contain) def test_url_conflicts_with_add(self): "A model with a primary key that ends with add or is `add` should be visible" add_model = ModelWithStringPrimaryKey.objects.create(pk="i have something to add") add_model.save() response = self.client.get( reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model.pk),)) ) should_contain = """<h1>Change model with string primary key</h1>""" self.assertContains(response, should_contain) add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add") add_url = reverse('admin:admin_views_modelwithstringprimarykey_add') change_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model2.pk),)) self.assertNotEqual(add_url, change_url) def test_url_conflicts_with_delete(self): "A model with a primary key that ends with delete should be visible" delete_model = ModelWithStringPrimaryKey(pk="delete") delete_model.save() response = self.client.get( reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(delete_model.pk),)) ) should_contain = """<h1>Change model with string primary key</h1>""" self.assertContains(response, should_contain) def test_url_conflicts_with_history(self): "A model with a primary key that ends with history should be visible" history_model = ModelWithStringPrimaryKey(pk="history") history_model.save() response = self.client.get( 
reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(history_model.pk),)) ) should_contain = """<h1>Change model with string primary key</h1>""" self.assertContains(response, should_contain) def test_shortcut_view_with_escaping(self): "'View on site' should work properly with char fields" model = ModelWithStringPrimaryKey(pk='abc_123') model.save() response = self.client.get( reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(model.pk),)) ) should_contain = '/%s/" class="viewsitelink">' % model.pk self.assertContains(response, should_contain) def test_change_view_history_link(self): """Object history button link should work and contain the pk value quoted.""" url = reverse( 'admin:%s_modelwithstringprimarykey_change' % ModelWithStringPrimaryKey._meta.app_label, args=(quote(self.pk),) ) response = self.client.get(url) self.assertEqual(response.status_code, 200) expected_link = reverse( 'admin:%s_modelwithstringprimarykey_history' % ModelWithStringPrimaryKey._meta.app_label, args=(quote(self.pk),) ) self.assertContains(response, '<a href="%s" class="historylink"' % escape(expected_link)) def test_redirect_on_add_view_continue_button(self): """As soon as an object is added using the "Save and continue editing" button, the user should be redirected to the object's change_view. If the primary key is a string containing special characters such as a slash or underscore, these characters must be escaped (see #22266). """ response = self.client.post( reverse('admin:admin_views_modelwithstringprimarykey_add'), { 'string_pk': '123/history', "_continue": "1", # Save and continue editing } ) self.assertEqual(response.status_code, 302) # temporary redirect self.assertIn('/123_2Fhistory/', response.headers['location']) # PK is quoted @override_settings(ROOT_URLCONF='admin_views.urls') class SecureViewTests(TestCase): """ Test behavior of a view protected by the staff_member_required decorator. """ def test_secure_view_shows_login_if_not_logged_in(self): secure_url = reverse('secure_view') response = self.client.get(secure_url) self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), secure_url)) response = self.client.get(secure_url, follow=True) self.assertTemplateUsed(response, 'admin/login.html') self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url) def test_staff_member_required_decorator_works_with_argument(self): """ The staff_member_required decorator works with an argument (redirect_field_name).
""" secure_url = '/test_admin/admin/secure-view2/' response = self.client.get(secure_url) self.assertRedirects(response, '%s?myfield=%s' % (reverse('admin:login'), secure_url)) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewUnicodeTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.b1 = Book.objects.create(name='Lærdommer') cls.p1 = Promo.objects.create(name='<Promo for Lærdommer>', book=cls.b1) cls.chap1 = Chapter.objects.create( title='Norske bostaver æøå skaper problemer', content='<p>Svært frustrerende med UnicodeDecodeErro</p>', book=cls.b1 ) cls.chap2 = Chapter.objects.create( title='Kjærlighet', content='<p>La kjærligheten til de lidende seire.</p>', book=cls.b1) cls.chap3 = Chapter.objects.create(title='Kjærlighet', content='<p>Noe innhold</p>', book=cls.b1) cls.chap4 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='<Xtra(1) Norske bostaver æøå skaper problemer>') cls.chap5 = ChapterXtra1.objects.create(chap=cls.chap2, xtra='<Xtra(1) Kjærlighet>') cls.chap6 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='<Xtra(1) Kjærlighet>') cls.chap7 = ChapterXtra2.objects.create(chap=cls.chap1, xtra='<Xtra(2) Norske bostaver æøå skaper problemer>') cls.chap8 = ChapterXtra2.objects.create(chap=cls.chap2, xtra='<Xtra(2) Kjærlighet>') cls.chap9 = ChapterXtra2.objects.create(chap=cls.chap3, xtra='<Xtra(2) Kjærlighet>') def setUp(self): self.client.force_login(self.superuser) def test_unicode_edit(self): """ A test to ensure that POST on edit_view handles non-ASCII characters. """ post_data = { "name": "Test lærdommer", # inline data "chapter_set-TOTAL_FORMS": "6", "chapter_set-INITIAL_FORMS": "3", "chapter_set-MAX_NUM_FORMS": "0", "chapter_set-0-id": self.chap1.pk, "chapter_set-0-title": "Norske bostaver æøå skaper problemer", "chapter_set-0-content": "&lt;p&gt;Svært frustrerende med UnicodeDecodeError&lt;/p&gt;", "chapter_set-1-id": self.chap2.id, "chapter_set-1-title": "Kjærlighet.", "chapter_set-1-content": "&lt;p&gt;La kjærligheten til de lidende seire.&lt;/p&gt;", "chapter_set-2-id": self.chap3.id, "chapter_set-2-title": "Need a title.", "chapter_set-2-content": "&lt;p&gt;Newest content&lt;/p&gt;", "chapter_set-3-id": "", "chapter_set-3-title": "", "chapter_set-3-content": "", "chapter_set-4-id": "", "chapter_set-4-title": "", "chapter_set-4-content": "", "chapter_set-5-id": "", "chapter_set-5-title": "", "chapter_set-5-content": "", } response = self.client.post(reverse('admin:admin_views_book_change', args=(self.b1.pk,)), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_unicode_delete(self): """ The delete_view handles non-ASCII characters """ delete_dict = {'post': 'yes'} delete_url = reverse('admin:admin_views_book_delete', args=(self.b1.pk,)) response = self.client.get(delete_url) self.assertEqual(response.status_code, 200) response = self.client.post(delete_url, delete_dict) self.assertRedirects(response, reverse('admin:admin_views_book_changelist')) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewListEditable(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( 
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False) cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True) def setUp(self): self.client.force_login(self.superuser) def test_inheritance(self): Podcast.objects.create(name="This Week in Django", release_date=datetime.date.today()) response = self.client.get(reverse('admin:admin_views_podcast_changelist')) self.assertEqual(response.status_code, 200) def test_inheritance_2(self): Vodcast.objects.create(name="This Week in Django", released=True) response = self.client.get(reverse('admin:admin_views_vodcast_changelist')) self.assertEqual(response.status_code, 200) def test_custom_pk(self): Language.objects.create(iso='en', name='English', english_name='English') response = self.client.get(reverse('admin:admin_views_language_changelist')) self.assertEqual(response.status_code, 200) def test_changelist_input_html(self): response = self.client.get(reverse('admin:admin_views_person_changelist')) # 2 inputs per object(the field and the hidden id field) = 6 # 4 management hidden fields = 4 # 4 action inputs (3 regular checkboxes, 1 checkbox to select all) # main form submit button = 1 # search field and search submit button = 2 # CSRF field = 1 # field to track 'select all' across paginated views = 1 # 6 + 4 + 4 + 1 + 2 + 1 + 1 = 19 inputs self.assertContains(response, "<input", count=20) # 1 select per object = 3 selects self.assertContains(response, "<select", count=4) def test_post_messages(self): # Ticket 12707: Saving inline editable should not show admin # action warnings data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": str(self.per1.pk), "form-1-gender": "2", "form-1-id": str(self.per2.pk), "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": str(self.per3.pk), "_save": "Save", } response = self.client.post(reverse('admin:admin_views_person_changelist'), data, follow=True) self.assertEqual(len(response.context['messages']), 1) def test_post_submission(self): data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": str(self.per1.pk), "form-1-gender": "2", "form-1-id": str(self.per2.pk), "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": str(self.per3.pk), "_save": "Save", } self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2) # test a filtered page data = { "form-TOTAL_FORMS": "2", "form-INITIAL_FORMS": "2", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per1.pk), "form-0-gender": "1", "form-0-alive": "checked", "form-1-id": str(self.per3.pk), "form-1-gender": "1", "form-1-alive": "checked", "_save": "Save", } self.client.post(reverse('admin:admin_views_person_changelist') + '?gender__exact=1', data) self.assertIs(Person.objects.get(name="John Mauchly").alive, True) # test a searched page data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": 
str(self.per1.pk), "form-0-gender": "1", "_save": "Save", } self.client.post(reverse('admin:admin_views_person_changelist') + '?q=john', data) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) def test_non_field_errors(self): """ Non-field errors are displayed for each of the forms in the changelist's formset. """ fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai') fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india') fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza') data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-id": str(fd1.id), "form-0-reference": "123", "form-0-driver": "bill", "form-0-restaurant": "thai", # Same data as above: Forbidden because of unique_together! "form-1-id": str(fd2.id), "form-1-reference": "456", "form-1-driver": "bill", "form-1-restaurant": "thai", "form-2-id": str(fd3.id), "form-2-reference": "789", "form-2-driver": "bill", "form-2-restaurant": "pizza", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data) self.assertContains( response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery ' 'with this Driver and Restaurant already exists.</li></ul></td></tr>', 1, html=True ) data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-id": str(fd1.id), "form-0-reference": "123", "form-0-driver": "bill", "form-0-restaurant": "thai", # Same data as above: Forbidden because of unique_together! "form-1-id": str(fd2.id), "form-1-reference": "456", "form-1-driver": "bill", "form-1-restaurant": "thai", # Same data also. "form-2-id": str(fd3.id), "form-2-reference": "789", "form-2-driver": "bill", "form-2-restaurant": "thai", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data) self.assertContains( response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery ' 'with this Driver and Restaurant already exists.</li></ul></td></tr>', 2, html=True ) def test_non_form_errors(self): # test if non-form errors are handled; ticket #12716 data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per2.pk), "form-0-alive": "1", "form-0-gender": "2", # The form processing understands this as a list_editable "Save" # and not an action "Go". 
"_save": "Save", } response = self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertContains(response, "Grace is not a Zombie") def test_non_form_errors_is_errorlist(self): # test if non-form errors are correctly handled; ticket #12878 data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per2.pk), "form-0-alive": "1", "form-0-gender": "2", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_person_changelist'), data) non_form_errors = response.context['cl'].formset.non_form_errors() self.assertIsInstance(non_form_errors, ErrorList) self.assertEqual(str(non_form_errors), str(ErrorList(["Grace is not a Zombie"]))) def test_list_editable_ordering(self): collector = Collector.objects.create(id=1, name="Frederick Clegg") Category.objects.create(id=1, order=1, collector=collector) Category.objects.create(id=2, order=2, collector=collector) Category.objects.create(id=3, order=0, collector=collector) Category.objects.create(id=4, order=0, collector=collector) # NB: The order values must be changed so that the items are reordered. data = { "form-TOTAL_FORMS": "4", "form-INITIAL_FORMS": "4", "form-MAX_NUM_FORMS": "0", "form-0-order": "14", "form-0-id": "1", "form-0-collector": "1", "form-1-order": "13", "form-1-id": "2", "form-1-collector": "1", "form-2-order": "1", "form-2-id": "3", "form-2-collector": "1", "form-3-order": "0", "form-3-id": "4", "form-3-collector": "1", # The form processing understands this as a list_editable "Save" # and not an action "Go". "_save": "Save", } response = self.client.post(reverse('admin:admin_views_category_changelist'), data) # Successful post will redirect self.assertEqual(response.status_code, 302) # The order values have been applied to the right objects self.assertEqual(Category.objects.get(id=1).order, 14) self.assertEqual(Category.objects.get(id=2).order, 13) self.assertEqual(Category.objects.get(id=3).order, 1) self.assertEqual(Category.objects.get(id=4).order, 0) def test_list_editable_pagination(self): """ Pagination works for list_editable items. """ UnorderedObject.objects.create(id=1, name='Unordered object #1') UnorderedObject.objects.create(id=2, name='Unordered object #2') UnorderedObject.objects.create(id=3, name='Unordered object #3') response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist')) self.assertContains(response, 'Unordered object #3') self.assertContains(response, 'Unordered object #2') self.assertNotContains(response, 'Unordered object #1') response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist') + '?p=2') self.assertNotContains(response, 'Unordered object #3') self.assertNotContains(response, 'Unordered object #2') self.assertContains(response, 'Unordered object #1') def test_list_editable_action_submit(self): # List editable changes should not be executed if the action "Go" button is # used to submit the form. 
data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": "1", "form-1-gender": "2", "form-1-id": "2", "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": "3", "index": "0", "_selected_action": ['3'], "action": ['', 'delete_selected'], } self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, True) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1) def test_list_editable_action_choices(self): # List editable changes should be executed if the "Save" button is # used to submit the form - any action choices should be ignored. data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": str(self.per1.pk), "form-1-gender": "2", "form-1-id": str(self.per2.pk), "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": str(self.per3.pk), "_save": "Save", "_selected_action": ['1'], "action": ['', 'delete_selected'], } self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2) def test_list_editable_popup(self): """ Fields should not be list-editable in popups. """ response = self.client.get(reverse('admin:admin_views_person_changelist')) self.assertNotEqual(response.context['cl'].list_editable, ()) response = self.client.get(reverse('admin:admin_views_person_changelist') + '?%s' % IS_POPUP_VAR) self.assertEqual(response.context['cl'].list_editable, ()) def test_pk_hidden_fields(self): """ hidden pk fields aren't displayed in the table body and their corresponding human-readable value is displayed instead. The hidden pk fields are displayed but separately (not in the table) and only once. """ story1 = Story.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...') story2 = Story.objects.create( title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...', ) response = self.client.get(reverse('admin:admin_views_story_changelist')) # Only one hidden field, in a separate place than the table. self.assertContains(response, 'id="id_form-0-id"', 1) self.assertContains(response, 'id="id_form-1-id"', 1) self.assertContains( response, '<div class="hiddenfields">\n' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n</div>' % (story2.id, story1.id), html=True ) self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1) self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1) def test_pk_hidden_fields_with_list_display_links(self): """ Similarly as test_pk_hidden_fields, but when the hidden pk fields are referenced in list_display_links. Refs #12475. """ story1 = OtherStory.objects.create( title='The adventures of Guido', content='Once upon a time in Djangoland...', ) story2 = OtherStory.objects.create( title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...', ) link1 = reverse('admin:admin_views_otherstory_change', args=(story1.pk,)) link2 = reverse('admin:admin_views_otherstory_change', args=(story2.pk,)) response = self.client.get(reverse('admin:admin_views_otherstory_changelist')) # Only one hidden field, in a separate place than the table. 
self.assertContains(response, 'id="id_form-0-id"', 1) self.assertContains(response, 'id="id_form-1-id"', 1) self.assertContains( response, '<div class="hiddenfields">\n' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n</div>' % (story2.id, story1.id), html=True ) self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id), 1) self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id), 1) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminSearchTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False) cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True) Person.objects.create(name='John Doe', gender=1) Person.objects.create(name='John O"Hara', gender=1) Person.objects.create(name="John O'Hara", gender=1) cls.t1 = Recommender.objects.create() cls.t2 = Recommendation.objects.create(the_recommender=cls.t1) cls.t3 = Recommender.objects.create() cls.t4 = Recommendation.objects.create(the_recommender=cls.t3) cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text='Bar') cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text='Foo') cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text='Few') cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text='Bas') def setUp(self): self.client.force_login(self.superuser) def test_search_on_sibling_models(self): "A search that mentions sibling models" response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar') # confirm the search returned 1 object self.assertContains(response, "\n1 recommendation\n") def test_with_fk_to_field(self): """ The to_field GET parameter is preserved when a search is performed. Refs #10918. 
""" response = self.client.get(reverse('admin:auth_user_changelist') + '?q=joe&%s=id' % TO_FIELD_VAR) self.assertContains(response, "\n1 user\n") self.assertContains(response, '<input type="hidden" name="%s" value="id">' % TO_FIELD_VAR, html=True) def test_exact_matches(self): response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar') # confirm the search returned one object self.assertContains(response, "\n1 recommendation\n") response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=ba') # confirm the search returned zero objects self.assertContains(response, "\n0 recommendations\n") def test_beginning_matches(self): response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui') # confirm the search returned one object self.assertContains(response, "\n1 person\n") self.assertContains(response, "Guido") response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=uido') # confirm the search returned zero objects self.assertContains(response, "\n0 persons\n") self.assertNotContains(response, "Guido") def test_pluggable_search(self): PluggableSearchPerson.objects.create(name="Bob", age=10) PluggableSearchPerson.objects.create(name="Amy", age=20) response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=Bob') # confirm the search returned one object self.assertContains(response, "\n1 pluggable search person\n") self.assertContains(response, "Bob") response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=20') # confirm the search returned one object self.assertContains(response, "\n1 pluggable search person\n") self.assertContains(response, "Amy") def test_reset_link(self): """ Test presence of reset link in search bar ("1 result (_x total_)"). """ # 1 query for session + 1 for fetching user # + 1 for filtered result + 1 for filtered count # + 1 for total count with self.assertNumQueries(5): response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui') self.assertContains( response, """<span class="small quiet">1 result (<a href="?">6 total</a>)</span>""", html=True ) def test_no_total_count(self): """ #8408 -- "Show all" should be displayed instead of the total count if ModelAdmin.show_full_result_count is False. 
""" # 1 query for session + 1 for fetching user # + 1 for filtered result + 1 for filtered count with self.assertNumQueries(4): response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar') self.assertContains( response, """<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""", html=True ) self.assertTrue(response.context['cl'].show_admin_actions) def test_search_with_spaces(self): url = reverse('admin:admin_views_person_changelist') + '?q=%s' tests = [ ('"John Doe"', 1), ("'John Doe'", 1), ('John Doe', 0), ('"John Doe" John', 1), ("'John Doe' John", 1), ("John Doe John", 0), ('"John Do"', 1), ("'John Do'", 1), ("'John O\'Hara'", 0), ("'John O\\'Hara'", 1), ('"John O\"Hara"', 0), ('"John O\\"Hara"', 1), ] for search, hits in tests: with self.subTest(search=search): response = self.client.get(url % search) self.assertContains(response, '\n%s person' % hits) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminInheritedInlinesTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_inline(self): """ Inline models which inherit from a common parent are correctly handled. """ foo_user = "foo username" bar_user = "bar username" name_re = re.compile(b'name="(.*?)"') # test the add case response = self.client.get(reverse('admin:admin_views_persona_add')) names = name_re.findall(response.content) # make sure we have no duplicate HTML names self.assertEqual(len(names), len(set(names))) # test the add case post_data = { "name": "Test Name", # inline data "accounts-TOTAL_FORMS": "1", "accounts-INITIAL_FORMS": "0", "accounts-MAX_NUM_FORMS": "0", "accounts-0-username": foo_user, "accounts-2-TOTAL_FORMS": "1", "accounts-2-INITIAL_FORMS": "0", "accounts-2-MAX_NUM_FORMS": "0", "accounts-2-0-username": bar_user, } response = self.client.post(reverse('admin:admin_views_persona_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere self.assertEqual(Persona.objects.count(), 1) self.assertEqual(FooAccount.objects.count(), 1) self.assertEqual(BarAccount.objects.count(), 1) self.assertEqual(FooAccount.objects.all()[0].username, foo_user) self.assertEqual(BarAccount.objects.all()[0].username, bar_user) self.assertEqual(Persona.objects.all()[0].accounts.count(), 2) persona_id = Persona.objects.all()[0].id foo_id = FooAccount.objects.all()[0].id bar_id = BarAccount.objects.all()[0].id # test the edit case response = self.client.get(reverse('admin:admin_views_persona_change', args=(persona_id,))) names = name_re.findall(response.content) # make sure we have no duplicate HTML names self.assertEqual(len(names), len(set(names))) post_data = { "name": "Test Name", "accounts-TOTAL_FORMS": "2", "accounts-INITIAL_FORMS": "1", "accounts-MAX_NUM_FORMS": "0", "accounts-0-username": "%s-1" % foo_user, "accounts-0-account_ptr": str(foo_id), "accounts-0-persona": str(persona_id), "accounts-2-TOTAL_FORMS": "2", "accounts-2-INITIAL_FORMS": "1", "accounts-2-MAX_NUM_FORMS": "0", "accounts-2-0-username": "%s-1" % bar_user, "accounts-2-0-account_ptr": str(bar_id), "accounts-2-0-persona": str(persona_id), } response = self.client.post(reverse('admin:admin_views_persona_change', args=(persona_id,)), post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Persona.objects.count(), 1) self.assertEqual(FooAccount.objects.count(), 1) 
self.assertEqual(BarAccount.objects.count(), 1) self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user) self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user) self.assertEqual(Persona.objects.all()[0].accounts.count(), 2) @override_settings(ROOT_URLCONF='admin_views.urls') class TestCustomChangeList(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_custom_changelist(self): """ Validate that a custom ChangeList class can be used (#9749) """ # Insert some data post_data = {"name": "First Gadget"} response = self.client.post(reverse('admin:admin_views_gadget_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere # Hit the page once to get messages out of the queue message list response = self.client.get(reverse('admin:admin_views_gadget_changelist')) # Data is still not visible on the page response = self.client.get(reverse('admin:admin_views_gadget_changelist')) self.assertNotContains(response, 'First Gadget') @override_settings(ROOT_URLCONF='admin_views.urls') class TestInlineNotEditable(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_GET_parent_add(self): """ InlineModelAdmin broken? """ response = self.client.get(reverse('admin:admin_views_parent_add')) self.assertEqual(response.status_code, 200) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminCustomQuerysetTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.pks = [EmptyModel.objects.create().id for i in range(3)] def setUp(self): self.client.force_login(self.superuser) self.super_login = { REDIRECT_FIELD_NAME: reverse('admin:index'), 'username': 'super', 'password': 'secret', } def test_changelist_view(self): response = self.client.get(reverse('admin:admin_views_emptymodel_changelist')) for i in self.pks: if i > 1: self.assertContains(response, 'Primary key = %s' % i) else: self.assertNotContains(response, 'Primary key = %s' % i) def test_changelist_view_count_queries(self): # create 2 Person objects Person.objects.create(name='person1', gender=1) Person.objects.create(name='person2', gender=2) changelist_url = reverse('admin:admin_views_person_changelist') # 5 queries are expected: 1 for the session, 1 for the user, # 2 for the counts and 1 for the objects on the page with self.assertNumQueries(5): resp = self.client.get(changelist_url) self.assertEqual(resp.context['selection_note'], '0 of 2 selected') self.assertEqual(resp.context['selection_note_all'], 'All 2 selected') with self.assertNumQueries(5): extra = {'q': 'not_in_name'} resp = self.client.get(changelist_url, extra) self.assertEqual(resp.context['selection_note'], '0 of 0 selected') self.assertEqual(resp.context['selection_note_all'], 'All 0 selected') with self.assertNumQueries(5): extra = {'q': 'person'} resp = self.client.get(changelist_url, extra) self.assertEqual(resp.context['selection_note'], '0 of 2 selected') self.assertEqual(resp.context['selection_note_all'], 'All 2 selected') with self.assertNumQueries(5): extra = {'gender__exact': '1'} resp = self.client.get(changelist_url, extra) self.assertEqual(resp.context['selection_note'], 
'0 of 1 selected') self.assertEqual(resp.context['selection_note_all'], '1 selected') def test_change_view(self): for i in self.pks: url = reverse('admin:admin_views_emptymodel_change', args=(i,)) response = self.client.get(url, follow=True) if i > 1: self.assertEqual(response.status_code, 200) else: self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['empty model with ID “1” doesn’t exist. Perhaps it was deleted?'] ) def test_add_model_modeladmin_defer_qs(self): # Test for #14529. defer() is used in ModelAdmin.get_queryset() # model has __str__ method self.assertEqual(CoverLetter.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "author": "Candidate, Best", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_coverletter_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(CoverLetter.objects.count(), 1) # Message should contain non-ugly model verbose name pk = CoverLetter.objects.all()[0].pk self.assertContains( response, '<li class="success">The cover letter “<a href="%s">' 'Candidate, Best</a>” was added successfully.</li>' % reverse('admin:admin_views_coverletter_change', args=(pk,)), html=True ) # model has no __str__ method self.assertEqual(ShortMessage.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "content": "What's this SMS thing?", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_shortmessage_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(ShortMessage.objects.count(), 1) # Message should contain non-ugly model verbose name sm = ShortMessage.objects.all()[0] self.assertContains( response, '<li class="success">The short message “<a href="%s">' '%s</a>” was added successfully.</li>' % (reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True ) def test_add_model_modeladmin_only_qs(self): # Test for #14529. only() is used in ModelAdmin.get_queryset() # model has __str__ method self.assertEqual(Telegram.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "title": "Urgent telegram", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_telegram_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Telegram.objects.count(), 1) # Message should contain non-ugly model verbose name pk = Telegram.objects.all()[0].pk self.assertContains( response, '<li class="success">The telegram “<a href="%s">' 'Urgent telegram</a>” was added successfully.</li>' % reverse('admin:admin_views_telegram_change', args=(pk,)), html=True ) # model has no __str__ method self.assertEqual(Paper.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "title": "My Modified Paper Title", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_paper_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Paper.objects.count(), 1) # Message should contain non-ugly model verbose name p = Paper.objects.all()[0] self.assertContains( response, '<li class="success">The paper “<a href="%s">' '%s</a>” was added successfully.</li>' % (reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True ) def test_edit_model_modeladmin_defer_qs(self): # Test for #14529. 
defer() is used in ModelAdmin.get_queryset() # model has __str__ method cl = CoverLetter.objects.create(author="John Doe") self.assertEqual(CoverLetter.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_coverletter_change', args=(cl.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "author": "John Doe II", "_save": "Save", } url = reverse('admin:admin_views_coverletter_change', args=(cl.pk,)) response = self.client.post(url, post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(CoverLetter.objects.count(), 1) # Message should contain non-ugly model verbose name. Instance # representation is set by model's __str__() self.assertContains( response, '<li class="success">The cover letter “<a href="%s">' 'John Doe II</a>” was changed successfully.</li>' % reverse('admin:admin_views_coverletter_change', args=(cl.pk,)), html=True ) # model has no __str__ method sm = ShortMessage.objects.create(content="This is expensive") self.assertEqual(ShortMessage.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "content": "Too expensive", "_save": "Save", } url = reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)) response = self.client.post(url, post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(ShortMessage.objects.count(), 1) # Message should contain non-ugly model verbose name. The ugly(!) # instance representation is set by __str__(). self.assertContains( response, '<li class="success">The short message “<a href="%s">' '%s</a>” was changed successfully.</li>' % (reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True ) def test_edit_model_modeladmin_only_qs(self): # Test for #14529. only() is used in ModelAdmin.get_queryset() # model has __str__ method t = Telegram.objects.create(title="First Telegram") self.assertEqual(Telegram.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_telegram_change', args=(t.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "title": "Telegram without typo", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_telegram_change', args=(t.pk,)), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Telegram.objects.count(), 1) # Message should contain non-ugly model verbose name. The instance # representation is set by model's __str__() self.assertContains( response, '<li class="success">The telegram “<a href="%s">' 'Telegram without typo</a>” was changed successfully.</li>' % reverse('admin:admin_views_telegram_change', args=(t.pk,)), html=True ) # model has no __str__ method p = Paper.objects.create(title="My Paper Title") self.assertEqual(Paper.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_paper_change', args=(p.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "title": "My Modified Paper Title", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_paper_change', args=(p.pk,)), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Paper.objects.count(), 1) # Message should contain non-ugly model verbose name. The ugly(!) # instance representation is set by __str__(). 
self.assertContains( response, '<li class="success">The paper “<a href="%s">' '%s</a>” was changed successfully.</li>' % (reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True ) def test_history_view_custom_qs(self): """ Custom querysets are considered for the admin history view. """ self.client.post(reverse('admin:login'), self.super_login) FilteredManager.objects.create(pk=1) FilteredManager.objects.create(pk=2) response = self.client.get(reverse('admin:admin_views_filteredmanager_changelist')) self.assertContains(response, "PK=1") self.assertContains(response, "PK=2") self.assertEqual( self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(1,))).status_code, 200 ) self.assertEqual( self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(2,))).status_code, 200 ) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminInlineFileUploadTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') file1 = tempfile.NamedTemporaryFile(suffix=".file1") file1.write(b'a' * (2 ** 21)) filename = file1.name file1.close() cls.gallery = Gallery.objects.create(name='Test Gallery') cls.picture = Picture.objects.create( name='Test Picture', image=filename, gallery=cls.gallery, ) def setUp(self): self.client.force_login(self.superuser) def test_form_has_multipart_enctype(self): response = self.client.get( reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)) ) self.assertIs(response.context['has_file_field'], True) self.assertContains(response, MULTIPART_ENCTYPE) def test_inline_file_upload_edit_validation_error_post(self): """ Inline file uploads correctly display prior data (#10002). """ post_data = { "name": "Test Gallery", "pictures-TOTAL_FORMS": "2", "pictures-INITIAL_FORMS": "1", "pictures-MAX_NUM_FORMS": "0", "pictures-0-id": str(self.picture.id), "pictures-0-gallery": str(self.gallery.id), "pictures-0-name": "Test Picture", "pictures-0-image": "", "pictures-1-id": "", "pictures-1-gallery": str(self.gallery.id), "pictures-1-name": "Test Picture 2", "pictures-1-image": "", } response = self.client.post( reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)), post_data ) self.assertContains(response, b"Currently") @override_settings(ROOT_URLCONF='admin_views.urls') class AdminInlineTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.collector = Collector.objects.create(pk=1, name='John Fowles') def setUp(self): self.post_data = { "name": "Test Name", "widget_set-TOTAL_FORMS": "3", "widget_set-INITIAL_FORMS": "0", "widget_set-MAX_NUM_FORMS": "0", "widget_set-0-id": "", "widget_set-0-owner": "1", "widget_set-0-name": "", "widget_set-1-id": "", "widget_set-1-owner": "1", "widget_set-1-name": "", "widget_set-2-id": "", "widget_set-2-owner": "1", "widget_set-2-name": "", "doohickey_set-TOTAL_FORMS": "3", "doohickey_set-INITIAL_FORMS": "0", "doohickey_set-MAX_NUM_FORMS": "0", "doohickey_set-0-owner": "1", "doohickey_set-0-code": "", "doohickey_set-0-name": "", "doohickey_set-1-owner": "1", "doohickey_set-1-code": "", "doohickey_set-1-name": "", "doohickey_set-2-owner": "1", "doohickey_set-2-code": "", "doohickey_set-2-name": "", "grommet_set-TOTAL_FORMS": "3", "grommet_set-INITIAL_FORMS": "0", "grommet_set-MAX_NUM_FORMS": "0", "grommet_set-0-code": "", "grommet_set-0-owner": "1", "grommet_set-0-name": 
"", "grommet_set-1-code": "", "grommet_set-1-owner": "1", "grommet_set-1-name": "", "grommet_set-2-code": "", "grommet_set-2-owner": "1", "grommet_set-2-name": "", "whatsit_set-TOTAL_FORMS": "3", "whatsit_set-INITIAL_FORMS": "0", "whatsit_set-MAX_NUM_FORMS": "0", "whatsit_set-0-owner": "1", "whatsit_set-0-index": "", "whatsit_set-0-name": "", "whatsit_set-1-owner": "1", "whatsit_set-1-index": "", "whatsit_set-1-name": "", "whatsit_set-2-owner": "1", "whatsit_set-2-index": "", "whatsit_set-2-name": "", "fancydoodad_set-TOTAL_FORMS": "3", "fancydoodad_set-INITIAL_FORMS": "0", "fancydoodad_set-MAX_NUM_FORMS": "0", "fancydoodad_set-0-doodad_ptr": "", "fancydoodad_set-0-owner": "1", "fancydoodad_set-0-name": "", "fancydoodad_set-0-expensive": "on", "fancydoodad_set-1-doodad_ptr": "", "fancydoodad_set-1-owner": "1", "fancydoodad_set-1-name": "", "fancydoodad_set-1-expensive": "on", "fancydoodad_set-2-doodad_ptr": "", "fancydoodad_set-2-owner": "1", "fancydoodad_set-2-name": "", "fancydoodad_set-2-expensive": "on", "category_set-TOTAL_FORMS": "3", "category_set-INITIAL_FORMS": "0", "category_set-MAX_NUM_FORMS": "0", "category_set-0-order": "", "category_set-0-id": "", "category_set-0-collector": "1", "category_set-1-order": "", "category_set-1-id": "", "category_set-1-collector": "1", "category_set-2-order": "", "category_set-2-id": "", "category_set-2-collector": "1", } self.client.force_login(self.superuser) def test_simple_inline(self): "A simple model can be saved as inlines" # First add a new inline self.post_data['widget_set-0-name'] = "Widget 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Widget.objects.count(), 1) self.assertEqual(Widget.objects.all()[0].name, "Widget 1") widget_id = Widget.objects.all()[0].id # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="widget_set-0-id"') # No file or image fields, no enctype on the forms self.assertIs(response.context['has_file_field'], False) self.assertNotContains(response, MULTIPART_ENCTYPE) # Now resave that inline self.post_data['widget_set-INITIAL_FORMS'] = "1" self.post_data['widget_set-0-id'] = str(widget_id) self.post_data['widget_set-0-name'] = "Widget 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Widget.objects.count(), 1) self.assertEqual(Widget.objects.all()[0].name, "Widget 1") # Now modify that inline self.post_data['widget_set-INITIAL_FORMS'] = "1" self.post_data['widget_set-0-id'] = str(widget_id) self.post_data['widget_set-0-name'] = "Widget 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Widget.objects.count(), 1) self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated") def test_explicit_autofield_inline(self): "A model with an explicit autofield primary key can be saved as inlines. 
Regression for #8093" # First add a new inline self.post_data['grommet_set-0-name'] = "Grommet 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Grommet.objects.count(), 1) self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1") # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="grommet_set-0-code"') # Now resave that inline self.post_data['grommet_set-INITIAL_FORMS'] = "1" self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code) self.post_data['grommet_set-0-name'] = "Grommet 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Grommet.objects.count(), 1) self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1") # Now modify that inline self.post_data['grommet_set-INITIAL_FORMS'] = "1" self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code) self.post_data['grommet_set-0-name'] = "Grommet 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Grommet.objects.count(), 1) self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated") def test_char_pk_inline(self): "A model with a character PK can be saved as inlines. Regression for #10992" # First add a new inline self.post_data['doohickey_set-0-code'] = "DH1" self.post_data['doohickey_set-0-name'] = "Doohickey 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(DooHickey.objects.count(), 1) self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1") # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="doohickey_set-0-code"') # Now resave that inline self.post_data['doohickey_set-INITIAL_FORMS'] = "1" self.post_data['doohickey_set-0-code'] = "DH1" self.post_data['doohickey_set-0-name'] = "Doohickey 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(DooHickey.objects.count(), 1) self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1") # Now modify that inline self.post_data['doohickey_set-INITIAL_FORMS'] = "1" self.post_data['doohickey_set-0-code'] = "DH1" self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(DooHickey.objects.count(), 1) self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated") def test_integer_pk_inline(self): "A model with an integer PK can be saved as inlines. 
Regression for #10992" # First add a new inline self.post_data['whatsit_set-0-index'] = "42" self.post_data['whatsit_set-0-name'] = "Whatsit 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Whatsit.objects.count(), 1) self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1") # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="whatsit_set-0-index"') # Now resave that inline self.post_data['whatsit_set-INITIAL_FORMS'] = "1" self.post_data['whatsit_set-0-index'] = "42" self.post_data['whatsit_set-0-name'] = "Whatsit 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Whatsit.objects.count(), 1) self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1") # Now modify that inline self.post_data['whatsit_set-INITIAL_FORMS'] = "1" self.post_data['whatsit_set-0-index'] = "42" self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Whatsit.objects.count(), 1) self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated") def test_inherited_inline(self): "An inherited model can be saved as inlines. Regression for #11042" # First add a new inline self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(FancyDoodad.objects.count(), 1) self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1") doodad_pk = FancyDoodad.objects.all()[0].pk # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"') # Now resave that inline self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1" self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk) self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(FancyDoodad.objects.count(), 1) self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1") # Now modify that inline self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1" self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk) self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(FancyDoodad.objects.count(), 1) self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated") def test_ordered_inline(self): """ An inline with an editable ordering fields is updated correctly. """ # Create some objects with an initial ordering Category.objects.create(id=1, order=1, collector=self.collector) Category.objects.create(id=2, order=2, collector=self.collector) Category.objects.create(id=3, order=0, collector=self.collector) Category.objects.create(id=4, order=0, collector=self.collector) # NB: The order values must be changed so that the items are reordered. 
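        # A hedged aside on the POST payloads used throughout these inline tests: each
        # admin inline is rendered as a formset, so an emulated POST must carry the
        # management-form counters plus the prefixed per-form fields. A minimal,
        # hypothetical payload for one new widget row would look like:
        #
        #     {
        #         'widget_set-TOTAL_FORMS': '1',    # number of forms submitted
        #         'widget_set-INITIAL_FORMS': '0',  # number bound to existing rows
        #         'widget_set-MAX_NUM_FORMS': '0',
        #         'widget_set-0-owner': '1',
        #         'widget_set-0-name': 'Widget 1',
        #     }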
self.post_data.update({ "name": "Frederick Clegg", "category_set-TOTAL_FORMS": "7", "category_set-INITIAL_FORMS": "4", "category_set-MAX_NUM_FORMS": "0", "category_set-0-order": "14", "category_set-0-id": "1", "category_set-0-collector": "1", "category_set-1-order": "13", "category_set-1-id": "2", "category_set-1-collector": "1", "category_set-2-order": "1", "category_set-2-id": "3", "category_set-2-collector": "1", "category_set-3-order": "0", "category_set-3-id": "4", "category_set-3-collector": "1", "category_set-4-order": "", "category_set-4-id": "", "category_set-4-collector": "1", "category_set-5-order": "", "category_set-5-id": "", "category_set-5-collector": "1", "category_set-6-order": "", "category_set-6-id": "", "category_set-6-collector": "1", }) collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) # Successful post will redirect self.assertEqual(response.status_code, 302) # The order values have been applied to the right objects self.assertEqual(self.collector.category_set.count(), 4) self.assertEqual(Category.objects.get(id=1).order, 14) self.assertEqual(Category.objects.get(id=2).order, 13) self.assertEqual(Category.objects.get(id=3).order, 1) self.assertEqual(Category.objects.get(id=4).order, 0) @override_settings(ROOT_URLCONF='admin_views.urls') class NeverCacheTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') def setUp(self): self.client.force_login(self.superuser) def test_admin_index(self): "Check the never-cache status of the main index" response = self.client.get(reverse('admin:index')) self.assertEqual(get_max_age(response), 0) def test_app_index(self): "Check the never-cache status of an application index" response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertEqual(get_max_age(response), 0) def test_model_index(self): "Check the never-cache status of a model index" response = self.client.get(reverse('admin:admin_views_fabric_changelist')) self.assertEqual(get_max_age(response), 0) def test_model_add(self): "Check the never-cache status of a model add page" response = self.client.get(reverse('admin:admin_views_fabric_add')) self.assertEqual(get_max_age(response), 0) def test_model_view(self): "Check the never-cache status of a model edit page" response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertEqual(get_max_age(response), 0) def test_model_history(self): "Check the never-cache status of a model history page" response = self.client.get(reverse('admin:admin_views_section_history', args=(self.s1.pk,))) self.assertEqual(get_max_age(response), 0) def test_model_delete(self): "Check the never-cache status of a model delete page" response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))) self.assertEqual(get_max_age(response), 0) def test_login(self): "Check the never-cache status of login views" self.client.logout() response = self.client.get(reverse('admin:index')) self.assertEqual(get_max_age(response), 0) def test_logout(self): "Check the never-cache status of logout view" response = self.client.get(reverse('admin:logout')) self.assertEqual(get_max_age(response), 0) def test_password_change(self): "Check the never-cache status of the password change view" self.client.logout() response = 
self.client.get(reverse('admin:password_change')) self.assertIsNone(get_max_age(response)) def test_password_change_done(self): "Check the never-cache status of the password change done view" response = self.client.get(reverse('admin:password_change_done')) self.assertIsNone(get_max_age(response)) def test_JS_i18n(self): "Check the never-cache status of the JavaScript i18n view" response = self.client.get(reverse('admin:jsi18n')) self.assertIsNone(get_max_age(response)) @override_settings(ROOT_URLCONF='admin_views.urls') class PrePopulatedTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def setUp(self): self.client.force_login(self.superuser) def test_prepopulated_on(self): response = self.client.get(reverse('admin:admin_views_prepopulatedpost_add')) self.assertContains(response, "&quot;id&quot;: &quot;#id_slug&quot;") self.assertContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]") self.assertContains(response, "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;") def test_prepopulated_off(self): response = self.client.get(reverse('admin:admin_views_prepopulatedpost_change', args=(self.p1.pk,))) self.assertContains(response, "A Long Title") self.assertNotContains(response, "&quot;id&quot;: &quot;#id_slug&quot;") self.assertNotContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]") self.assertNotContains( response, "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;" ) @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True) def test_prepopulated_maxlength_localized(self): """ Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure that maxLength (in the JavaScript) is rendered without separators. """ response = self.client.get(reverse('admin:admin_views_prepopulatedpostlargeslug_add')) self.assertContains(response, "&quot;maxLength&quot;: 1000") # instead of 1,000 def test_view_only_add_form(self): """ PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug' which is present in the add view, even if the ModelAdmin.has_change_permission() returns False. """ response = self.client.get(reverse('admin7:admin_views_prepopulatedpost_add')) self.assertContains(response, 'data-prepopulated-fields=') self.assertContains(response, '&quot;id&quot;: &quot;#id_slug&quot;') def test_view_only_change_form(self): """ PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'. That doesn't break a view-only change view. 
""" response = self.client.get(reverse('admin7:admin_views_prepopulatedpost_change', args=(self.p1.pk,))) self.assertContains(response, 'data-prepopulated-fields="[]"') self.assertContains(response, '<div class="readonly">%s</div>' % self.p1.slug) @override_settings(ROOT_URLCONF='admin_views.urls') class SeleniumTests(AdminSeleniumTestCase): available_apps = ['admin_views'] + AdminSeleniumTestCase.available_apps def setUp(self): self.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') self.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def test_login_button_centered(self): self.selenium.get(self.live_server_url + reverse('admin:login')) button = self.selenium.find_element_by_css_selector('.submit-row input') offset_left = button.get_property('offsetLeft') offset_right = ( button.get_property('offsetParent').get_property('offsetWidth') - (offset_left + button.get_property('offsetWidth')) ) # Use assertAlmostEqual to avoid pixel rounding errors. self.assertAlmostEqual(offset_left, offset_right, delta=3) def test_prepopulated_fields(self): """ The JavaScript-automated prepopulated fields work with the main form and with stacked and tabular inlines. Refs #13068, #9264, #9983, #9784. """ self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_mainprepopulated_add')) self.wait_for('.select2') # Main form ---------------------------------------------------------- self.selenium.find_element_by_id('id_pubdate').send_keys('2012-02-18') self.select_option('#id_status', 'option two') self.selenium.find_element_by_id('id_name').send_keys(' the mAin nÀMë and it\'s awεšomeıııİ') slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value') slug3 = self.selenium.find_element_by_id('id_slug3').get_attribute('value') self.assertEqual(slug1, 'the-main-name-and-its-awesomeiiii-2012-02-18') self.assertEqual(slug2, 'option-two-the-main-name-and-its-awesomeiiii') self.assertEqual(slug3, 'the-main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131i') # Stacked inlines ---------------------------------------------------- # Initial inline self.selenium.find_element_by_id('id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17') self.select_option('#id_relatedprepopulated_set-0-status', 'option one') self.selenium.find_element_by_id('id_relatedprepopulated_set-0-name').send_keys( ' here is a sŤāÇkeð inline ! ' ) slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug2').get_attribute('value') self.assertEqual(slug1, 'here-is-a-stacked-inline-2011-12-17') self.assertEqual(slug2, 'option-one-here-is-a-stacked-inline') initial_select2_inputs = self.selenium.find_elements_by_class_name('select2-selection') # Inline formsets have empty/invisible forms. # Only the 4 visible select2 inputs are initialized. 
num_initial_select2_inputs = len(initial_select2_inputs) self.assertEqual(num_initial_select2_inputs, 4) # Add an inline self.selenium.find_elements_by_link_text('Add another Related prepopulated')[0].click() self.assertEqual( len(self.selenium.find_elements_by_class_name('select2-selection')), num_initial_select2_inputs + 2 ) self.selenium.find_element_by_id('id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25') self.select_option('#id_relatedprepopulated_set-1-status', 'option two') self.selenium.find_element_by_id('id_relatedprepopulated_set-1-name').send_keys( ' now you haVe anöther sŤāÇkeð inline with a very ... ' 'loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... ' ) slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug2').get_attribute('value') # 50 characters maximum for slug1 field self.assertEqual(slug1, 'now-you-have-another-stacked-inline-with-a-very-lo') # 60 characters maximum for slug2 field self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-with-a-very-l') # Tabular inlines ---------------------------------------------------- # Initial inline self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07') self.select_option('#id_relatedprepopulated_set-2-0-status', 'option two') self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-name').send_keys( 'And now, with a tÃbűlaŘ inline !!!' ) slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug2').get_attribute('value') self.assertEqual(slug1, 'and-now-with-a-tabular-inline-1234-12-07') self.assertEqual(slug2, 'option-two-and-now-with-a-tabular-inline') # Add an inline # Button may be outside the browser frame. element = self.selenium.find_elements_by_link_text('Add another Related prepopulated')[1] self.selenium.execute_script('window.scrollTo(0, %s);' % element.location['y']) element.click() self.assertEqual( len(self.selenium.find_elements_by_class_name('select2-selection')), num_initial_select2_inputs + 4 ) self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22') self.select_option('#id_relatedprepopulated_set-2-1-status', 'option one') self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-name').send_keys( r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters' ) slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug2').get_attribute('value') self.assertEqual(slug1, 'tabular-inline-with-ignored-characters-1981-08-22') self.assertEqual(slug2, 'option-one-tabular-inline-with-ignored-characters') # Add an inline without an initial inline. # The button is outside of the browser frame. 
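        # (Aside: an equivalent way to deal with an off-screen link, instead of the
        # manual window.scrollTo() calls used here, is to scroll the element itself into
        # view before clicking, e.g.:
        #
        #     self.selenium.execute_script('arguments[0].scrollIntoView();', element)
        #     element.click()
        # )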
self.selenium.execute_script("window.scrollTo(0, document.body.scrollHeight);") self.selenium.find_elements_by_link_text('Add another Related prepopulated')[2].click() self.assertEqual( len(self.selenium.find_elements_by_class_name('select2-selection')), num_initial_select2_inputs + 6 ) # Save and check that everything is properly stored in the database with self.wait_page_loaded(): self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.assertEqual(MainPrepopulated.objects.all().count(), 1) MainPrepopulated.objects.get( name=' the mAin nÀMë and it\'s awεšomeıııİ', pubdate='2012-02-18', status='option two', slug1='the-main-name-and-its-awesomeiiii-2012-02-18', slug2='option-two-the-main-name-and-its-awesomeiiii', slug3='the-main-nàmë-and-its-awεšomeıııi', ) self.assertEqual(RelatedPrepopulated.objects.all().count(), 4) RelatedPrepopulated.objects.get( name=' here is a sŤāÇkeð inline ! ', pubdate='2011-12-17', status='option one', slug1='here-is-a-stacked-inline-2011-12-17', slug2='option-one-here-is-a-stacked-inline', ) RelatedPrepopulated.objects.get( # 75 characters in name field name=' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooo', pubdate='1999-01-25', status='option two', slug1='now-you-have-another-stacked-inline-with-a-very-lo', slug2='option-two-now-you-have-another-stacked-inline-with-a-very-l', ) RelatedPrepopulated.objects.get( name='And now, with a tÃbűlaŘ inline !!!', pubdate='1234-12-07', status='option two', slug1='and-now-with-a-tabular-inline-1234-12-07', slug2='option-two-and-now-with-a-tabular-inline', ) RelatedPrepopulated.objects.get( name=r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters', pubdate='1981-08-22', status='option one', slug1='tabular-inline-with-ignored-characters-1981-08-22', slug2='option-one-tabular-inline-with-ignored-characters', ) def test_populate_existing_object(self): """ The prepopulation works for existing objects too, as long as the original field is empty (#19082). """ # Slugs are empty to start with. item = MainPrepopulated.objects.create( name=' this is the mAin nÀMë', pubdate='2012-02-18', status='option two', slug1='', slug2='', ) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) object_url = self.live_server_url + reverse('admin:admin_views_mainprepopulated_change', args=(item.id,)) self.selenium.get(object_url) self.selenium.find_element_by_id('id_name').send_keys(' the best') # The slugs got prepopulated since they were originally empty slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value') self.assertEqual(slug1, 'this-is-the-main-name-the-best-2012-02-18') self.assertEqual(slug2, 'option-two-this-is-the-main-name-the-best') # Save the object with self.wait_page_loaded(): self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.selenium.get(object_url) self.selenium.find_element_by_id('id_name').send_keys(' hello') # The slugs got prepopulated didn't change since they were originally not empty slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value') self.assertEqual(slug1, 'this-is-the-main-name-the-best-2012-02-18') self.assertEqual(slug2, 'option-two-this-is-the-main-name-the-best') def test_collapsible_fieldset(self): """ The 'collapse' class in fieldsets definition allows to show/hide the appropriate field section. 
""" self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_add')) self.assertFalse(self.selenium.find_element_by_id('id_title').is_displayed()) self.selenium.find_elements_by_link_text('Show')[0].click() self.assertTrue(self.selenium.find_element_by_id('id_title').is_displayed()) self.assertEqual(self.selenium.find_element_by_id('fieldsetcollapser0').text, "Hide") def test_first_field_focus(self): """JavaScript-assisted auto-focus on first usable form field.""" # First form field has a single widget self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) with self.wait_page_loaded(): self.selenium.get(self.live_server_url + reverse('admin:admin_views_picture_add')) self.assertEqual( self.selenium.switch_to.active_element, self.selenium.find_element_by_id('id_name') ) # First form field has a MultiWidget with self.wait_page_loaded(): self.selenium.get(self.live_server_url + reverse('admin:admin_views_reservation_add')) self.assertEqual( self.selenium.switch_to.active_element, self.selenium.find_element_by_id('id_start_date_0') ) def test_cancel_delete_confirmation(self): "Cancelling the deletion of an object takes the user back one page." pizza = Pizza.objects.create(name="Double Cheese") url = reverse('admin:admin_views_pizza_change', args=(pizza.id,)) full_url = self.live_server_url + url self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(full_url) self.selenium.find_element_by_class_name('deletelink').click() # Click 'cancel' on the delete page. self.selenium.find_element_by_class_name('cancel-link').click() # Wait until we're back on the change page. self.wait_for_text('#content h1', 'Change pizza') self.assertEqual(self.selenium.current_url, full_url) self.assertEqual(Pizza.objects.count(), 1) def test_cancel_delete_related_confirmation(self): """ Cancelling the deletion of an object with relations takes the user back one page. """ pizza = Pizza.objects.create(name="Double Cheese") topping1 = Topping.objects.create(name="Cheddar") topping2 = Topping.objects.create(name="Mozzarella") pizza.toppings.add(topping1, topping2) url = reverse('admin:admin_views_pizza_change', args=(pizza.id,)) full_url = self.live_server_url + url self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(full_url) self.selenium.find_element_by_class_name('deletelink').click() # Click 'cancel' on the delete page. self.selenium.find_element_by_class_name('cancel-link').click() # Wait until we're back on the change page. self.wait_for_text('#content h1', 'Change pizza') self.assertEqual(self.selenium.current_url, full_url) self.assertEqual(Pizza.objects.count(), 1) self.assertEqual(Topping.objects.count(), 2) def test_list_editable_popups(self): """ list_editable foreign keys have add/change popups. 
""" from selenium.webdriver.support.ui import Select s1 = Section.objects.create(name='Test section') Article.objects.create( title='foo', content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=s1, ) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_changelist')) # Change popup self.selenium.find_element_by_id('change_id_form-0-section').click() self.wait_for_and_switch_to_popup() self.wait_for_text('#content h1', 'Change section') name_input = self.selenium.find_element_by_id('id_name') name_input.clear() name_input.send_keys('<i>edited section</i>') self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) # Hide sidebar. toggle_button = self.selenium.find_element_by_css_selector('#toggle-nav-sidebar') toggle_button.click() select = Select(self.selenium.find_element_by_id('id_form-0-section')) self.assertEqual(select.first_selected_option.text, '<i>edited section</i>') # Rendered select2 input. select2_display = self.selenium.find_element_by_class_name('select2-selection__rendered') # Clear button (×\n) is included in text. self.assertEqual(select2_display.text, '×\n<i>edited section</i>') # Add popup self.selenium.find_element_by_id('add_id_form-0-section').click() self.wait_for_and_switch_to_popup() self.wait_for_text('#content h1', 'Add section') self.selenium.find_element_by_id('id_name').send_keys('new section') self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) select = Select(self.selenium.find_element_by_id('id_form-0-section')) self.assertEqual(select.first_selected_option.text, 'new section') select2_display = self.selenium.find_element_by_class_name('select2-selection__rendered') # Clear button (×\n) is included in text. 
        self.assertEqual(select2_display.text, '×\nnew section')

    def test_inline_uuid_pk_edit_with_popup(self):
        from selenium.webdriver.support.ui import Select
        parent = ParentWithUUIDPK.objects.create(title='test')
        related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
        self.selenium.get(self.live_server_url + change_url)
        self.selenium.find_element_by_id('change_id_parent').click()
        self.wait_for_and_switch_to_popup()
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        select = Select(self.selenium.find_element_by_id('id_parent'))
        self.assertEqual(select.first_selected_option.text, str(parent.id))
        self.assertEqual(select.first_selected_option.get_attribute('value'), str(parent.id))

    def test_inline_uuid_pk_add_with_popup(self):
        from selenium.webdriver.support.ui import Select
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_relatedwithuuidpkmodel_add'))
        self.selenium.find_element_by_id('add_id_parent').click()
        self.wait_for_and_switch_to_popup()
        self.selenium.find_element_by_id('id_title').send_keys('test')
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        select = Select(self.selenium.find_element_by_id('id_parent'))
        uuid_id = str(ParentWithUUIDPK.objects.first().id)
        self.assertEqual(select.first_selected_option.text, uuid_id)
        self.assertEqual(select.first_selected_option.get_attribute('value'), uuid_id)

    def test_inline_uuid_pk_delete_with_popup(self):
        from selenium.webdriver.support.ui import Select
        parent = ParentWithUUIDPK.objects.create(title='test')
        related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
        self.selenium.get(self.live_server_url + change_url)
        self.selenium.find_element_by_id('delete_id_parent').click()
        self.wait_for_and_switch_to_popup()
        self.selenium.find_element_by_xpath('//input[@value="Yes, I’m sure"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        select = Select(self.selenium.find_element_by_id('id_parent'))
        self.assertEqual(ParentWithUUIDPK.objects.count(), 0)
        self.assertEqual(select.first_selected_option.text, '---------')
        self.assertEqual(select.first_selected_option.get_attribute('value'), '')

    def test_inline_with_popup_cancel_delete(self):
        """Clicking "No, take me back" on a delete popup closes the window."""
        parent = ParentWithUUIDPK.objects.create(title='test')
        related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
        self.selenium.get(self.live_server_url + change_url)
        self.selenium.find_element_by_id('delete_id_parent').click()
        self.wait_for_and_switch_to_popup()
        self.selenium.find_element_by_xpath('//a[text()="No, take me back"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
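        # A note on the element ids these popup tests rely on: the admin's related-widget
        # wrapper renders its action links with ids derived from the form field name,
        # which is what makes lookups like the following possible (shown only as
        # illustration):
        #
        #     self.selenium.find_element_by_id('add_id_parent')     # "add another" link
        #     self.selenium.find_element_by_id('change_id_parent')  # "change" (pencil) link
        #     self.selenium.find_element_by_id('delete_id_parent')  # "delete" (cross) link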
        self.assertEqual(len(self.selenium.window_handles), 1)

    def test_list_editable_raw_id_fields(self):
        parent = ParentWithUUIDPK.objects.create(title='test')
        parent2 = ParentWithUUIDPK.objects.create(title='test2')
        RelatedWithUUIDPKModel.objects.create(parent=parent)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_changelist', current_app=site2.name)
        self.selenium.get(self.live_server_url + change_url)
        self.selenium.find_element_by_id('lookup_id_form-0-parent').click()
        self.wait_for_and_switch_to_popup()
        # Select "parent2" in the popup.
        self.selenium.find_element_by_link_text(str(parent2.pk)).click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        # The newly selected pk should appear in the raw id input.
        value = self.selenium.find_element_by_id('id_form-0-parent').get_attribute('value')
        self.assertEqual(value, str(parent2.pk))

    def test_input_element_font(self):
        """
        Browsers' default stylesheets override the font of inputs. The admin
        adds additional CSS to handle this.
        """
        self.selenium.get(self.live_server_url + reverse('admin:login'))
        element = self.selenium.find_element_by_id('id_username')
        # Some browsers quote the fonts, some don't.
        fonts = [
            font.strip().strip('"')
            for font in element.value_of_css_property('font-family').split(',')
        ]
        self.assertEqual(
            fonts,
            ['Roboto', 'Lucida Grande', 'Verdana', 'Arial', 'sans-serif'],
        )

    def test_search_input_filtered_page(self):
        Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
        Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        person_url = reverse('admin:admin_views_person_changelist') + '?q=Gui'
        self.selenium.get(self.live_server_url + person_url)
        self.assertGreater(
            self.selenium.find_element_by_id('searchbar').rect['width'],
            50,
        )


@override_settings(ROOT_URLCONF='admin_views.urls')
class ReadonlyTest(AdminFieldExtractionMixin, TestCase):
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_readonly_get(self):
        response = self.client.get(reverse('admin:admin_views_post_add'))
        self.assertNotContains(response, 'name="posted"')
        # 3 fields + 2 submit buttons + 5 inline management form fields + 2
        # hidden fields for inlines + 1 field for the inline + 2 empty forms
        self.assertContains(response, "<input", count=16)
        self.assertContains(response, formats.localize(datetime.date.today()))
        self.assertContains(response, "<label>Awesomeness level:</label>")
        self.assertContains(response, "Very awesome.")
        self.assertContains(response, "Unknown coolness.")
        self.assertContains(response, "foo")
        # Multiline text in a readonly field gets <br> tags
        self.assertContains(response, 'Multiline<br>test<br>string')
        self.assertContains(response, '<div class="readonly">Multiline<br>html<br>content</div>', html=True)
        self.assertContains(response, 'InlineMultiline<br>test<br>string')
        self.assertContains(response, formats.localize(datetime.date.today() - datetime.timedelta(days=7)))
        self.assertContains(response, '<div class="form-row field-coolness">')
        self.assertContains(response, '<div class="form-row field-awesomeness_level">')
        self.assertContains(response, '<div class="form-row field-posted">')
        self.assertContains(response, '<div class="form-row field-value">')
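        # A hedged sketch of the kind of ModelAdmin configuration this test exercises:
        # readonly_fields mixing model fields, a callable, and a ModelAdmin method. Names
        # are assumptions; the real definitions live in admin_views/admin.py and models.py.
        #
        #     class PostAdmin(admin.ModelAdmin):
        #         readonly_fields = (
        #             'posted', 'awesomeness_level', 'coolness', 'value',
        #             'multiline', lambda obj: "foo",
        #         )
        #
        #         def coolness(self, instance):
        #             if instance.pk:
        #                 return '%d amount of cool.' % instance.pk
        #             return 'Unknown coolness.'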
self.assertContains(response, '<div class="form-row">') self.assertContains(response, '<div class="help">', 3) self.assertContains( response, '<div class="help">Some help text for the title (with Unicode ŠĐĆŽćžšđ)</div>', html=True ) self.assertContains( response, '<div class="help">Some help text for the content (with Unicode ŠĐĆŽćžšđ)</div>', html=True ) self.assertContains( response, '<div class="help">Some help text for the date (with Unicode ŠĐĆŽćžšđ)</div>', html=True ) p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff") response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,))) self.assertContains(response, "%d amount of cool" % p.pk) def test_readonly_text_field(self): p = Post.objects.create( title="Readonly test", content="test", readonly_content='test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest', ) Link.objects.create( url="http://www.djangoproject.com", post=p, readonly_link_content="test\r\nlink", ) response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,))) # Checking readonly field. self.assertContains(response, 'test<br><br>test<br><br>test<br><br>test') # Checking readonly field in inline. self.assertContains(response, 'test<br>link') def test_readonly_post(self): data = { "title": "Django Got Readonly Fields", "content": "This is an incredible development.", "link_set-TOTAL_FORMS": "1", "link_set-INITIAL_FORMS": "0", "link_set-MAX_NUM_FORMS": "0", } response = self.client.post(reverse('admin:admin_views_post_add'), data) self.assertEqual(response.status_code, 302) self.assertEqual(Post.objects.count(), 1) p = Post.objects.get() self.assertEqual(p.posted, datetime.date.today()) data["posted"] = "10-8-1990" # some date that's not today response = self.client.post(reverse('admin:admin_views_post_add'), data) self.assertEqual(response.status_code, 302) self.assertEqual(Post.objects.count(), 2) p = Post.objects.order_by('-id')[0] self.assertEqual(p.posted, datetime.date.today()) def test_readonly_manytomany(self): "Regression test for #13004" response = self.client.get(reverse('admin:admin_views_pizza_add')) self.assertEqual(response.status_code, 200) def test_user_password_change_limited_queryset(self): su = User.objects.filter(is_superuser=True)[0] response = self.client.get(reverse('admin2:auth_user_password_change', args=(su.pk,))) self.assertEqual(response.status_code, 404) def test_change_form_renders_correct_null_choice_value(self): """ Regression test for #17911. """ choice = Choice.objects.create(choice=None) response = self.client.get(reverse('admin:admin_views_choice_change', args=(choice.pk,))) self.assertContains(response, '<div class="readonly">No opinion</div>', html=True) def test_readonly_foreignkey_links(self): """ ForeignKey readonly fields render as links if the target model is registered in admin. """ chapter = Chapter.objects.create( title='Chapter 1', content='content', book=Book.objects.create(name='Book 1'), ) language = Language.objects.create(iso='_40', name='Test') obj = ReadOnlyRelatedField.objects.create( chapter=chapter, language=language, user=self.superuser, ) response = self.client.get( reverse('admin:admin_views_readonlyrelatedfield_change', args=(obj.pk,)), ) # Related ForeignKey object registered in admin. user_url = reverse('admin:auth_user_change', args=(self.superuser.pk,)) self.assertContains( response, '<div class="readonly"><a href="%s">super</a></div>' % user_url, html=True, ) # Related ForeignKey with the string primary key registered in admin. 
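        # (Aside: admin change URLs quote their primary keys, so building the URL for a
        # string pk such as '_40' goes through django.contrib.admin.utils.quote(), as in
        # the reverse() call just below.)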
language_url = reverse( 'admin:admin_views_language_change', args=(quote(language.pk),), ) self.assertContains( response, '<div class="readonly"><a href="%s">_40</a></div>' % language_url, html=True, ) # Related ForeignKey object not registered in admin. self.assertContains(response, '<div class="readonly">Chapter 1</div>', html=True) def test_readonly_manytomany_backwards_ref(self): """ Regression test for #16433 - backwards references for related objects broke if the related field is read-only due to the help_text attribute """ topping = Topping.objects.create(name='Salami') pizza = Pizza.objects.create(name='Americano') pizza.toppings.add(topping) response = self.client.get(reverse('admin:admin_views_topping_add')) self.assertEqual(response.status_code, 200) def test_readonly_manytomany_forwards_ref(self): topping = Topping.objects.create(name='Salami') pizza = Pizza.objects.create(name='Americano') pizza.toppings.add(topping) response = self.client.get(reverse('admin:admin_views_pizza_change', args=(pizza.pk,))) self.assertContains(response, '<label>Toppings:</label>', html=True) self.assertContains(response, '<div class="readonly">Salami</div>', html=True) def test_readonly_onetoone_backwards_ref(self): """ Can reference a reverse OneToOneField in ModelAdmin.readonly_fields. """ v1 = Villain.objects.create(name='Adam') pl = Plot.objects.create(name='Test Plot', team_leader=v1, contact=v1) pd = PlotDetails.objects.create(details='Brand New Plot', plot=pl) response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,))) field = self.get_admin_readonly_field(response, 'plotdetails') pd_url = reverse('admin:admin_views_plotdetails_change', args=(pd.pk,)) self.assertEqual(field.contents(), '<a href="%s">Brand New Plot</a>' % pd_url) # The reverse relation also works if the OneToOneField is null. 
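        # (The 'plotdetails' field extracted above is the reverse OneToOne accessor named
        # in readonly_fields; a hedged sketch of the assumed registration, for
        # illustration only:
        #
        #     class PlotProxyAdmin(admin.ModelAdmin):
        #         readonly_fields = ('plotdetails',)
        # )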
pd.plot = None pd.save() response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,))) field = self.get_admin_readonly_field(response, 'plotdetails') self.assertEqual(field.contents(), '-') # default empty value def test_readonly_field_overrides(self): """ Regression test for #22087 - ModelForm Meta overrides are ignored by AdminReadonlyField """ p = FieldOverridePost.objects.create(title="Test Post", content="Test Content") response = self.client.get(reverse('admin:admin_views_fieldoverridepost_change', args=(p.pk,))) self.assertContains(response, '<div class="help">Overridden help text for the date</div>') self.assertContains(response, '<label for="id_public">Overridden public label:</label>', html=True) self.assertNotContains(response, 'Some help text for the date (with Unicode ŠĐĆŽćžšđ)') def test_correct_autoescaping(self): """ Make sure that non-field readonly elements are properly autoescaped (#24461) """ section = Section.objects.create(name='<a>evil</a>') response = self.client.get(reverse('admin:admin_views_section_change', args=(section.pk,))) self.assertNotContains(response, "<a>evil</a>", status_code=200) self.assertContains(response, "&lt;a&gt;evil&lt;/a&gt;", status_code=200) def test_label_suffix_translated(self): pizza = Pizza.objects.create(name='Americano') url = reverse('admin:admin_views_pizza_change', args=(pizza.pk,)) with self.settings(LANGUAGE_CODE='fr'): response = self.client.get(url) self.assertContains(response, '<label>Toppings\u00A0:</label>', html=True) @override_settings(ROOT_URLCONF='admin_views.urls') class LimitChoicesToInAdminTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_limit_choices_to_as_callable(self): """Test for ticket 2445 changes to admin.""" threepwood = Character.objects.create( username='threepwood', last_action=datetime.datetime.today() + datetime.timedelta(days=1), ) marley = Character.objects.create( username='marley', last_action=datetime.datetime.today() - datetime.timedelta(days=1), ) response = self.client.get(reverse('admin:admin_views_stumpjoke_add')) # The allowed option should appear twice; the limited option should not appear. self.assertContains(response, threepwood.username, count=2) self.assertNotContains(response, marley.username) @override_settings(ROOT_URLCONF='admin_views.urls') class RawIdFieldsTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_limit_choices_to(self): """Regression test for 14880""" actor = Actor.objects.create(name="Palin", age=27) Inquisition.objects.create(expected=True, leader=actor, country="England") Inquisition.objects.create(expected=False, leader=actor, country="Spain") response = self.client.get(reverse('admin:admin_views_sketch_add')) # Find the link m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content) self.assertTrue(m) # Got a match popup_url = m[1].decode().replace('&amp;', '&') # Handle relative links popup_url = urljoin(response.request['PATH_INFO'], popup_url) # Get the popup and verify the correct objects show up in the resulting # page. 
This step also tests integers, strings and booleans in the # lookup query string; in model we define inquisition field to have a # limit_choices_to option that includes a filter on a string field # (inquisition__actor__name), a filter on an integer field # (inquisition__actor__age), and a filter on a boolean field # (inquisition__expected). response2 = self.client.get(popup_url) self.assertContains(response2, "Spain") self.assertNotContains(response2, "England") def test_limit_choices_to_isnull_false(self): """Regression test for 20182""" Actor.objects.create(name="Palin", age=27) Actor.objects.create(name="Kilbraken", age=50, title="Judge") response = self.client.get(reverse('admin:admin_views_sketch_add')) # Find the link m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content) self.assertTrue(m) # Got a match popup_url = m[1].decode().replace('&amp;', '&') # Handle relative links popup_url = urljoin(response.request['PATH_INFO'], popup_url) # Get the popup and verify the correct objects show up in the resulting # page. This step tests field__isnull=0 gets parsed correctly from the # lookup query string; in model we define defendant0 field to have a # limit_choices_to option that includes "actor__title__isnull=False". response2 = self.client.get(popup_url) self.assertContains(response2, "Kilbraken") self.assertNotContains(response2, "Palin") def test_limit_choices_to_isnull_true(self): """Regression test for 20182""" Actor.objects.create(name="Palin", age=27) Actor.objects.create(name="Kilbraken", age=50, title="Judge") response = self.client.get(reverse('admin:admin_views_sketch_add')) # Find the link m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content) self.assertTrue(m) # Got a match popup_url = m[1].decode().replace('&amp;', '&') # Handle relative links popup_url = urljoin(response.request['PATH_INFO'], popup_url) # Get the popup and verify the correct objects show up in the resulting # page. This step tests field__isnull=1 gets parsed correctly from the # lookup query string; in model we define defendant1 field to have a # limit_choices_to option that includes "actor__title__isnull=True". response2 = self.client.get(popup_url) self.assertNotContains(response2, "Kilbraken") self.assertContains(response2, "Palin") def test_list_display_method_same_name_as_reverse_accessor(self): """ Should be able to use a ModelAdmin method in list_display that has the same name as a reverse model field ("sketch" in this case). """ actor = Actor.objects.create(name="Palin", age=27) Inquisition.objects.create(expected=True, leader=actor, country="England") response = self.client.get(reverse('admin:admin_views_inquisition_changelist')) self.assertContains(response, 'list-display-sketch') @override_settings(ROOT_URLCONF='admin_views.urls') class UserAdminTest(TestCase): """ Tests user CRUD functionality. 
""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True) cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False) cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True) def setUp(self): self.client.force_login(self.superuser) def test_save_button(self): user_count = User.objects.count() response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', }) new_user = User.objects.get(username='newuser') self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,))) self.assertEqual(User.objects.count(), user_count + 1) self.assertTrue(new_user.has_usable_password()) def test_save_continue_editing_button(self): user_count = User.objects.count() response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', '_continue': '1', }) new_user = User.objects.get(username='newuser') new_user_url = reverse('admin:auth_user_change', args=(new_user.pk,)) self.assertRedirects(response, new_user_url, fetch_redirect_response=False) self.assertEqual(User.objects.count(), user_count + 1) self.assertTrue(new_user.has_usable_password()) response = self.client.get(new_user_url) self.assertContains( response, '<li class="success">The user “<a href="%s">' '%s</a>” was added successfully. 
You may edit it again below.</li>' % (new_user_url, new_user), html=True, ) def test_password_mismatch(self): response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'mismatch', }) self.assertEqual(response.status_code, 200) self.assertFormError(response, 'adminform', 'password', []) self.assertFormError(response, 'adminform', 'password2', ['The two password fields didn’t match.']) def test_user_fk_add_popup(self): """User addition through a FK popup should return the appropriate JavaScript response.""" response = self.client.get(reverse('admin:admin_views_album_add')) self.assertContains(response, reverse('admin:auth_user_add')) self.assertContains(response, 'class="related-widget-wrapper-link add-related" id="add_id_owner"') response = self.client.get(reverse('admin:auth_user_add') + '?%s=1' % IS_POPUP_VAR) self.assertNotContains(response, 'name="_continue"') self.assertNotContains(response, 'name="_addanother"') data = { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', IS_POPUP_VAR: '1', '_save': '1', } response = self.client.post(reverse('admin:auth_user_add') + '?%s=1' % IS_POPUP_VAR, data, follow=True) self.assertContains(response, '&quot;obj&quot;: &quot;newuser&quot;') def test_user_fk_change_popup(self): """User change through a FK popup should return the appropriate JavaScript response.""" response = self.client.get(reverse('admin:admin_views_album_add')) self.assertContains(response, reverse('admin:auth_user_change', args=('__fk__',))) self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"') user = User.objects.get(username='changeuser') url = reverse('admin:auth_user_change', args=(user.pk,)) + '?%s=1' % IS_POPUP_VAR response = self.client.get(url) self.assertNotContains(response, 'name="_continue"') self.assertNotContains(response, 'name="_addanother"') data = { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', 'last_login_0': '2007-05-30', 'last_login_1': '13:20:10', 'date_joined_0': '2007-05-30', 'date_joined_1': '13:20:10', IS_POPUP_VAR: '1', '_save': '1', } response = self.client.post(url, data, follow=True) self.assertContains(response, '&quot;obj&quot;: &quot;newuser&quot;') self.assertContains(response, '&quot;action&quot;: &quot;change&quot;') def test_user_fk_delete_popup(self): """User deletion through a FK popup should return the appropriate JavaScript response.""" response = self.client.get(reverse('admin:admin_views_album_add')) self.assertContains(response, reverse('admin:auth_user_delete', args=('__fk__',))) self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"') user = User.objects.get(username='changeuser') url = reverse('admin:auth_user_delete', args=(user.pk,)) + '?%s=1' % IS_POPUP_VAR response = self.client.get(url) self.assertEqual(response.status_code, 200) data = { 'post': 'yes', IS_POPUP_VAR: '1', } response = self.client.post(url, data, follow=True) self.assertContains(response, '&quot;action&quot;: &quot;delete&quot;') def test_save_add_another_button(self): user_count = User.objects.count() response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', '_addanother': '1', }) new_user = User.objects.order_by('-id')[0] self.assertRedirects(response, reverse('admin:auth_user_add')) self.assertEqual(User.objects.count(), user_count + 1) 
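        # A brief aside on the *_popup tests above: opening the add/change/delete views
        # with IS_POPUP_VAR both in the query string and in the POST data makes the admin
        # render the popup response template, which embeds the escaped JSON blob (the
        # '&quot;obj&quot;' / '&quot;action&quot;' fragments) consumed by the opener's
        # related-widget JavaScript. A minimal, hypothetical POST of that shape:
        #
        #     data = {'username': 'x', 'password1': 'pw', 'password2': 'pw',
        #             IS_POPUP_VAR: '1', '_save': '1'}
        #     self.client.post(reverse('admin:auth_user_add') + '?%s=1' % IS_POPUP_VAR,
        #                      data, follow=True)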
self.assertTrue(new_user.has_usable_password()) def test_user_permission_performance(self): u = User.objects.all()[0] # Don't depend on a warm cache, see #17377. ContentType.objects.clear_cache() with self.assertNumQueries(10): response = self.client.get(reverse('admin:auth_user_change', args=(u.pk,))) self.assertEqual(response.status_code, 200) def test_form_url_present_in_context(self): u = User.objects.all()[0] response = self.client.get(reverse('admin3:auth_user_password_change', args=(u.pk,))) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['form_url'], 'pony') @override_settings(ROOT_URLCONF='admin_views.urls') class GroupAdminTest(TestCase): """ Tests group CRUD functionality. """ @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_save_button(self): group_count = Group.objects.count() response = self.client.post(reverse('admin:auth_group_add'), { 'name': 'newgroup', }) Group.objects.order_by('-id')[0] self.assertRedirects(response, reverse('admin:auth_group_changelist')) self.assertEqual(Group.objects.count(), group_count + 1) def test_group_permission_performance(self): g = Group.objects.create(name="test_group") # Ensure no queries are skipped due to cached content type for Group. ContentType.objects.clear_cache() with self.assertNumQueries(8): response = self.client.get(reverse('admin:auth_group_change', args=(g.pk,))) self.assertEqual(response.status_code, 200) @override_settings(ROOT_URLCONF='admin_views.urls') class CSSTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def setUp(self): self.client.force_login(self.superuser) def test_field_prefix_css_classes(self): """ Fields have a CSS class name with a 'field-' prefix. """ response = self.client.get(reverse('admin:admin_views_post_add')) # The main form self.assertContains(response, 'class="form-row field-title"') self.assertContains(response, 'class="form-row field-content"') self.assertContains(response, 'class="form-row field-public"') self.assertContains(response, 'class="form-row field-awesomeness_level"') self.assertContains(response, 'class="form-row field-coolness"') self.assertContains(response, 'class="form-row field-value"') self.assertContains(response, 'class="form-row"') # The lambda function # The tabular inline self.assertContains(response, '<td class="field-url">') self.assertContains(response, '<td class="field-posted">') def test_index_css_classes(self): """ CSS class names are used for each app and model on the admin index pages (#17050). 
""" # General index page response = self.client.get(reverse('admin:index')) self.assertContains(response, '<div class="app-admin_views module') self.assertContains(response, '<tr class="model-actor">') self.assertContains(response, '<tr class="model-album">') # App index page response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertContains(response, '<div class="app-admin_views module') self.assertContains(response, '<tr class="model-actor">') self.assertContains(response, '<tr class="model-album">') def test_app_model_in_form_body_class(self): """ Ensure app and model tag are correctly read by change_form template """ response = self.client.get(reverse('admin:admin_views_section_add')) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_app_model_in_list_body_class(self): """ Ensure app and model tag are correctly read by change_list template """ response = self.client.get(reverse('admin:admin_views_section_changelist')) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_app_model_in_delete_confirmation_body_class(self): """ Ensure app and model tag are correctly read by delete_confirmation template """ response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_app_model_in_app_index_body_class(self): """ Ensure app and model tag are correctly read by app_index template """ response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertContains(response, '<body class=" dashboard app-admin_views') def test_app_model_in_delete_selected_confirmation_body_class(self): """ Ensure app and model tag are correctly read by delete_selected_confirmation template """ action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk], 'action': 'delete_selected', 'index': 0, } response = self.client.post(reverse('admin:admin_views_section_changelist'), action_data) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_changelist_field_classes(self): """ Cells of the change list table should contain the field name in their class attribute Refs #11195. 
""" Podcast.objects.create(name="Django Dose", release_date=datetime.date.today()) response = self.client.get(reverse('admin:admin_views_podcast_changelist')) self.assertContains(response, '<th class="field-name">') self.assertContains(response, '<td class="field-release_date nowrap">') self.assertContains(response, '<td class="action-checkbox">') try: import docutils except ImportError: docutils = None @unittest.skipUnless(docutils, "no docutils installed.") @override_settings(ROOT_URLCONF='admin_views.urls') @modify_settings(INSTALLED_APPS={'append': ['django.contrib.admindocs', 'django.contrib.flatpages']}) class AdminDocsTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_tags(self): response = self.client.get(reverse('django-admindocs-tags')) # The builtin tag group exists self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True) # A builtin tag exists in both the index and detail self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True) self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True) # An app tag exists in both the index and detail self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True) self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True) # The admin list tag group exists self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True) # An admin list tag exists in both the index and detail self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True) self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True) def test_filters(self): response = self.client.get(reverse('django-admindocs-filters')) # The builtin filter group exists self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True) # A builtin filter exists in both the index and detail self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True) self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True) @override_settings( ROOT_URLCONF='admin_views.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }], USE_I18N=False, ) class ValidXHTMLTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_lang_name_present(self): response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertNotContains(response, ' lang=""') self.assertNotContains(response, ' xml:lang=""') @override_settings(ROOT_URLCONF='admin_views.urls', USE_THOUSAND_SEPARATOR=True, USE_L10N=True) class DateHierarchyTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def assert_non_localized_year(self, response, year): """ 
The year is not localized with USE_THOUSAND_SEPARATOR (#15234). """ self.assertNotContains(response, formats.number_format(year)) def assert_contains_year_link(self, response, date): self.assertContains(response, '?release_date__year=%d"' % date.year) def assert_contains_month_link(self, response, date): self.assertContains( response, '?release_date__month=%d&amp;release_date__year=%d"' % ( date.month, date.year)) def assert_contains_day_link(self, response, date): self.assertContains( response, '?release_date__day=%d&amp;' 'release_date__month=%d&amp;release_date__year=%d"' % ( date.day, date.month, date.year)) def test_empty(self): """ No date hierarchy links display with empty changelist. """ response = self.client.get( reverse('admin:admin_views_podcast_changelist')) self.assertNotContains(response, 'release_date__year=') self.assertNotContains(response, 'release_date__month=') self.assertNotContains(response, 'release_date__day=') def test_single(self): """ Single day-level date hierarchy appears for single object. """ DATE = datetime.date(2000, 6, 30) Podcast.objects.create(release_date=DATE) url = reverse('admin:admin_views_podcast_changelist') response = self.client.get(url) self.assert_contains_day_link(response, DATE) self.assert_non_localized_year(response, 2000) def test_within_month(self): """ day-level links appear for changelist within single month. """ DATES = (datetime.date(2000, 6, 30), datetime.date(2000, 6, 15), datetime.date(2000, 6, 3)) for date in DATES: Podcast.objects.create(release_date=date) url = reverse('admin:admin_views_podcast_changelist') response = self.client.get(url) for date in DATES: self.assert_contains_day_link(response, date) self.assert_non_localized_year(response, 2000) def test_within_year(self): """ month-level links appear for changelist within single year. """ DATES = (datetime.date(2000, 1, 30), datetime.date(2000, 3, 15), datetime.date(2000, 5, 3)) for date in DATES: Podcast.objects.create(release_date=date) url = reverse('admin:admin_views_podcast_changelist') response = self.client.get(url) # no day-level links self.assertNotContains(response, 'release_date__day=') for date in DATES: self.assert_contains_month_link(response, date) self.assert_non_localized_year(response, 2000) def test_multiple_years(self): """ year-level links appear for year-spanning changelist. 
""" DATES = (datetime.date(2001, 1, 30), datetime.date(2003, 3, 15), datetime.date(2005, 5, 3)) for date in DATES: Podcast.objects.create(release_date=date) response = self.client.get( reverse('admin:admin_views_podcast_changelist')) # no day/month-level links self.assertNotContains(response, 'release_date__day=') self.assertNotContains(response, 'release_date__month=') for date in DATES: self.assert_contains_year_link(response, date) # and make sure GET parameters still behave correctly for date in DATES: url = '%s?release_date__year=%d' % ( reverse('admin:admin_views_podcast_changelist'), date.year) response = self.client.get(url) self.assert_contains_month_link(response, date) self.assert_non_localized_year(response, 2000) self.assert_non_localized_year(response, 2003) self.assert_non_localized_year(response, 2005) url = '%s?release_date__year=%d&release_date__month=%d' % ( reverse('admin:admin_views_podcast_changelist'), date.year, date.month) response = self.client.get(url) self.assert_contains_day_link(response, date) self.assert_non_localized_year(response, 2000) self.assert_non_localized_year(response, 2003) self.assert_non_localized_year(response, 2005) def test_related_field(self): questions_data = ( # (posted data, number of answers), (datetime.date(2001, 1, 30), 0), (datetime.date(2003, 3, 15), 1), (datetime.date(2005, 5, 3), 2), ) for date, answer_count in questions_data: question = Question.objects.create(posted=date) for i in range(answer_count): question.answer_set.create() response = self.client.get(reverse('admin:admin_views_answer_changelist')) for date, answer_count in questions_data: link = '?question__posted__year=%d"' % date.year if answer_count > 0: self.assertContains(response, link) else: self.assertNotContains(response, link) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminCustomSaveRelatedTests(TestCase): """ One can easily customize the way related objects are saved. Refs #16115. 
""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_should_be_able_to_edit_related_objects_on_add_view(self): post = { 'child_set-TOTAL_FORMS': '3', 'child_set-INITIAL_FORMS': '0', 'name': 'Josh Stone', 'child_set-0-name': 'Paul', 'child_set-1-name': 'Catherine', } self.client.post(reverse('admin:admin_views_parent_add'), post) self.assertEqual(1, Parent.objects.count()) self.assertEqual(2, Child.objects.count()) children_names = list(Child.objects.order_by('name').values_list('name', flat=True)) self.assertEqual('Josh Stone', Parent.objects.latest('id').name) self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names) def test_should_be_able_to_edit_related_objects_on_change_view(self): parent = Parent.objects.create(name='Josh Stone') paul = Child.objects.create(parent=parent, name='Paul') catherine = Child.objects.create(parent=parent, name='Catherine') post = { 'child_set-TOTAL_FORMS': '5', 'child_set-INITIAL_FORMS': '2', 'name': 'Josh Stone', 'child_set-0-name': 'Paul', 'child_set-0-id': paul.id, 'child_set-1-name': 'Catherine', 'child_set-1-id': catherine.id, } self.client.post(reverse('admin:admin_views_parent_change', args=(parent.id,)), post) children_names = list(Child.objects.order_by('name').values_list('name', flat=True)) self.assertEqual('Josh Stone', Parent.objects.latest('id').name) self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names) def test_should_be_able_to_edit_related_objects_on_changelist_view(self): parent = Parent.objects.create(name='Josh Rock') Child.objects.create(parent=parent, name='Paul') Child.objects.create(parent=parent, name='Catherine') post = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '0', 'form-0-id': parent.id, 'form-0-name': 'Josh Stone', '_save': 'Save' } self.client.post(reverse('admin:admin_views_parent_changelist'), post) children_names = list(Child.objects.order_by('name').values_list('name', flat=True)) self.assertEqual('Josh Stone', Parent.objects.latest('id').name) self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewLogoutTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def test_logout(self): self.client.force_login(self.superuser) response = self.client.get(reverse('admin:logout')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'registration/logged_out.html') self.assertEqual(response.request['PATH_INFO'], reverse('admin:logout')) self.assertFalse(response.context['has_permission']) self.assertNotContains(response, 'user-tools') # user-tools div shouldn't visible. def test_client_logout_url_can_be_used_to_login(self): response = self.client.get(reverse('admin:logout')) self.assertEqual(response.status_code, 302) # we should be redirected to the login page. # follow the redirect and test results. 
response = self.client.get(reverse('admin:logout'), follow=True) self.assertContains( response, '<input type="hidden" name="next" value="%s">' % reverse('admin:index'), ) self.assertTemplateUsed(response, 'admin/login.html') self.assertEqual(response.request['PATH_INFO'], reverse('admin:login')) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminUserMessageTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def send_message(self, level): """ Helper that sends a post to the dummy test methods and asserts that a message with the level has appeared in the response. """ action_data = { ACTION_CHECKBOX_NAME: [1], 'action': 'message_%s' % level, 'index': 0, } response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'), action_data, follow=True) self.assertContains(response, '<li class="%s">Test %s</li>' % (level, level), html=True) @override_settings(MESSAGE_LEVEL=10) # Set to DEBUG for this request def test_message_debug(self): self.send_message('debug') def test_message_info(self): self.send_message('info') def test_message_success(self): self.send_message('success') def test_message_warning(self): self.send_message('warning') def test_message_error(self): self.send_message('error') def test_message_extra_tags(self): action_data = { ACTION_CHECKBOX_NAME: [1], 'action': 'message_extra_tags', 'index': 0, } response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'), action_data, follow=True) self.assertContains(response, '<li class="extra_tag info">Test tags</li>', html=True) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminKeepChangeListFiltersTests(TestCase): admin_site = site @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret') def setUp(self): self.client.force_login(self.superuser) def assertURLEqual(self, url1, url2, msg_prefix=''): """ Assert that two URLs are equal despite the ordering of their querystring. Refs #22360. """ parsed_url1 = urlparse(url1) path1 = parsed_url1.path parsed_qs1 = dict(parse_qsl(parsed_url1.query)) parsed_url2 = urlparse(url2) path2 = parsed_url2.path parsed_qs2 = dict(parse_qsl(parsed_url2.query)) for parsed_qs in [parsed_qs1, parsed_qs2]: if '_changelist_filters' in parsed_qs: changelist_filters = parsed_qs['_changelist_filters'] parsed_filters = dict(parse_qsl(changelist_filters)) parsed_qs['_changelist_filters'] = parsed_filters self.assertEqual(path1, path2) self.assertEqual(parsed_qs1, parsed_qs2) def test_assert_url_equal(self): # Test equality. change_user_url = reverse('admin:auth_user_change', args=(self.joepublicuser.pk,)) self.assertURLEqual( 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ), 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ) ) # Test inequality. with self.assertRaises(AssertionError): self.assertURLEqual( 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ), 'http://testserver{}?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'.format( change_user_url ) ) # Ignore scheme and host. 
self.assertURLEqual( 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ), '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url) ) # Ignore ordering of querystring. self.assertURLEqual( '{}?is_staff__exact=0&is_superuser__exact=0'.format(reverse('admin:auth_user_changelist')), '{}?is_superuser__exact=0&is_staff__exact=0'.format(reverse('admin:auth_user_changelist')) ) # Ignore ordering of _changelist_filters. self.assertURLEqual( '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url), '{}?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'.format(change_user_url) ) def get_changelist_filters(self): return { 'is_superuser__exact': 0, 'is_staff__exact': 0, } def get_changelist_filters_querystring(self): return urlencode(self.get_changelist_filters()) def get_preserved_filters_querystring(self): return urlencode({ '_changelist_filters': self.get_changelist_filters_querystring() }) def get_sample_user_id(self): return self.joepublicuser.pk def get_changelist_url(self): return '%s?%s' % ( reverse('admin:auth_user_changelist', current_app=self.admin_site.name), self.get_changelist_filters_querystring(), ) def get_add_url(self, add_preserved_filters=True): url = reverse('admin:auth_user_add', current_app=self.admin_site.name) if add_preserved_filters: url = '%s?%s' % (url, self.get_preserved_filters_querystring()) return url def get_change_url(self, user_id=None, add_preserved_filters=True): if user_id is None: user_id = self.get_sample_user_id() url = reverse('admin:auth_user_change', args=(user_id,), current_app=self.admin_site.name) if add_preserved_filters: url = '%s?%s' % (url, self.get_preserved_filters_querystring()) return url def get_history_url(self, user_id=None): if user_id is None: user_id = self.get_sample_user_id() return "%s?%s" % ( reverse('admin:auth_user_history', args=(user_id,), current_app=self.admin_site.name), self.get_preserved_filters_querystring(), ) def get_delete_url(self, user_id=None): if user_id is None: user_id = self.get_sample_user_id() return "%s?%s" % ( reverse('admin:auth_user_delete', args=(user_id,), current_app=self.admin_site.name), self.get_preserved_filters_querystring(), ) def test_changelist_view(self): response = self.client.get(self.get_changelist_url()) self.assertEqual(response.status_code, 200) # Check the `change_view` link has the correct querystring. detail_link = re.search( '<a href="(.*?)">{}</a>'.format(self.joepublicuser.username), response.content.decode() ) self.assertURLEqual(detail_link[1], self.get_change_url()) def test_change_view(self): # Get the `change_view`. response = self.client.get(self.get_change_url()) self.assertEqual(response.status_code, 200) # Check the form action. form_action = re.search( '<form action="(.*?)" method="post" id="user_form" novalidate>', response.content.decode() ) self.assertURLEqual(form_action[1], '?%s' % self.get_preserved_filters_querystring()) # Check the history link. history_link = re.search( '<a href="(.*?)" class="historylink">History</a>', response.content.decode() ) self.assertURLEqual(history_link[1], self.get_history_url()) # Check the delete link. delete_link = re.search( '<a href="(.*?)" class="deletelink">Delete</a>', response.content.decode() ) self.assertURLEqual(delete_link[1], self.get_delete_url()) # Test redirect on "Save". 
post_data = { 'username': 'joepublic', 'last_login_0': '2007-05-30', 'last_login_1': '13:20:10', 'date_joined_0': '2007-05-30', 'date_joined_1': '13:20:10', } post_data['_save'] = 1 response = self.client.post(self.get_change_url(), data=post_data) self.assertRedirects(response, self.get_changelist_url()) post_data.pop('_save') # Test redirect on "Save and continue". post_data['_continue'] = 1 response = self.client.post(self.get_change_url(), data=post_data) self.assertRedirects(response, self.get_change_url()) post_data.pop('_continue') # Test redirect on "Save and add new". post_data['_addanother'] = 1 response = self.client.post(self.get_change_url(), data=post_data) self.assertRedirects(response, self.get_add_url()) post_data.pop('_addanother') def test_change_view_without_preserved_filters(self): response = self.client.get(self.get_change_url(add_preserved_filters=False)) # The action attribute is omitted. self.assertContains(response, '<form method="post" id="user_form" novalidate>') def test_add_view(self): # Get the `add_view`. response = self.client.get(self.get_add_url()) self.assertEqual(response.status_code, 200) # Check the form action. form_action = re.search( '<form action="(.*?)" method="post" id="user_form" novalidate>', response.content.decode() ) self.assertURLEqual(form_action[1], '?%s' % self.get_preserved_filters_querystring()) post_data = { 'username': 'dummy', 'password1': 'test', 'password2': 'test', } # Test redirect on "Save". post_data['_save'] = 1 response = self.client.post(self.get_add_url(), data=post_data) self.assertRedirects(response, self.get_change_url(User.objects.get(username='dummy').pk)) post_data.pop('_save') # Test redirect on "Save and continue". post_data['username'] = 'dummy2' post_data['_continue'] = 1 response = self.client.post(self.get_add_url(), data=post_data) self.assertRedirects(response, self.get_change_url(User.objects.get(username='dummy2').pk)) post_data.pop('_continue') # Test redirect on "Save and add new". post_data['username'] = 'dummy3' post_data['_addanother'] = 1 response = self.client.post(self.get_add_url(), data=post_data) self.assertRedirects(response, self.get_add_url()) post_data.pop('_addanother') def test_add_view_without_preserved_filters(self): response = self.client.get(self.get_add_url(add_preserved_filters=False)) # The action attribute is omitted. self.assertContains(response, '<form method="post" id="user_form" novalidate>') def test_delete_view(self): # Test redirect on "Delete". response = self.client.post(self.get_delete_url(), {'post': 'yes'}) self.assertRedirects(response, self.get_changelist_url()) def test_url_prefix(self): context = { 'preserved_filters': self.get_preserved_filters_querystring(), 'opts': User._meta, } prefixes = ('', '/prefix/', '/後台/') for prefix in prefixes: with self.subTest(prefix=prefix), override_script_prefix(prefix): url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name) self.assertURLEqual( self.get_changelist_url(), add_preserved_filters(context, url), ) class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests): admin_site = site2 @override_settings(ROOT_URLCONF='admin_views.urls') class TestLabelVisibility(TestCase): """ #11277 -Labels of hidden fields in admin were not hidden. 
""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_all_fields_visible(self): response = self.client.get(reverse('admin:admin_views_emptymodelvisible_add')) self.assert_fieldline_visible(response) self.assert_field_visible(response, 'first') self.assert_field_visible(response, 'second') def test_all_fields_hidden(self): response = self.client.get(reverse('admin:admin_views_emptymodelhidden_add')) self.assert_fieldline_hidden(response) self.assert_field_hidden(response, 'first') self.assert_field_hidden(response, 'second') def test_mixin(self): response = self.client.get(reverse('admin:admin_views_emptymodelmixin_add')) self.assert_fieldline_visible(response) self.assert_field_hidden(response, 'first') self.assert_field_visible(response, 'second') def assert_field_visible(self, response, field_name): self.assertContains(response, '<div class="fieldBox field-%s">' % field_name) def assert_field_hidden(self, response, field_name): self.assertContains(response, '<div class="fieldBox field-%s hidden">' % field_name) def assert_fieldline_visible(self, response): self.assertContains(response, '<div class="form-row field-first field-second">') def assert_fieldline_hidden(self, response): self.assertContains(response, '<div class="form-row hidden') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewOnSiteTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = State.objects.create(name='New York') cls.s2 = State.objects.create(name='Illinois') cls.s3 = State.objects.create(name='California') cls.c1 = City.objects.create(state=cls.s1, name='New York') cls.c2 = City.objects.create(state=cls.s2, name='Chicago') cls.c3 = City.objects.create(state=cls.s3, name='San Francisco') cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza') cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard') cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner') cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels') cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away') cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant') cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi') cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi') cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe') def setUp(self): self.client.force_login(self.superuser) def test_add_view_form_and_formsets_run_validation(self): """ Issue #20522 Verifying that if the parent form fails validation, the inlines also run validation even if validation is contingent on parent form data. Also, assertFormError() and assertFormsetError() is usable for admin forms and formsets. 
""" # The form validation should fail because 'some_required_info' is # not included on the parent form, and the family_name of the parent # does not match that of the child post_data = { 'family_name': 'Test1', 'dependentchild_set-TOTAL_FORMS': '1', 'dependentchild_set-INITIAL_FORMS': '0', 'dependentchild_set-MAX_NUM_FORMS': '1', 'dependentchild_set-0-id': '', 'dependentchild_set-0-parent': '', 'dependentchild_set-0-family_name': 'Test2', } response = self.client.post(reverse('admin:admin_views_parentwithdependentchildren_add'), post_data) self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.']) msg = "The form 'adminform' in context 0 does not contain the non-field error 'Error'" with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'adminform', None, ['Error']) self.assertFormsetError( response, 'inline_admin_formset', 0, None, ['Children must share a family name with their parents in this contrived test case'] ) msg = "The formset 'inline_admin_formset' in context 12 does not contain any non-form errors." with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(response, 'inline_admin_formset', None, None, ['Error']) def test_change_view_form_and_formsets_run_validation(self): """ Issue #20522 Verifying that if the parent form fails validation, the inlines also run validation even if validation is contingent on parent form data """ pwdc = ParentWithDependentChildren.objects.create(some_required_info=6, family_name='Test1') # The form validation should fail because 'some_required_info' is # not included on the parent form, and the family_name of the parent # does not match that of the child post_data = { 'family_name': 'Test2', 'dependentchild_set-TOTAL_FORMS': '1', 'dependentchild_set-INITIAL_FORMS': '0', 'dependentchild_set-MAX_NUM_FORMS': '1', 'dependentchild_set-0-id': '', 'dependentchild_set-0-parent': str(pwdc.id), 'dependentchild_set-0-family_name': 'Test1', } response = self.client.post( reverse('admin:admin_views_parentwithdependentchildren_change', args=(pwdc.id,)), post_data ) self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.']) self.assertFormsetError( response, 'inline_admin_formset', 0, None, ['Children must share a family name with their parents in this contrived test case'] ) def test_check(self): "The view_on_site value is either a boolean or a callable" try: admin = CityAdmin(City, AdminSite()) CityAdmin.view_on_site = True self.assertEqual(admin.check(), []) CityAdmin.view_on_site = False self.assertEqual(admin.check(), []) CityAdmin.view_on_site = lambda obj: obj.get_absolute_url() self.assertEqual(admin.check(), []) CityAdmin.view_on_site = [] self.assertEqual(admin.check(), [ Error( "The value of 'view_on_site' must be a callable or a boolean value.", obj=CityAdmin, id='admin.E025', ), ]) finally: # Restore the original values for the benefit of other tests. 
CityAdmin.view_on_site = True def test_false(self): "The 'View on site' button is not displayed if view_on_site is False" response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,))) content_type_pk = ContentType.objects.get_for_model(Restaurant).pk self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1))) def test_true(self): "The default behavior is followed if view_on_site is True" response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,))) content_type_pk = ContentType.objects.get_for_model(City).pk self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk))) def test_callable(self): "The right link is displayed if view_on_site is a callable" response = self.client.get(reverse('admin:admin_views_worker_change', args=(self.w1.pk,))) self.assertContains(response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name)) def test_missing_get_absolute_url(self): "None is returned if model doesn't have get_absolute_url" model_admin = ModelAdmin(Worker, None) self.assertIsNone(model_admin.get_view_on_site_url(Worker())) @override_settings(ROOT_URLCONF='admin_views.urls') class InlineAdminViewOnSiteTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = State.objects.create(name='New York') cls.s2 = State.objects.create(name='Illinois') cls.s3 = State.objects.create(name='California') cls.c1 = City.objects.create(state=cls.s1, name='New York') cls.c2 = City.objects.create(state=cls.s2, name='Chicago') cls.c3 = City.objects.create(state=cls.s3, name='San Francisco') cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza') cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard') cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner') cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels') cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away') cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant') cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi') cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi') cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe') def setUp(self): self.client.force_login(self.superuser) def test_false(self): "The 'View on site' button is not displayed if view_on_site is False" response = self.client.get(reverse('admin:admin_views_state_change', args=(self.s1.pk,))) content_type_pk = ContentType.objects.get_for_model(City).pk self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk))) def test_true(self): "The 'View on site' button is displayed if view_on_site is True" response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,))) content_type_pk = ContentType.objects.get_for_model(Restaurant).pk self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.r1.pk))) def test_callable(self): "The right link is displayed if view_on_site is a callable" response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,))) self.assertContains(response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name)) @override_settings(ROOT_URLCONF='admin_views.urls') class GetFormsetsWithInlinesArgumentTest(TestCase): """ #23934 - When adding a new model instance in the admin, 
the 'obj' argument of get_formsets_with_inlines() should be None. When
    changing, it should be equal to the existing model instance. The
    GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
    if obj is not None during add_view or obj is None during change_view.
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_explicitly_provided_pk(self):
        post_data = {'name': '1'}
        response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_add'), post_data)
        self.assertEqual(response.status_code, 302)

        post_data = {'name': '2'}
        response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_change', args=(1,)), post_data)
        self.assertEqual(response.status_code, 302)

    def test_implicitly_generated_pk(self):
        post_data = {'name': '1'}
        response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_add'), post_data)
        self.assertEqual(response.status_code, 302)

        post_data = {'name': '2'}
        response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_change', args=(1,)), post_data)
        self.assertEqual(response.status_code, 302)


@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminSiteFinalCatchAllPatternTests(TestCase):
    """
    Verifies the behaviour of the admin catch-all view.

    * Anonymous/non-staff users are redirected to login for all URLs, whether
      otherwise valid or not.
    * APPEND_SLASH is applied for staff if needed.
    * Otherwise Http404.
    * Catch-all view disabled via AdminSite.final_catch_all_view.
    """
    def test_unknown_url_redirects_login_if_not_authenticated(self):
        unknown_url = '/test_admin/admin/unknown/'
        response = self.client.get(unknown_url)
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), unknown_url))

    def test_unknown_url_404_if_authenticated(self):
        superuser = User.objects.create_superuser(
            username='super',
            password='secret',
            email='[email protected]',
        )
        self.client.force_login(superuser)
        unknown_url = '/test_admin/admin/unknown/'
        response = self.client.get(unknown_url)
        self.assertEqual(response.status_code, 404)

    def test_known_url_redirects_login_if_not_authenticated(self):
        known_url = reverse('admin:admin_views_article_changelist')
        response = self.client.get(known_url)
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), known_url))

    def test_known_url_missing_slash_redirects_login_if_not_authenticated(self):
        known_url = reverse('admin:admin_views_article_changelist')[:-1]
        response = self.client.get(known_url)
        # Redirects with the next URL also missing the slash.
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), known_url))

    def test_non_admin_url_shares_url_prefix(self):
        url = reverse('non_admin')[:-1]
        response = self.client.get(url)
        # Redirects with the next URL also missing the slash.
self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), url)) def test_url_without_trailing_slash_if_not_authenticated(self): url = reverse('admin:article_extra_json') response = self.client.get(url) self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), url)) def test_unkown_url_without_trailing_slash_if_not_authenticated(self): url = reverse('admin:article_extra_json')[:-1] response = self.client.get(url) self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), url)) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_unknown_url(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) unknown_url = '/test_admin/admin/unknown/' response = self.client.get(unknown_url[:-1]) self.assertEqual(response.status_code, 404) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects(response, known_url, status_code=301, target_status_code=403) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_script_name(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin:admin_views_article_changelist') response = self.client.get(known_url[:-1], SCRIPT_NAME='/prefix/') self.assertRedirects( response, '/prefix' + known_url, status_code=301, fetch_redirect_response=False, ) @override_settings(APPEND_SLASH=True, FORCE_SCRIPT_NAME='/prefix/') def test_missing_slash_append_slash_true_force_script_name(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects( response, '/prefix' + known_url, status_code=301, fetch_redirect_response=False, ) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_non_staff_user(self): user = User.objects.create_user( username='user', password='secret', email='[email protected]', is_staff=False, ) self.client.force_login(user) known_url = reverse('admin:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects(response, '/test_admin/admin/login/?next=/test_admin/admin/admin_views/article') @override_settings(APPEND_SLASH=False) def test_missing_slash_append_slash_false(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertEqual(response.status_code, 404) @override_settings(APPEND_SLASH=True) def test_single_model_no_append_slash(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin9:admin_views_actor_changelist') response = self.client.get(known_url[:-1]) self.assertEqual(response.status_code, 404) # Same tests above with 
final_catch_all_view=False. def test_unknown_url_404_if_not_authenticated_without_final_catch_all_view(self): unknown_url = '/test_admin/admin10/unknown/' response = self.client.get(unknown_url) self.assertEqual(response.status_code, 404) def test_unknown_url_404_if_authenticated_without_final_catch_all_view(self): superuser = User.objects.create_superuser( username='super', password='secret', email='[email protected]', ) self.client.force_login(superuser) unknown_url = '/test_admin/admin10/unknown/' response = self.client.get(unknown_url) self.assertEqual(response.status_code, 404) def test_known_url_redirects_login_if_not_authenticated_without_final_catch_all_view(self): known_url = reverse('admin10:admin_views_article_changelist') response = self.client.get(known_url) self.assertRedirects(response, '%s?next=%s' % (reverse('admin10:login'), known_url)) def test_known_url_missing_slash_redirects_with_slash_if_not_authenticated_without_final_catch_all_view(self): known_url = reverse('admin10:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects(response, known_url, status_code=301, fetch_redirect_response=False) def test_non_admin_url_shares_url_prefix_without_final_catch_all_view(self): url = reverse('non_admin10') response = self.client.get(url[:-1]) self.assertRedirects(response, url, status_code=301) def test_url_without_trailing_slash_if_not_authenticated_without_final_catch_all_view(self): url = reverse('admin10:article_extra_json') response = self.client.get(url) self.assertRedirects(response, '%s?next=%s' % (reverse('admin10:login'), url)) def test_unkown_url_without_trailing_slash_if_not_authenticated_without_final_catch_all_view(self): url = reverse('admin10:article_extra_json')[:-1] response = self.client.get(url) # Matches test_admin/admin10/admin_views/article/<path:object_id>/ self.assertRedirects(response, url + '/', status_code=301, fetch_redirect_response=False) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_unknown_url_without_final_catch_all_view(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) unknown_url = '/test_admin/admin10/unknown/' response = self.client.get(unknown_url[:-1]) self.assertEqual(response.status_code, 404) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_without_final_catch_all_view(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin10:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects(response, known_url, status_code=301, target_status_code=403) @override_settings(APPEND_SLASH=False) def test_missing_slash_append_slash_false_without_final_catch_all_view(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin10:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertEqual(response.status_code, 404) # Outside admin. def test_non_admin_url_404_if_not_authenticated(self): unknown_url = '/unknown/' response = self.client.get(unknown_url) # Does not redirect to the admin login. self.assertEqual(response.status_code, 404)
import os import re import types from datetime import datetime, timedelta from decimal import Decimal from unittest import TestCase, mock from django.core.exceptions import ValidationError from django.core.files.base import ContentFile from django.core.validators import ( BaseValidator, DecimalValidator, EmailValidator, FileExtensionValidator, MaxLengthValidator, MaxValueValidator, MinLengthValidator, MinValueValidator, ProhibitNullCharactersValidator, RegexValidator, URLValidator, int_list_validator, validate_comma_separated_integer_list, validate_email, validate_image_file_extension, validate_integer, validate_ipv4_address, validate_ipv6_address, validate_ipv46_address, validate_slug, validate_unicode_slug, ) from django.test import SimpleTestCase, ignore_warnings from django.utils.deprecation import RemovedInDjango41Warning try: from PIL import Image # noqa except ImportError: PILLOW_IS_INSTALLED = False else: PILLOW_IS_INSTALLED = True NOW = datetime.now() EXTENDED_SCHEMES = ['http', 'https', 'ftp', 'ftps', 'git', 'file', 'git+ssh'] TEST_DATA = [ # (validator, value, expected), (validate_integer, '42', None), (validate_integer, '-42', None), (validate_integer, -42, None), (validate_integer, -42.5, ValidationError), (validate_integer, None, ValidationError), (validate_integer, 'a', ValidationError), (validate_integer, '\n42', ValidationError), (validate_integer, '42\n', ValidationError), (validate_email, '[email protected]', None), (validate_email, '[email protected]', None), (validate_email, 'email@[127.0.0.1]', None), (validate_email, 'email@[2001:dB8::1]', None), (validate_email, 'email@[2001:dB8:0:0:0:0:0:1]', None), (validate_email, 'email@[::fffF:127.0.0.1]', None), (validate_email, '[email protected]', None), (validate_email, '[email protected]', None), (validate_email, '[email protected].उदाहरण.परीक्षा', None), (validate_email, 'email@localhost', None), (EmailValidator(allowlist=['localdomain']), 'email@localdomain', None), (validate_email, '"test@test"@example.com', None), (validate_email, 'example@atm.%s' % ('a' * 63), None), (validate_email, 'example@%s.atm' % ('a' * 63), None), (validate_email, 'example@%s.%s.atm' % ('a' * 63, 'b' * 10), None), (validate_email, 'example@atm.%s' % ('a' * 64), ValidationError), (validate_email, 'example@%s.atm.%s' % ('b' * 64, 'a' * 63), ValidationError), (validate_email, None, ValidationError), (validate_email, '', ValidationError), (validate_email, 'abc', ValidationError), (validate_email, 'abc@', ValidationError), (validate_email, 'abc@bar', ValidationError), (validate_email, 'a @x.cz', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, 'something@@somewhere.com', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, 'email@[127.0.0.256]', ValidationError), (validate_email, 'email@[2001:db8::12345]', ValidationError), (validate_email, 'email@[2001:db8:0:0:0:0:1]', ValidationError), (validate_email, 'email@[::ffff:127.0.0.256]', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, '[email protected]\n\n<script src="x.js">', ValidationError), # Quoted-string format (CR not allowed) (validate_email, '"\\\011"@here.com', None), (validate_email, '"\\\012"@here.com', ValidationError), (validate_email, '[email 
protected].', ValidationError), # Max length of domain name labels is 63 characters per RFC 1034. (validate_email, 'a@%s.us' % ('a' * 63), None), (validate_email, 'a@%s.us' % ('a' * 64), ValidationError), # Trailing newlines in username or domain not allowed (validate_email, '[email protected]\n', ValidationError), (validate_email, 'a\[email protected]', ValidationError), (validate_email, '"test@test"\[email protected]', ValidationError), (validate_email, 'a@[127.0.0.1]\n', ValidationError), (validate_slug, 'slug-ok', None), (validate_slug, 'longer-slug-still-ok', None), (validate_slug, '--------', None), (validate_slug, 'nohyphensoranything', None), (validate_slug, 'a', None), (validate_slug, '1', None), (validate_slug, 'a1', None), (validate_slug, '', ValidationError), (validate_slug, ' text ', ValidationError), (validate_slug, ' ', ValidationError), (validate_slug, '[email protected]', ValidationError), (validate_slug, '你好', ValidationError), (validate_slug, '你 好', ValidationError), (validate_slug, '\n', ValidationError), (validate_slug, 'trailing-newline\n', ValidationError), (validate_unicode_slug, 'slug-ok', None), (validate_unicode_slug, 'longer-slug-still-ok', None), (validate_unicode_slug, '--------', None), (validate_unicode_slug, 'nohyphensoranything', None), (validate_unicode_slug, 'a', None), (validate_unicode_slug, '1', None), (validate_unicode_slug, 'a1', None), (validate_unicode_slug, '你好', None), (validate_unicode_slug, '', ValidationError), (validate_unicode_slug, ' text ', ValidationError), (validate_unicode_slug, ' ', ValidationError), (validate_unicode_slug, '[email protected]', ValidationError), (validate_unicode_slug, '\n', ValidationError), (validate_unicode_slug, '你 好', ValidationError), (validate_unicode_slug, 'trailing-newline\n', ValidationError), (validate_ipv4_address, '1.1.1.1', None), (validate_ipv4_address, '255.0.0.0', None), (validate_ipv4_address, '0.0.0.0', None), (validate_ipv4_address, '256.1.1.1', ValidationError), (validate_ipv4_address, '25.1.1.', ValidationError), (validate_ipv4_address, '25,1,1,1', ValidationError), (validate_ipv4_address, '25.1 .1.1', ValidationError), (validate_ipv4_address, '1.1.1.1\n', ValidationError), (validate_ipv4_address, '٧.2٥.3٣.243', ValidationError), # validate_ipv6_address uses django.utils.ipv6, which # is tested in much greater detail in its own testcase (validate_ipv6_address, 'fe80::1', None), (validate_ipv6_address, '::1', None), (validate_ipv6_address, '1:2:3:4:5:6:7:8', None), (validate_ipv6_address, '1:2', ValidationError), (validate_ipv6_address, '::zzz', ValidationError), (validate_ipv6_address, '12345::', ValidationError), (validate_ipv46_address, '1.1.1.1', None), (validate_ipv46_address, '255.0.0.0', None), (validate_ipv46_address, '0.0.0.0', None), (validate_ipv46_address, 'fe80::1', None), (validate_ipv46_address, '::1', None), (validate_ipv46_address, '1:2:3:4:5:6:7:8', None), (validate_ipv46_address, '256.1.1.1', ValidationError), (validate_ipv46_address, '25.1.1.', ValidationError), (validate_ipv46_address, '25,1,1,1', ValidationError), (validate_ipv46_address, '25.1 .1.1', ValidationError), (validate_ipv46_address, '1:2', ValidationError), (validate_ipv46_address, '::zzz', ValidationError), (validate_ipv46_address, '12345::', ValidationError), (validate_comma_separated_integer_list, '1', None), (validate_comma_separated_integer_list, '12', None), (validate_comma_separated_integer_list, '1,2', None), (validate_comma_separated_integer_list, '1,2,3', None), (validate_comma_separated_integer_list, 
    '10,32', None),
    (validate_comma_separated_integer_list, '', ValidationError),
    (validate_comma_separated_integer_list, 'a', ValidationError),
    (validate_comma_separated_integer_list, 'a,b,c', ValidationError),
    (validate_comma_separated_integer_list, '1, 2, 3', ValidationError),
    (validate_comma_separated_integer_list, ',', ValidationError),
    (validate_comma_separated_integer_list, '1,2,3,', ValidationError),
    (validate_comma_separated_integer_list, '1,2,', ValidationError),
    (validate_comma_separated_integer_list, ',1', ValidationError),
    (validate_comma_separated_integer_list, '1,,2', ValidationError),

    (int_list_validator(sep='.'), '1.2.3', None),
    (int_list_validator(sep='.', allow_negative=True), '1.2.3', None),
    (int_list_validator(allow_negative=True), '-1,-2,3', None),
    (int_list_validator(allow_negative=True), '1,-2,-12', None),
    (int_list_validator(), '-1,2,3', ValidationError),
    (int_list_validator(sep='.'), '1,2,3', ValidationError),
    (int_list_validator(sep='.'), '1.2.3\n', ValidationError),

    (MaxValueValidator(10), 10, None),
    (MaxValueValidator(10), -10, None),
    (MaxValueValidator(10), 0, None),
    (MaxValueValidator(NOW), NOW, None),
    (MaxValueValidator(NOW), NOW - timedelta(days=1), None),
    (MaxValueValidator(0), 1, ValidationError),
    (MaxValueValidator(NOW), NOW + timedelta(days=1), ValidationError),

    (MinValueValidator(-10), -10, None),
    (MinValueValidator(-10), 10, None),
    (MinValueValidator(-10), 0, None),
    (MinValueValidator(NOW), NOW, None),
    (MinValueValidator(NOW), NOW + timedelta(days=1), None),
    (MinValueValidator(0), -1, ValidationError),
    (MinValueValidator(NOW), NOW - timedelta(days=1), ValidationError),
    # limit_value may be a callable.
    (MinValueValidator(lambda: 1), 0, ValidationError),
    (MinValueValidator(lambda: 1), 1, None),

    (MaxLengthValidator(10), '', None),
    (MaxLengthValidator(10), 10 * 'x', None),
    (MaxLengthValidator(10), 15 * 'x', ValidationError),

    (MinLengthValidator(10), 15 * 'x', None),
    (MinLengthValidator(10), 10 * 'x', None),
    (MinLengthValidator(10), '', ValidationError),

    (URLValidator(EXTENDED_SCHEMES), 'file://localhost/path', None),
    (URLValidator(EXTENDED_SCHEMES), 'git://example.com/', None),
    (URLValidator(EXTENDED_SCHEMES), 'git+ssh://git@github.com/example/hg-git.git', None),
    (URLValidator(EXTENDED_SCHEMES), 'git://-invalid.com', ValidationError),

    (URLValidator(), None, ValidationError),
    (URLValidator(), 56, ValidationError),
    (URLValidator(), 'no_scheme', ValidationError),
    # Newlines and tabs are not accepted.
    (URLValidator(), 'http://www.djangoproject.com/\n', ValidationError),
    (URLValidator(), 'http://[::ffff:192.9.5.5]\n', ValidationError),
    (URLValidator(), 'http://www.djangoproject.com/\r', ValidationError),
    (URLValidator(), 'http://[::ffff:192.9.5.5]\r', ValidationError),
    (URLValidator(), 'http://www.django\rproject.com/', ValidationError),
    (URLValidator(), 'http://[::\rffff:192.9.5.5]', ValidationError),
    (URLValidator(), 'http://\twww.djangoproject.com/', ValidationError),
    (URLValidator(), 'http://\t[::ffff:192.9.5.5]', ValidationError),
    # Trailing junk does not take forever to reject
    (URLValidator(), 'http://www.asdasdasdasdsadfm.com.br ', ValidationError),
    (URLValidator(), 'http://www.asdasdasdasdsadfm.com.br z', ValidationError),

    (BaseValidator(True), True, None),
    (BaseValidator(True), False, ValidationError),

    (RegexValidator(), '', None),
    (RegexValidator(), 'x1x2', None),
    (RegexValidator('[0-9]+'), 'xxxxxx', ValidationError),
    (RegexValidator('[0-9]+'), '1234', None),
    (RegexValidator(re.compile('[0-9]+')), '1234', None),
    (RegexValidator('.*'), '', None),
    (RegexValidator(re.compile('.*')), '', None),
    (RegexValidator('.*'), 'xxxxx', None),
    (RegexValidator('x'), 'y', ValidationError),
    (RegexValidator(re.compile('x')), 'y', ValidationError),
    (RegexValidator('x', inverse_match=True), 'y', None),
    (RegexValidator(re.compile('x'), inverse_match=True), 'y', None),
    (RegexValidator('x', inverse_match=True), 'x', ValidationError),
    (RegexValidator(re.compile('x'), inverse_match=True), 'x', ValidationError),
    (RegexValidator('x', flags=re.IGNORECASE), 'y', ValidationError),
    (RegexValidator('a'), 'A', ValidationError),
    (RegexValidator('a', flags=re.IGNORECASE), 'A', None),

    (FileExtensionValidator(['txt']), ContentFile('contents', name='fileWithUnsupportedExt.jpg'), ValidationError),
    (FileExtensionValidator(['txt']), ContentFile('contents', name='fileWithUnsupportedExt.JPG'), ValidationError),
    (FileExtensionValidator(['txt']), ContentFile('contents', name='fileWithNoExtension'), ValidationError),
    (FileExtensionValidator(['']), ContentFile('contents', name='fileWithAnExtension.txt'), ValidationError),
    (FileExtensionValidator([]), ContentFile('contents', name='file.txt'), ValidationError),
    (FileExtensionValidator(['']), ContentFile('contents', name='fileWithNoExtension'), None),
    (FileExtensionValidator(['txt']), ContentFile('contents', name='file.txt'), None),
    (FileExtensionValidator(['txt']), ContentFile('contents', name='file.TXT'), None),
    (FileExtensionValidator(['TXT']), ContentFile('contents', name='file.txt'), None),
    (FileExtensionValidator(), ContentFile('contents', name='file.jpg'), None),

    (DecimalValidator(max_digits=2, decimal_places=2), Decimal('0.99'), None),
    (DecimalValidator(max_digits=2, decimal_places=1), Decimal('0.99'), ValidationError),
    (DecimalValidator(max_digits=3, decimal_places=1), Decimal('999'), ValidationError),
    (DecimalValidator(max_digits=4, decimal_places=1), Decimal('999'), None),
    (DecimalValidator(max_digits=20, decimal_places=2), Decimal('742403889818000000'), None),
    (DecimalValidator(20, 2), Decimal('7.42403889818E+17'), None),
    (DecimalValidator(max_digits=20, decimal_places=2), Decimal('7424742403889818000000'), ValidationError),
    (DecimalValidator(max_digits=5, decimal_places=2), Decimal('7304E-1'), None),
    (DecimalValidator(max_digits=5, decimal_places=2), Decimal('7304E-3'), ValidationError),
    (DecimalValidator(max_digits=5, decimal_places=5), Decimal('70E-5'), None),
    (DecimalValidator(max_digits=5, decimal_places=5), Decimal('70E-6'), ValidationError),
    # 'Enter a number.' errors
    *[
        (DecimalValidator(decimal_places=2, max_digits=10), Decimal(value), ValidationError)
        for value in (
            'NaN', '-NaN', '+NaN', 'sNaN', '-sNaN', '+sNaN',
            'Inf', '-Inf', '+Inf', 'Infinity', '-Infinity', '+Infinity',
        )
    ],

    (validate_image_file_extension, ContentFile('contents', name='file.jpg'), None),
    (validate_image_file_extension, ContentFile('contents', name='file.png'), None),
    (validate_image_file_extension, ContentFile('contents', name='file.PNG'), None),
    (validate_image_file_extension, ContentFile('contents', name='file.txt'), ValidationError),
    (validate_image_file_extension, ContentFile('contents', name='file'), ValidationError),

    (ProhibitNullCharactersValidator(), '\x00something', ValidationError),
    (ProhibitNullCharactersValidator(), 'something', None),
    (ProhibitNullCharactersValidator(), None, None),
]


def create_path(filename):
    return os.path.abspath(os.path.join(os.path.dirname(__file__), filename))


# Add valid and invalid URL tests.
# This only tests the validator without extended schemes.
with open(create_path('valid_urls.txt'), encoding='utf8') as f:
    for url in f:
        TEST_DATA.append((URLValidator(), url.strip(), None))
with open(create_path('invalid_urls.txt'), encoding='utf8') as f:
    for url in f:
        TEST_DATA.append((URLValidator(), url.strip(), ValidationError))


class TestValidators(SimpleTestCase):

    def test_validators(self):
        for validator, value, expected in TEST_DATA:
            name = validator.__name__ if isinstance(validator, types.FunctionType) else validator.__class__.__name__
            exception_expected = expected is not None and issubclass(expected, Exception)
            with self.subTest(name, value=value):
                if validator is validate_image_file_extension and not PILLOW_IS_INSTALLED:
                    self.skipTest('Pillow is required to test validate_image_file_extension.')
                if exception_expected:
                    with self.assertRaises(expected):
                        validator(value)
                else:
                    self.assertEqual(expected, validator(value))

    def test_single_message(self):
        v = ValidationError('Not Valid')
        self.assertEqual(str(v), "['Not Valid']")
        self.assertEqual(repr(v), "ValidationError(['Not Valid'])")

    def test_message_list(self):
        v = ValidationError(['First Problem', 'Second Problem'])
        self.assertEqual(str(v), "['First Problem', 'Second Problem']")
        self.assertEqual(repr(v), "ValidationError(['First Problem', 'Second Problem'])")

    def test_message_dict(self):
        v = ValidationError({'first': ['First Problem']})
        self.assertEqual(str(v), "{'first': ['First Problem']}")
        self.assertEqual(repr(v), "ValidationError({'first': ['First Problem']})")

    def test_regex_validator_flags(self):
        msg = 'If the flags are set, regex must be a regular expression string.'
        with self.assertRaisesMessage(TypeError, msg):
            RegexValidator(re.compile('a'), flags=re.IGNORECASE)

    def test_max_length_validator_message(self):
        v = MaxLengthValidator(16, message='"%(value)s" has more than %(limit_value)d characters.')
        with self.assertRaisesMessage(ValidationError, '"djangoproject.com" has more than 16 characters.'):
            v('djangoproject.com')


class TestValidatorEquality(TestCase):
    """
    Validators have valid equality operators (#21638)
    """
    def test_regex_equality(self):
        self.assertEqual(
            RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
            RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
        )
        self.assertNotEqual(
            RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
            RegexValidator(r'^(?:[0-9\.\-]*)://'),
        )
        self.assertEqual(
            RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
            RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
        )
        self.assertNotEqual(
            RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh", "invalid"),
            RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
        )
        self.assertNotEqual(
            RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
            RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
        )
        self.assertNotEqual(
            RegexValidator('', flags=re.IGNORECASE),
            RegexValidator(''),
        )
        self.assertNotEqual(
            RegexValidator(''),
            RegexValidator('', inverse_match=True),
        )

    def test_regex_equality_nocache(self):
        pattern = r'^(?:[a-z0-9\.\-]*)://'
        left = RegexValidator(pattern)
        re.purge()
        right = RegexValidator(pattern)
        self.assertEqual(
            left,
            right,
        )

    def test_regex_equality_blank(self):
        self.assertEqual(
            RegexValidator(),
            RegexValidator(),
        )

    def test_email_equality(self):
        self.assertEqual(
            EmailValidator(),
            EmailValidator(),
        )
        self.assertNotEqual(
            EmailValidator(message="BAD EMAIL"),
            EmailValidator(),
        )
        self.assertEqual(
            EmailValidator(message="BAD EMAIL", code="bad"),
            EmailValidator(message="BAD EMAIL", code="bad"),
        )

    def test_basic_equality(self):
        self.assertEqual(
            MaxValueValidator(44),
            MaxValueValidator(44),
        )
        self.assertEqual(MaxValueValidator(44), mock.ANY)
        self.assertNotEqual(
            MaxValueValidator(44),
            MinValueValidator(44),
        )
        self.assertNotEqual(
            MinValueValidator(45),
            MinValueValidator(11),
        )

    def test_decimal_equality(self):
        self.assertEqual(
            DecimalValidator(1, 2),
            DecimalValidator(1, 2),
        )
        self.assertNotEqual(
            DecimalValidator(1, 2),
            DecimalValidator(1, 1),
        )
        self.assertNotEqual(
            DecimalValidator(1, 2),
            DecimalValidator(2, 2),
        )
        self.assertNotEqual(
            DecimalValidator(1, 2),
            MinValueValidator(11),
        )

    def test_file_extension_equality(self):
        self.assertEqual(
            FileExtensionValidator(),
            FileExtensionValidator()
        )
        self.assertEqual(
            FileExtensionValidator(['txt']),
            FileExtensionValidator(['txt'])
        )
        self.assertEqual(
            FileExtensionValidator(['TXT']),
            FileExtensionValidator(['txt'])
        )
        self.assertEqual(
            FileExtensionValidator(['TXT', 'png']),
            FileExtensionValidator(['txt', 'png'])
        )
        self.assertEqual(
            FileExtensionValidator(['txt']),
            FileExtensionValidator(['txt'], code='invalid_extension')
        )
        self.assertNotEqual(
            FileExtensionValidator(['txt']),
            FileExtensionValidator(['png'])
        )
        self.assertNotEqual(
            FileExtensionValidator(['txt']),
            FileExtensionValidator(['png', 'jpg'])
        )
        self.assertNotEqual(
            FileExtensionValidator(['txt']),
            FileExtensionValidator(['txt'], code='custom_code')
        )
        self.assertNotEqual(
            FileExtensionValidator(['txt']),
            FileExtensionValidator(['txt'], message='custom error message')
        )

    def test_prohibit_null_characters_validator_equality(self):
        self.assertEqual(
            ProhibitNullCharactersValidator(message='message', code='code'),
            ProhibitNullCharactersValidator(message='message', code='code')
        )
        self.assertEqual(
            ProhibitNullCharactersValidator(),
            ProhibitNullCharactersValidator()
        )
        self.assertNotEqual(
            ProhibitNullCharactersValidator(message='message1', code='code'),
            ProhibitNullCharactersValidator(message='message2', code='code')
        )
        self.assertNotEqual(
            ProhibitNullCharactersValidator(message='message', code='code1'),
            ProhibitNullCharactersValidator(message='message', code='code2')
        )


class DeprecationTests(SimpleTestCase):
    @ignore_warnings(category=RemovedInDjango41Warning)
    def test_whitelist(self):
        validator = EmailValidator(whitelist=['localdomain'])
        self.assertEqual(validator.domain_allowlist, ['localdomain'])
        self.assertIsNone(validator('email@localdomain'))
        self.assertEqual(validator.domain_allowlist, validator.domain_whitelist)

    def test_whitelist_warning(self):
        msg = "The whitelist argument is deprecated in favor of allowlist."
        with self.assertRaisesMessage(RemovedInDjango41Warning, msg):
            EmailValidator(whitelist='localdomain')

    @ignore_warnings(category=RemovedInDjango41Warning)
    def test_domain_whitelist(self):
        validator = EmailValidator()
        validator.domain_whitelist = ['mydomain']
        self.assertEqual(validator.domain_allowlist, ['mydomain'])
        self.assertEqual(validator.domain_allowlist, validator.domain_whitelist)

    def test_domain_whitelist_access_warning(self):
        validator = EmailValidator()
        msg = (
            'The domain_whitelist attribute is deprecated in favor of '
            'domain_allowlist.'
        )
        with self.assertRaisesMessage(RemovedInDjango41Warning, msg):
            validator.domain_whitelist

    def test_domain_whitelist_set_warning(self):
        validator = EmailValidator()
        msg = (
            'The domain_whitelist attribute is deprecated in favor of '
            'domain_allowlist.'
        )
        with self.assertRaisesMessage(RemovedInDjango41Warning, msg):
            validator.domain_whitelist = ['mydomain']
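
# --- Editor's illustrative sketch (not part of the original Django test file) ---
# The table-driven test above treats every validator as a plain callable: it
# returns None for acceptable input and raises ValidationError otherwise. The
# helper below is a minimal, hedged usage sketch of that contract. It is only
# defined, never executed, so it does not affect test discovery; the validator
# classes and int_list_validator are the real Django APIs exercised by
# TEST_DATA, while the helper name itself is invented for illustration.
def _validator_contract_sketch():
    from django.core.exceptions import ValidationError
    from django.core.validators import MaxValueValidator, int_list_validator

    # Valid input: the call returns None and raises nothing.
    MaxValueValidator(10)(5)
    int_list_validator(sep='.')('1.2.3')

    # Invalid input: ValidationError carries a list of translatable messages.
    try:
        MaxValueValidator(10)(42)
    except ValidationError as exc:
        return exc.messages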
bc0f4492713ad9adb2d8b559f31fb86043bc96c375be504a777732604a56e9f9
import os import shutil import sys import tempfile import unittest from io import StringIO from pathlib import Path from unittest import mock from django.conf import settings from django.contrib.staticfiles import finders, storage from django.contrib.staticfiles.management.commands.collectstatic import ( Command as CollectstaticCommand, ) from django.core.management import call_command from django.test import override_settings from .cases import CollectionTestCase from .settings import TEST_ROOT def hashed_file_path(test, path): fullpath = test.render_template(test.static_template_snippet(path)) return fullpath.replace(settings.STATIC_URL, '') class TestHashedFiles: hashed_file_path = hashed_file_path def tearDown(self): # Clear hashed files to avoid side effects among tests. storage.staticfiles_storage.hashed_files.clear() def assertPostCondition(self): """ Assert post conditions for a test are met. Must be manually called at the end of each test. """ pass def test_template_tag_return(self): self.assertStaticRaises(ValueError, "does/not/exist.png", "/static/does/not/exist.png") self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt") self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt", asvar=True) self.assertStaticRenders("cached/styles.css", "/static/cached/styles.5e0040571e1a.css") self.assertStaticRenders("path/", "/static/path/") self.assertStaticRenders("path/?query", "/static/path/?query") self.assertPostCondition() def test_template_tag_simple_content(self): relpath = self.hashed_file_path("cached/styles.css") self.assertEqual(relpath, "cached/styles.5e0040571e1a.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b"cached/other.css", content) self.assertIn(b"other.d41d8cd98f00.css", content) self.assertPostCondition() def test_path_ignored_completely(self): relpath = self.hashed_file_path("cached/css/ignored.css") self.assertEqual(relpath, "cached/css/ignored.554da52152af.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertIn(b'#foobar', content) self.assertIn(b'http:foobar', content) self.assertIn(b'https:foobar', content) self.assertIn(b'data:foobar', content) self.assertIn(b'chrome:foobar', content) self.assertIn(b'//foobar', content) self.assertPostCondition() def test_path_with_querystring(self): relpath = self.hashed_file_path("cached/styles.css?spam=eggs") self.assertEqual(relpath, "cached/styles.5e0040571e1a.css?spam=eggs") with storage.staticfiles_storage.open("cached/styles.5e0040571e1a.css") as relfile: content = relfile.read() self.assertNotIn(b"cached/other.css", content) self.assertIn(b"other.d41d8cd98f00.css", content) self.assertPostCondition() def test_path_with_fragment(self): relpath = self.hashed_file_path("cached/styles.css#eggs") self.assertEqual(relpath, "cached/styles.5e0040571e1a.css#eggs") with storage.staticfiles_storage.open("cached/styles.5e0040571e1a.css") as relfile: content = relfile.read() self.assertNotIn(b"cached/other.css", content) self.assertIn(b"other.d41d8cd98f00.css", content) self.assertPostCondition() def test_path_with_querystring_and_fragment(self): relpath = self.hashed_file_path("cached/css/fragments.css") self.assertEqual(relpath, "cached/css/fragments.a60c0e74834f.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertIn(b'fonts/font.b9b105392eb8.eot?#iefix', content) self.assertIn(b'fonts/font.b8d603e42714.svg#webfontIyfZbseF', 
content) self.assertIn(b'fonts/font.b8d603e42714.svg#path/to/../../fonts/font.svg', content) self.assertIn(b'data:font/woff;charset=utf-8;base64,d09GRgABAAAAADJoAA0AAAAAR2QAAQAAAAAAAAAAAAA', content) self.assertIn(b'#default#VML', content) self.assertPostCondition() def test_template_tag_absolute(self): relpath = self.hashed_file_path("cached/absolute.css") self.assertEqual(relpath, "cached/absolute.eb04def9f9a4.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b"/static/cached/styles.css", content) self.assertIn(b"/static/cached/styles.5e0040571e1a.css", content) self.assertNotIn(b"/static/styles_root.css", content) self.assertIn(b"/static/styles_root.401f2509a628.css", content) self.assertIn(b'/static/cached/img/relative.acae32e4532b.png', content) self.assertPostCondition() def test_template_tag_absolute_root(self): """ Like test_template_tag_absolute, but for a file in STATIC_ROOT (#26249). """ relpath = self.hashed_file_path("absolute_root.css") self.assertEqual(relpath, "absolute_root.f821df1b64f7.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b"/static/styles_root.css", content) self.assertIn(b"/static/styles_root.401f2509a628.css", content) self.assertPostCondition() def test_template_tag_relative(self): relpath = self.hashed_file_path("cached/relative.css") self.assertEqual(relpath, "cached/relative.c3e9e1ea6f2e.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b"../cached/styles.css", content) self.assertNotIn(b'@import "styles.css"', content) self.assertNotIn(b'url(img/relative.png)', content) self.assertIn(b'url("img/relative.acae32e4532b.png")', content) self.assertIn(b"../cached/styles.5e0040571e1a.css", content) self.assertPostCondition() def test_import_replacement(self): "See #18050" relpath = self.hashed_file_path("cached/import.css") self.assertEqual(relpath, "cached/import.f53576679e5a.css") with storage.staticfiles_storage.open(relpath) as relfile: self.assertIn(b"""import url("styles.5e0040571e1a.css")""", relfile.read()) self.assertPostCondition() def test_template_tag_deep_relative(self): relpath = self.hashed_file_path("cached/css/window.css") self.assertEqual(relpath, "cached/css/window.5d5c10836967.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b'url(img/window.png)', content) self.assertIn(b'url("img/window.acae32e4532b.png")', content) self.assertPostCondition() def test_template_tag_url(self): relpath = self.hashed_file_path("cached/url.css") self.assertEqual(relpath, "cached/url.902310b73412.css") with storage.staticfiles_storage.open(relpath) as relfile: self.assertIn(b"https://", relfile.read()) self.assertPostCondition() @override_settings( STATICFILES_DIRS=[os.path.join(TEST_ROOT, 'project', 'loop')], STATICFILES_FINDERS=['django.contrib.staticfiles.finders.FileSystemFinder'], ) def test_import_loop(self): finders.get_finder.cache_clear() err = StringIO() with self.assertRaisesMessage(RuntimeError, 'Max post-process passes exceeded'): call_command('collectstatic', interactive=False, verbosity=0, stderr=err) self.assertEqual("Post-processing 'All' failed!\n\n", err.getvalue()) self.assertPostCondition() def test_post_processing(self): """ post_processing behaves correctly. Files that are alterable should always be post-processed; files that aren't should be skipped. 
collectstatic has already been called once in setUp() for this testcase, therefore we check by verifying behavior on a second run. """ collectstatic_args = { 'interactive': False, 'verbosity': 0, 'link': False, 'clear': False, 'dry_run': False, 'post_process': True, 'use_default_ignore_patterns': True, 'ignore_patterns': ['*.ignoreme'], } collectstatic_cmd = CollectstaticCommand() collectstatic_cmd.set_options(**collectstatic_args) stats = collectstatic_cmd.collect() self.assertIn(os.path.join('cached', 'css', 'window.css'), stats['post_processed']) self.assertIn(os.path.join('cached', 'css', 'img', 'window.png'), stats['unmodified']) self.assertIn(os.path.join('test', 'nonascii.css'), stats['post_processed']) # No file should be yielded twice. self.assertCountEqual(stats['post_processed'], set(stats['post_processed'])) self.assertPostCondition() def test_css_import_case_insensitive(self): relpath = self.hashed_file_path("cached/styles_insensitive.css") self.assertEqual(relpath, "cached/styles_insensitive.3fa427592a53.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b"cached/other.css", content) self.assertIn(b"other.d41d8cd98f00.css", content) self.assertPostCondition() def test_js_source_map(self): relpath = self.hashed_file_path('cached/source_map.js') self.assertEqual(relpath, 'cached/source_map.9371cbb02a26.js') with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b'//# sourceMappingURL=source_map.js.map', content) self.assertIn( b'//# sourceMappingURL=source_map.js.99914b932bd3.map', content, ) self.assertPostCondition() def test_js_source_map_sensitive(self): relpath = self.hashed_file_path('cached/source_map_sensitive.js') self.assertEqual(relpath, 'cached/source_map_sensitive.5da96fdd3cb3.js') with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertIn(b'//# sOuRcEMaPpInGURL=source_map.js.map', content) self.assertNotIn( b'//# sourceMappingURL=source_map.js.99914b932bd3.map', content, ) self.assertPostCondition() @override_settings( STATICFILES_DIRS=[os.path.join(TEST_ROOT, 'project', 'faulty')], STATICFILES_FINDERS=['django.contrib.staticfiles.finders.FileSystemFinder'], ) def test_post_processing_failure(self): """ post_processing indicates the origin of the error when it fails. """ finders.get_finder.cache_clear() err = StringIO() with self.assertRaises(Exception): call_command('collectstatic', interactive=False, verbosity=0, stderr=err) self.assertEqual("Post-processing 'faulty.css' failed!\n\n", err.getvalue()) self.assertPostCondition() @override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.ExtraPatternsStorage') class TestExtraPatternsStorage(CollectionTestCase): def setUp(self): storage.staticfiles_storage.hashed_files.clear() # avoid cache interference super().setUp() def cached_file_path(self, path): fullpath = self.render_template(self.static_template_snippet(path)) return fullpath.replace(settings.STATIC_URL, '') def test_multi_extension_patterns(self): """ With storage classes having several file extension patterns, only the files matching a specific file pattern should be affected by the substitution (#19670). """ # CSS files shouldn't be touched by JS patterns. 
relpath = self.cached_file_path("cached/import.css") self.assertEqual(relpath, "cached/import.f53576679e5a.css") with storage.staticfiles_storage.open(relpath) as relfile: self.assertIn(b'import url("styles.5e0040571e1a.css")', relfile.read()) # Confirm JS patterns have been applied to JS files. relpath = self.cached_file_path("cached/test.js") self.assertEqual(relpath, "cached/test.388d7a790d46.js") with storage.staticfiles_storage.open(relpath) as relfile: self.assertIn(b'JS_URL("import.f53576679e5a.css")', relfile.read()) @override_settings( STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage', ) class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase): """ Tests for the Cache busting storage """ def setUp(self): super().setUp() temp_dir = tempfile.mkdtemp() os.makedirs(os.path.join(temp_dir, 'test')) self._clear_filename = os.path.join(temp_dir, 'test', 'cleared.txt') with open(self._clear_filename, 'w') as f: f.write('to be deleted in one test') self.patched_settings = self.settings( STATICFILES_DIRS=settings.STATICFILES_DIRS + [temp_dir], ) self.patched_settings.enable() self.addCleanup(shutil.rmtree, temp_dir) self._manifest_strict = storage.staticfiles_storage.manifest_strict def tearDown(self): self.patched_settings.disable() if os.path.exists(self._clear_filename): os.unlink(self._clear_filename) storage.staticfiles_storage.manifest_strict = self._manifest_strict super().tearDown() def assertPostCondition(self): hashed_files = storage.staticfiles_storage.hashed_files # The in-memory version of the manifest matches the one on disk # since a properly created manifest should cover all filenames. if hashed_files: manifest = storage.staticfiles_storage.load_manifest() self.assertEqual(hashed_files, manifest) def test_manifest_exists(self): filename = storage.staticfiles_storage.manifest_name path = storage.staticfiles_storage.path(filename) self.assertTrue(os.path.exists(path)) def test_manifest_does_not_exist(self): storage.staticfiles_storage.manifest_name = 'does.not.exist.json' self.assertIsNone(storage.staticfiles_storage.read_manifest()) def test_manifest_does_not_ignore_permission_error(self): with mock.patch('builtins.open', side_effect=PermissionError): with self.assertRaises(PermissionError): storage.staticfiles_storage.read_manifest() def test_loaded_cache(self): self.assertNotEqual(storage.staticfiles_storage.hashed_files, {}) manifest_content = storage.staticfiles_storage.read_manifest() self.assertIn( '"version": "%s"' % storage.staticfiles_storage.manifest_version, manifest_content ) def test_parse_cache(self): hashed_files = storage.staticfiles_storage.hashed_files manifest = storage.staticfiles_storage.load_manifest() self.assertEqual(hashed_files, manifest) def test_clear_empties_manifest(self): cleared_file_name = storage.staticfiles_storage.clean_name(os.path.join('test', 'cleared.txt')) # collect the additional file self.run_collectstatic() hashed_files = storage.staticfiles_storage.hashed_files self.assertIn(cleared_file_name, hashed_files) manifest_content = storage.staticfiles_storage.load_manifest() self.assertIn(cleared_file_name, manifest_content) original_path = storage.staticfiles_storage.path(cleared_file_name) self.assertTrue(os.path.exists(original_path)) # delete the original file form the app, collect with clear os.unlink(self._clear_filename) self.run_collectstatic(clear=True) self.assertFileNotFound(original_path) hashed_files = storage.staticfiles_storage.hashed_files 
self.assertNotIn(cleared_file_name, hashed_files) manifest_content = storage.staticfiles_storage.load_manifest() self.assertNotIn(cleared_file_name, manifest_content) def test_missing_entry(self): missing_file_name = 'cached/missing.css' configured_storage = storage.staticfiles_storage self.assertNotIn(missing_file_name, configured_storage.hashed_files) # File name not found in manifest with self.assertRaisesMessage(ValueError, "Missing staticfiles manifest entry for '%s'" % missing_file_name): self.hashed_file_path(missing_file_name) configured_storage.manifest_strict = False # File doesn't exist on disk err_msg = "The file '%s' could not be found with %r." % (missing_file_name, configured_storage._wrapped) with self.assertRaisesMessage(ValueError, err_msg): self.hashed_file_path(missing_file_name) content = StringIO() content.write('Found') configured_storage.save(missing_file_name, content) # File exists on disk self.hashed_file_path(missing_file_name) def test_intermediate_files(self): cached_files = os.listdir(os.path.join(settings.STATIC_ROOT, 'cached')) # Intermediate files shouldn't be created for reference. self.assertEqual( len([ cached_file for cached_file in cached_files if cached_file.startswith('relative.') ]), 2, ) @override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.NoneHashStorage') class TestCollectionNoneHashStorage(CollectionTestCase): hashed_file_path = hashed_file_path def test_hashed_name(self): relpath = self.hashed_file_path('cached/styles.css') self.assertEqual(relpath, 'cached/styles.css') @override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.SimpleStorage') class TestCollectionSimpleStorage(CollectionTestCase): hashed_file_path = hashed_file_path def setUp(self): storage.staticfiles_storage.hashed_files.clear() # avoid cache interference super().setUp() def test_template_tag_return(self): self.assertStaticRaises(ValueError, "does/not/exist.png", "/static/does/not/exist.png") self.assertStaticRenders("test/file.txt", "/static/test/file.deploy12345.txt") self.assertStaticRenders("cached/styles.css", "/static/cached/styles.deploy12345.css") self.assertStaticRenders("path/", "/static/path/") self.assertStaticRenders("path/?query", "/static/path/?query") def test_template_tag_simple_content(self): relpath = self.hashed_file_path("cached/styles.css") self.assertEqual(relpath, "cached/styles.deploy12345.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b"cached/other.css", content) self.assertIn(b"other.deploy12345.css", content) class CustomStaticFilesStorage(storage.StaticFilesStorage): """ Used in TestStaticFilePermissions """ def __init__(self, *args, **kwargs): kwargs['file_permissions_mode'] = 0o640 kwargs['directory_permissions_mode'] = 0o740 super().__init__(*args, **kwargs) @unittest.skipIf(sys.platform == 'win32', "Windows only partially supports chmod.") class TestStaticFilePermissions(CollectionTestCase): command_params = { 'interactive': False, 'verbosity': 0, 'ignore_patterns': ['*.ignoreme'], } def setUp(self): self.umask = 0o027 self.old_umask = os.umask(self.umask) super().setUp() def tearDown(self): os.umask(self.old_umask) super().tearDown() # Don't run collectstatic command in this test class. 
def run_collectstatic(self, **kwargs): pass @override_settings( FILE_UPLOAD_PERMISSIONS=0o655, FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765, ) def test_collect_static_files_permissions(self): call_command('collectstatic', **self.command_params) static_root = Path(settings.STATIC_ROOT) test_file = static_root / 'test.txt' file_mode = test_file.stat().st_mode & 0o777 self.assertEqual(file_mode, 0o655) tests = [ static_root / 'subdir', static_root / 'nested', static_root / 'nested' / 'css', ] for directory in tests: with self.subTest(directory=directory): dir_mode = directory.stat().st_mode & 0o777 self.assertEqual(dir_mode, 0o765) @override_settings( FILE_UPLOAD_PERMISSIONS=None, FILE_UPLOAD_DIRECTORY_PERMISSIONS=None, ) def test_collect_static_files_default_permissions(self): call_command('collectstatic', **self.command_params) static_root = Path(settings.STATIC_ROOT) test_file = static_root / 'test.txt' file_mode = test_file.stat().st_mode & 0o777 self.assertEqual(file_mode, 0o666 & ~self.umask) tests = [ static_root / 'subdir', static_root / 'nested', static_root / 'nested' / 'css', ] for directory in tests: with self.subTest(directory=directory): dir_mode = directory.stat().st_mode & 0o777 self.assertEqual(dir_mode, 0o777 & ~self.umask) @override_settings( FILE_UPLOAD_PERMISSIONS=0o655, FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765, STATICFILES_STORAGE='staticfiles_tests.test_storage.CustomStaticFilesStorage', ) def test_collect_static_files_subclass_of_static_storage(self): call_command('collectstatic', **self.command_params) static_root = Path(settings.STATIC_ROOT) test_file = static_root / 'test.txt' file_mode = test_file.stat().st_mode & 0o777 self.assertEqual(file_mode, 0o640) tests = [ static_root / 'subdir', static_root / 'nested', static_root / 'nested' / 'css', ] for directory in tests: with self.subTest(directory=directory): dir_mode = directory.stat().st_mode & 0o777 self.assertEqual(dir_mode, 0o740) @override_settings( STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage', ) class TestCollectionHashedFilesCache(CollectionTestCase): """ Files referenced from CSS use the correct final hashed name regardless of the order in which the files are post-processed. """ hashed_file_path = hashed_file_path def setUp(self): super().setUp() self._temp_dir = temp_dir = tempfile.mkdtemp() os.makedirs(os.path.join(temp_dir, 'test')) self.addCleanup(shutil.rmtree, temp_dir) def _get_filename_path(self, filename): return os.path.join(self._temp_dir, 'test', filename) def test_file_change_after_collectstatic(self): # Create initial static files. file_contents = ( ('foo.png', 'foo'), ('bar.css', 'url("foo.png")\nurl("xyz.png")'), ('xyz.png', 'xyz'), ) for filename, content in file_contents: with open(self._get_filename_path(filename), 'w') as f: f.write(content) with self.modify_settings(STATICFILES_DIRS={'append': self._temp_dir}): finders.get_finder.cache_clear() err = StringIO() # First collectstatic run. call_command('collectstatic', interactive=False, verbosity=0, stderr=err) relpath = self.hashed_file_path('test/bar.css') with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertIn(b'foo.acbd18db4cc2.png', content) self.assertIn(b'xyz.d16fb36f0911.png', content) # Change the contents of the png files. 
            for filename in ('foo.png', 'xyz.png'):
                with open(self._get_filename_path(filename), 'w+b') as f:
                    f.write(b"new content of file to change its hash")

            # The hashes of the png files in the CSS file are updated after
            # a second collectstatic.
            call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
            relpath = self.hashed_file_path('test/bar.css')
            with storage.staticfiles_storage.open(relpath) as relfile:
                content = relfile.read()
                self.assertIn(b'foo.57a5cb9ba68d.png', content)
                self.assertIn(b'xyz.57a5cb9ba68d.png', content)
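
# --- Editor's illustrative sketch (not part of the original Django test file) ---
# The tests above configure ManifestStaticFilesStorage and then inspect its
# manifest. This never-called helper summarizes the accessors they rely on;
# the helper name is invented, while load_manifest(), read_manifest(), and
# hashed_files are the real manifest-storage API used in the assertions above.
# It assumes STATICFILES_STORAGE points at a manifest-based backend, as in
# TestCollectionManifestStorage.
def _manifest_accessors_sketch():
    from django.contrib.staticfiles import storage

    configured = storage.staticfiles_storage      # lazily resolved storage backend
    mapping = configured.load_manifest()          # dict: original name -> hashed name
    raw_json = configured.read_manifest()         # raw manifest text, or None if absent
    in_memory = configured.hashed_files           # cache filled during post-processing
    return mapping, raw_json, in_memory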
38a64e9ba577a47ac1704292d406b8af80ae27d8419249a7ddfcd6c685b6da7e
from datetime import date, datetime

from django.test import SimpleTestCase, override_settings
from django.test.utils import TZ_SUPPORT, requires_tz_support
from django.utils import dateformat, translation
from django.utils.dateformat import format
from django.utils.timezone import (
    get_default_timezone, get_fixed_timezone, make_aware, utc,
)


@override_settings(TIME_ZONE='Europe/Copenhagen')
class DateFormatTests(SimpleTestCase):

    def setUp(self):
        self._orig_lang = translation.get_language()
        translation.activate('en-us')

    def tearDown(self):
        translation.activate(self._orig_lang)

    def test_date(self):
        d = date(2009, 5, 16)
        self.assertEqual(date.fromtimestamp(int(format(d, 'U'))), d)

    def test_naive_datetime(self):
        dt = datetime(2009, 5, 16, 5, 30, 30)
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt)

    def test_naive_ambiguous_datetime(self):
        # dt is ambiguous in Europe/Copenhagen. pytz raises an exception for
        # the ambiguity, which results in an empty string.
        dt = datetime(2015, 10, 25, 2, 30, 0)

        # Try all formatters that involve self.timezone.
        self.assertEqual(format(dt, 'I'), '')
        self.assertEqual(format(dt, 'O'), '')
        self.assertEqual(format(dt, 'T'), '')
        self.assertEqual(format(dt, 'Z'), '')

    @requires_tz_support
    def test_datetime_with_local_tzinfo(self):
        ltz = get_default_timezone()
        dt = make_aware(datetime(2009, 5, 16, 5, 30, 30), ltz)
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz), dt)
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt.replace(tzinfo=None))

    @requires_tz_support
    def test_datetime_with_tzinfo(self):
        tz = get_fixed_timezone(-510)
        ltz = get_default_timezone()
        dt = make_aware(datetime(2009, 5, 16, 5, 30, 30), ltz)
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), tz), dt)
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz), dt)
        # astimezone() is safe here because the target timezone doesn't have DST
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt.astimezone(ltz).replace(tzinfo=None))
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), tz).timetuple(), dt.astimezone(tz).timetuple())
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz).timetuple(), dt.astimezone(ltz).timetuple())

    def test_epoch(self):
        udt = datetime(1970, 1, 1, tzinfo=utc)
        self.assertEqual(format(udt, 'U'), '0')

    def test_empty_format(self):
        my_birthday = datetime(1979, 7, 8, 22, 00)
        self.assertEqual(dateformat.format(my_birthday, ''), '')

    def test_am_pm(self):
        my_birthday = datetime(1979, 7, 8, 22, 00)
        self.assertEqual(dateformat.format(my_birthday, 'a'), 'p.m.')

    def test_microsecond(self):
        # Regression test for #18951
        dt = datetime(2009, 5, 16, microsecond=123)
        self.assertEqual(dateformat.format(dt, 'u'), '000123')

    def test_date_formats(self):
        my_birthday = datetime(1979, 7, 8, 22, 00)
        timestamp = datetime(2008, 5, 19, 11, 45, 23, 123456)

        self.assertEqual(dateformat.format(my_birthday, 'A'), 'PM')
        self.assertEqual(dateformat.format(timestamp, 'c'), '2008-05-19T11:45:23.123456')
        self.assertEqual(dateformat.format(my_birthday, 'd'), '08')
        self.assertEqual(dateformat.format(my_birthday, 'j'), '8')
        self.assertEqual(dateformat.format(my_birthday, 'l'), 'Sunday')
        self.assertEqual(dateformat.format(my_birthday, 'L'), 'False')
        self.assertEqual(dateformat.format(my_birthday, 'm'), '07')
        self.assertEqual(dateformat.format(my_birthday, 'M'), 'Jul')
        self.assertEqual(dateformat.format(my_birthday, 'b'), 'jul')
        self.assertEqual(dateformat.format(my_birthday, 'n'), '7')
        self.assertEqual(dateformat.format(my_birthday, 'N'), 'July')

    def test_time_formats(self):
        my_birthday = datetime(1979, 7, 8, 22, 00)

        self.assertEqual(dateformat.format(my_birthday, 'P'), '10 p.m.')
        self.assertEqual(dateformat.format(my_birthday, 's'), '00')
        self.assertEqual(dateformat.format(my_birthday, 'S'), 'th')
        self.assertEqual(dateformat.format(my_birthday, 't'), '31')
        self.assertEqual(dateformat.format(my_birthday, 'w'), '0')
        self.assertEqual(dateformat.format(my_birthday, 'W'), '27')
        self.assertEqual(dateformat.format(my_birthday, 'y'), '79')
        self.assertEqual(dateformat.format(my_birthday, 'Y'), '1979')
        self.assertEqual(dateformat.format(my_birthday, 'z'), '189')

    def test_dateformat(self):
        my_birthday = datetime(1979, 7, 8, 22, 00)

        self.assertEqual(dateformat.format(my_birthday, r'Y z \C\E\T'), '1979 189 CET')
        self.assertEqual(dateformat.format(my_birthday, r'jS \o\f F'), '8th of July')

    def test_futuredates(self):
        the_future = datetime(2100, 10, 25, 0, 00)
        self.assertEqual(dateformat.format(the_future, r'Y'), '2100')

    def test_day_of_year_leap(self):
        self.assertEqual(dateformat.format(datetime(2000, 12, 31), 'z'), '366')

    def test_timezones(self):
        my_birthday = datetime(1979, 7, 8, 22, 00)
        summertime = datetime(2005, 10, 30, 1, 00)
        wintertime = datetime(2005, 10, 30, 4, 00)
        timestamp = datetime(2008, 5, 19, 11, 45, 23, 123456)

        # 3h30m to the west of UTC
        tz = get_fixed_timezone(-210)
        aware_dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=tz)

        if TZ_SUPPORT:
            self.assertEqual(dateformat.format(my_birthday, 'O'), '+0100')
            self.assertEqual(dateformat.format(my_birthday, 'r'), 'Sun, 08 Jul 1979 22:00:00 +0100')
            self.assertEqual(dateformat.format(my_birthday, 'T'), 'CET')
            self.assertEqual(dateformat.format(my_birthday, 'e'), '')
            self.assertEqual(dateformat.format(aware_dt, 'e'), '-0330')
            self.assertEqual(dateformat.format(my_birthday, 'U'), '300315600')
            self.assertEqual(dateformat.format(timestamp, 'u'), '123456')
            self.assertEqual(dateformat.format(my_birthday, 'Z'), '3600')
            self.assertEqual(dateformat.format(summertime, 'I'), '1')
            self.assertEqual(dateformat.format(summertime, 'O'), '+0200')
            self.assertEqual(dateformat.format(wintertime, 'I'), '0')
            self.assertEqual(dateformat.format(wintertime, 'O'), '+0100')

        # Ticket #16924 -- We don't need timezone support to test this
        self.assertEqual(dateformat.format(aware_dt, 'O'), '-0330')

    def test_invalid_time_format_specifiers(self):
        my_birthday = date(1984, 8, 7)

        for specifier in ['a', 'A', 'f', 'g', 'G', 'h', 'H', 'i', 'P', 'r', 's', 'u']:
            msg = (
                "The format for date objects may not contain time-related "
                "format specifiers (found '%s')." % specifier
            )
            with self.assertRaisesMessage(TypeError, msg):
                dateformat.format(my_birthday, specifier)

    def test_r_format_with_non_en_locale(self):
        # Changing the locale doesn't change the "r" format.
        dt = datetime(1979, 7, 8, 22, 00)
        with translation.override('fr'):
            self.assertEqual(
                dateformat.format(dt, 'r'),
                'Sun, 08 Jul 1979 22:00:00 +0100',
            )

    def test_y_format_year_before_1000(self):
        tests = [
            (476, '76'),
            (42, '42'),
            (4, '04'),
        ]
        for year, expected_date in tests:
            with self.subTest(year=year):
                self.assertEqual(
                    dateformat.format(datetime(year, 9, 8, 5, 0), 'y'),
                    expected_date,
                )

    def test_Y_format_year_before_1000(self):
        self.assertEqual(dateformat.format(datetime(1, 1, 1), 'Y'), '0001')
        self.assertEqual(dateformat.format(datetime(999, 1, 1), 'Y'), '0999')

    def test_twelve_hour_format(self):
        tests = [
            (0, '12'),
            (1, '1'),
            (11, '11'),
            (12, '12'),
            (13, '1'),
            (23, '11'),
        ]
        for hour, expected in tests:
            with self.subTest(hour=hour):
                self.assertEqual(
                    dateformat.format(datetime(2000, 1, 1, hour), 'g'),
                    expected,
                )
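
# --- Editor's illustrative sketch (not part of the original Django test file) ---
# django.utils.dateformat.format() applies PHP-style specifiers ('Y', 'm',
# 'j', 'S', backslash-escaped literals) to a date or datetime, which is what
# the assertions above exercise. The helper below is defined only for
# illustration and never called; its name is invented, and the expected
# results are taken from the tests themselves.
def _dateformat_sketch():
    from datetime import datetime
    from django.utils import dateformat

    dt = datetime(1979, 7, 8, 22, 0)
    assert dateformat.format(dt, 'Y-m-d') == '1979-07-08'        # '-' passes through as a literal
    assert dateformat.format(dt, r'jS \o\f F') == '8th of July'  # backslash escapes literal characters
    return dateformat.format(dt, 'r')  # RFC 2822 style; the offset depends on the active timezone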
bca6ab18d03f9cdb446cb4d56a4f04ed97a49a2f97acac13b45f5ac435ed4a96
import contextlib import os import py_compile import shutil import sys import tempfile import threading import time import types import weakref import zipfile from importlib import import_module from pathlib import Path from subprocess import CompletedProcess from unittest import mock, skip, skipIf import pytz import django.__main__ from django.apps.registry import Apps from django.test import SimpleTestCase from django.test.utils import extend_sys_path from django.utils import autoreload from django.utils.autoreload import WatchmanUnavailable from .test_module import __main__ as test_main from .utils import on_macos_with_hfs class TestIterModulesAndFiles(SimpleTestCase): def import_and_cleanup(self, name): import_module(name) self.addCleanup(lambda: sys.path_importer_cache.clear()) self.addCleanup(lambda: sys.modules.pop(name, None)) def clear_autoreload_caches(self): autoreload.iter_modules_and_files.cache_clear() def assertFileFound(self, filename): # Some temp directories are symlinks. Python resolves these fully while # importing. resolved_filename = filename.resolve(strict=True) self.clear_autoreload_caches() # Test uncached access self.assertIn(resolved_filename, list(autoreload.iter_all_python_module_files())) # Test cached access self.assertIn(resolved_filename, list(autoreload.iter_all_python_module_files())) self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1) def assertFileNotFound(self, filename): resolved_filename = filename.resolve(strict=True) self.clear_autoreload_caches() # Test uncached access self.assertNotIn(resolved_filename, list(autoreload.iter_all_python_module_files())) # Test cached access self.assertNotIn(resolved_filename, list(autoreload.iter_all_python_module_files())) self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1) def temporary_file(self, filename): dirname = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, dirname) return Path(dirname) / filename def test_paths_are_pathlib_instances(self): for filename in autoreload.iter_all_python_module_files(): self.assertIsInstance(filename, Path) def test_file_added(self): """ When a file is added, it's returned by iter_all_python_module_files(). """ filename = self.temporary_file('test_deleted_removed_module.py') filename.touch() with extend_sys_path(str(filename.parent)): self.import_and_cleanup('test_deleted_removed_module') self.assertFileFound(filename.absolute()) def test_check_errors(self): """ When a file containing an error is imported in a function wrapped by check_errors(), gen_filenames() returns it. """ filename = self.temporary_file('test_syntax_error.py') filename.write_text("Ceci n'est pas du Python.") with extend_sys_path(str(filename.parent)): try: with self.assertRaises(SyntaxError): autoreload.check_errors(import_module)('test_syntax_error') finally: autoreload._exception = None self.assertFileFound(filename) def test_check_errors_catches_all_exceptions(self): """ Since Python may raise arbitrary exceptions when importing code, check_errors() must catch Exception, not just some subclasses. """ filename = self.temporary_file('test_exception.py') filename.write_text('raise Exception') with extend_sys_path(str(filename.parent)): try: with self.assertRaises(Exception): autoreload.check_errors(import_module)('test_exception') finally: autoreload._exception = None self.assertFileFound(filename) def test_zip_reload(self): """ Modules imported from zipped files have their archive location included in the result. 
""" zip_file = self.temporary_file('zip_import.zip') with zipfile.ZipFile(str(zip_file), 'w', zipfile.ZIP_DEFLATED) as zipf: zipf.writestr('test_zipped_file.py', '') with extend_sys_path(str(zip_file)): self.import_and_cleanup('test_zipped_file') self.assertFileFound(zip_file) def test_bytecode_conversion_to_source(self): """.pyc and .pyo files are included in the files list.""" filename = self.temporary_file('test_compiled.py') filename.touch() compiled_file = Path(py_compile.compile(str(filename), str(filename.with_suffix('.pyc')))) filename.unlink() with extend_sys_path(str(compiled_file.parent)): self.import_and_cleanup('test_compiled') self.assertFileFound(compiled_file) def test_weakref_in_sys_module(self): """iter_all_python_module_file() ignores weakref modules.""" time_proxy = weakref.proxy(time) sys.modules['time_proxy'] = time_proxy self.addCleanup(lambda: sys.modules.pop('time_proxy', None)) list(autoreload.iter_all_python_module_files()) # No crash. def test_module_without_spec(self): module = types.ModuleType('test_module') del module.__spec__ self.assertEqual(autoreload.iter_modules_and_files((module,), frozenset()), frozenset()) def test_main_module_is_resolved(self): main_module = sys.modules['__main__'] self.assertFileFound(Path(main_module.__file__)) def test_main_module_without_file_is_not_resolved(self): fake_main = types.ModuleType('__main__') self.assertEqual(autoreload.iter_modules_and_files((fake_main,), frozenset()), frozenset()) def test_path_with_embedded_null_bytes(self): for path in ( 'embedded_null_byte\x00.py', 'di\x00rectory/embedded_null_byte.py', ): with self.subTest(path=path): self.assertEqual( autoreload.iter_modules_and_files((), frozenset([path])), frozenset(), ) class TestChildArguments(SimpleTestCase): @mock.patch.dict(sys.modules, {'__main__': django.__main__}) @mock.patch('sys.argv', [django.__main__.__file__, 'runserver']) @mock.patch('sys.warnoptions', []) def test_run_as_module(self): self.assertEqual( autoreload.get_child_arguments(), [sys.executable, '-m', 'django', 'runserver'] ) @mock.patch.dict(sys.modules, {'__main__': test_main}) @mock.patch('sys.argv', [test_main.__file__, 'runserver']) @mock.patch('sys.warnoptions', []) def test_run_as_non_django_module(self): self.assertEqual( autoreload.get_child_arguments(), [sys.executable, '-m', 'utils_tests.test_module', 'runserver'], ) @mock.patch('sys.argv', [__file__, 'runserver']) @mock.patch('sys.warnoptions', ['error']) def test_warnoptions(self): self.assertEqual( autoreload.get_child_arguments(), [sys.executable, '-Werror', __file__, 'runserver'] ) @mock.patch('sys.warnoptions', []) def test_exe_fallback(self): with tempfile.TemporaryDirectory() as tmpdir: exe_path = Path(tmpdir) / 'django-admin.exe' exe_path.touch() with mock.patch('sys.argv', [exe_path.with_suffix(''), 'runserver']): self.assertEqual( autoreload.get_child_arguments(), [exe_path, 'runserver'] ) @mock.patch('sys.warnoptions', []) def test_entrypoint_fallback(self): with tempfile.TemporaryDirectory() as tmpdir: script_path = Path(tmpdir) / 'django-admin-script.py' script_path.touch() with mock.patch('sys.argv', [script_path.with_name('django-admin'), 'runserver']): self.assertEqual( autoreload.get_child_arguments(), [sys.executable, script_path, 'runserver'] ) @mock.patch('sys.argv', ['does-not-exist', 'runserver']) @mock.patch('sys.warnoptions', []) def test_raises_runtimeerror(self): msg = 'Script does-not-exist does not exist.' 
with self.assertRaisesMessage(RuntimeError, msg): autoreload.get_child_arguments() @mock.patch('sys.argv', [__file__, 'runserver']) @mock.patch('sys.warnoptions', []) def test_module_no_spec(self): module = types.ModuleType('test_module') del module.__spec__ with mock.patch.dict(sys.modules, {'__main__': module}): self.assertEqual( autoreload.get_child_arguments(), [sys.executable, __file__, 'runserver'] ) class TestUtilities(SimpleTestCase): def test_is_django_module(self): for module, expected in ( (pytz, False), (sys, False), (autoreload, True) ): with self.subTest(module=module): self.assertIs(autoreload.is_django_module(module), expected) def test_is_django_path(self): for module, expected in ( (pytz.__file__, False), (contextlib.__file__, False), (autoreload.__file__, True) ): with self.subTest(module=module): self.assertIs(autoreload.is_django_path(module), expected) class TestCommonRoots(SimpleTestCase): def test_common_roots(self): paths = ( Path('/first/second'), Path('/first/second/third'), Path('/first/'), Path('/root/first/'), ) results = autoreload.common_roots(paths) self.assertCountEqual(results, [Path('/first/'), Path('/root/first/')]) class TestSysPathDirectories(SimpleTestCase): def setUp(self): self._directory = tempfile.TemporaryDirectory() self.directory = Path(self._directory.name).resolve(strict=True).absolute() self.file = self.directory / 'test' self.file.touch() def tearDown(self): self._directory.cleanup() def test_sys_paths_with_directories(self): with extend_sys_path(str(self.file)): paths = list(autoreload.sys_path_directories()) self.assertIn(self.file.parent, paths) def test_sys_paths_non_existing(self): nonexistent_file = Path(self.directory.name) / 'does_not_exist' with extend_sys_path(str(nonexistent_file)): paths = list(autoreload.sys_path_directories()) self.assertNotIn(nonexistent_file, paths) self.assertNotIn(nonexistent_file.parent, paths) def test_sys_paths_absolute(self): paths = list(autoreload.sys_path_directories()) self.assertTrue(all(p.is_absolute() for p in paths)) def test_sys_paths_directories(self): with extend_sys_path(str(self.directory)): paths = list(autoreload.sys_path_directories()) self.assertIn(self.directory, paths) class GetReloaderTests(SimpleTestCase): @mock.patch('django.utils.autoreload.WatchmanReloader') def test_watchman_unavailable(self, mocked_watchman): mocked_watchman.check_availability.side_effect = WatchmanUnavailable self.assertIsInstance(autoreload.get_reloader(), autoreload.StatReloader) @mock.patch.object(autoreload.WatchmanReloader, 'check_availability') def test_watchman_available(self, mocked_available): # If WatchmanUnavailable isn't raised, Watchman will be chosen. 
mocked_available.return_value = None result = autoreload.get_reloader() self.assertIsInstance(result, autoreload.WatchmanReloader) class RunWithReloaderTests(SimpleTestCase): @mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'true'}) @mock.patch('django.utils.autoreload.get_reloader') def test_swallows_keyboard_interrupt(self, mocked_get_reloader): mocked_get_reloader.side_effect = KeyboardInterrupt() autoreload.run_with_reloader(lambda: None) # No exception @mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'false'}) @mock.patch('django.utils.autoreload.restart_with_reloader') def test_calls_sys_exit(self, mocked_restart_reloader): mocked_restart_reloader.return_value = 1 with self.assertRaises(SystemExit) as exc: autoreload.run_with_reloader(lambda: None) self.assertEqual(exc.exception.code, 1) @mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'true'}) @mock.patch('django.utils.autoreload.start_django') @mock.patch('django.utils.autoreload.get_reloader') def test_calls_start_django(self, mocked_reloader, mocked_start_django): mocked_reloader.return_value = mock.sentinel.RELOADER autoreload.run_with_reloader(mock.sentinel.METHOD) self.assertEqual(mocked_start_django.call_count, 1) self.assertSequenceEqual( mocked_start_django.call_args[0], [mock.sentinel.RELOADER, mock.sentinel.METHOD] ) class StartDjangoTests(SimpleTestCase): @mock.patch('django.utils.autoreload.StatReloader') def test_watchman_becomes_unavailable(self, mocked_stat): mocked_stat.should_stop.return_value = True fake_reloader = mock.MagicMock() fake_reloader.should_stop = False fake_reloader.run.side_effect = autoreload.WatchmanUnavailable() autoreload.start_django(fake_reloader, lambda: None) self.assertEqual(mocked_stat.call_count, 1) @mock.patch('django.utils.autoreload.ensure_echo_on') def test_echo_on_called(self, mocked_echo): fake_reloader = mock.MagicMock() autoreload.start_django(fake_reloader, lambda: None) self.assertEqual(mocked_echo.call_count, 1) @mock.patch('django.utils.autoreload.check_errors') def test_check_errors_called(self, mocked_check_errors): fake_method = mock.MagicMock(return_value=None) fake_reloader = mock.MagicMock() autoreload.start_django(fake_reloader, fake_method) self.assertCountEqual(mocked_check_errors.call_args[0], [fake_method]) @mock.patch('threading.Thread') @mock.patch('django.utils.autoreload.check_errors') def test_starts_thread_with_args(self, mocked_check_errors, mocked_thread): fake_reloader = mock.MagicMock() fake_main_func = mock.MagicMock() fake_thread = mock.MagicMock() mocked_check_errors.return_value = fake_main_func mocked_thread.return_value = fake_thread autoreload.start_django(fake_reloader, fake_main_func, 123, abc=123) self.assertEqual(mocked_thread.call_count, 1) self.assertEqual( mocked_thread.call_args[1], {'target': fake_main_func, 'args': (123,), 'kwargs': {'abc': 123}, 'name': 'django-main-thread'} ) self.assertIs(fake_thread.daemon, True) self.assertTrue(fake_thread.start.called) class TestCheckErrors(SimpleTestCase): def test_mutates_error_files(self): fake_method = mock.MagicMock(side_effect=RuntimeError()) wrapped = autoreload.check_errors(fake_method) with mock.patch.object(autoreload, '_error_files') as mocked_error_files: try: with self.assertRaises(RuntimeError): wrapped() finally: autoreload._exception = None self.assertEqual(mocked_error_files.append.call_count, 1) class TestRaiseLastException(SimpleTestCase): @mock.patch('django.utils.autoreload._exception', None) def test_no_exception(self): # Should raise 
no exception if _exception is None autoreload.raise_last_exception() def test_raises_exception(self): class MyException(Exception): pass # Create an exception try: raise MyException('Test Message') except MyException: exc_info = sys.exc_info() with mock.patch('django.utils.autoreload._exception', exc_info): with self.assertRaisesMessage(MyException, 'Test Message'): autoreload.raise_last_exception() def test_raises_custom_exception(self): class MyException(Exception): def __init__(self, msg, extra_context): super().__init__(msg) self.extra_context = extra_context # Create an exception. try: raise MyException('Test Message', 'extra context') except MyException: exc_info = sys.exc_info() with mock.patch('django.utils.autoreload._exception', exc_info): with self.assertRaisesMessage(MyException, 'Test Message'): autoreload.raise_last_exception() def test_raises_exception_with_context(self): try: raise Exception(2) except Exception as e: try: raise Exception(1) from e except Exception: exc_info = sys.exc_info() with mock.patch('django.utils.autoreload._exception', exc_info): with self.assertRaises(Exception) as cm: autoreload.raise_last_exception() self.assertEqual(cm.exception.args[0], 1) self.assertEqual(cm.exception.__cause__.args[0], 2) class RestartWithReloaderTests(SimpleTestCase): executable = '/usr/bin/python' def patch_autoreload(self, argv): patch_call = mock.patch('django.utils.autoreload.subprocess.run', return_value=CompletedProcess(argv, 0)) patches = [ mock.patch('django.utils.autoreload.sys.argv', argv), mock.patch('django.utils.autoreload.sys.executable', self.executable), mock.patch('django.utils.autoreload.sys.warnoptions', ['all']), ] for p in patches: p.start() self.addCleanup(p.stop) mock_call = patch_call.start() self.addCleanup(patch_call.stop) return mock_call def test_manage_py(self): with tempfile.TemporaryDirectory() as temp_dir: script = Path(temp_dir) / 'manage.py' script.touch() argv = [str(script), 'runserver'] mock_call = self.patch_autoreload(argv) autoreload.restart_with_reloader() self.assertEqual(mock_call.call_count, 1) self.assertEqual( mock_call.call_args[0][0], [self.executable, '-Wall'] + argv, ) def test_python_m_django(self): main = '/usr/lib/pythonX.Y/site-packages/django/__main__.py' argv = [main, 'runserver'] mock_call = self.patch_autoreload(argv) with mock.patch('django.__main__.__file__', main): with mock.patch.dict(sys.modules, {'__main__': django.__main__}): autoreload.restart_with_reloader() self.assertEqual(mock_call.call_count, 1) self.assertEqual(mock_call.call_args[0][0], [self.executable, '-Wall', '-m', 'django'] + argv[1:]) class ReloaderTests(SimpleTestCase): RELOADER_CLS = None def setUp(self): self._tempdir = tempfile.TemporaryDirectory() self.tempdir = Path(self._tempdir.name).resolve(strict=True).absolute() self.existing_file = self.ensure_file(self.tempdir / 'test.py') self.nonexistent_file = (self.tempdir / 'does_not_exist.py').absolute() self.reloader = self.RELOADER_CLS() def tearDown(self): self._tempdir.cleanup() self.reloader.stop() def ensure_file(self, path): path.parent.mkdir(exist_ok=True, parents=True) path.touch() # On Linux and Windows updating the mtime of a file using touch() will set a timestamp # value that is in the past, as the time value for the last kernel tick is used rather # than getting the correct absolute time. # To make testing simpler set the mtime to be the observed time when this function is # called. 
self.set_mtime(path, time.time()) return path.absolute() def set_mtime(self, fp, value): os.utime(str(fp), (value, value)) def increment_mtime(self, fp, by=1): current_time = time.time() self.set_mtime(fp, current_time + by) @contextlib.contextmanager def tick_twice(self): ticker = self.reloader.tick() next(ticker) yield next(ticker) class IntegrationTests: @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_glob(self, mocked_modules, notify_mock): non_py_file = self.ensure_file(self.tempdir / 'non_py_file') self.reloader.watch_dir(self.tempdir, '*.py') with self.tick_twice(): self.increment_mtime(non_py_file) self.increment_mtime(self.existing_file) self.assertEqual(notify_mock.call_count, 1) self.assertCountEqual(notify_mock.call_args[0], [self.existing_file]) @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_multiple_globs(self, mocked_modules, notify_mock): self.ensure_file(self.tempdir / 'x.test') self.reloader.watch_dir(self.tempdir, '*.py') self.reloader.watch_dir(self.tempdir, '*.test') with self.tick_twice(): self.increment_mtime(self.existing_file) self.assertEqual(notify_mock.call_count, 1) self.assertCountEqual(notify_mock.call_args[0], [self.existing_file]) @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_overlapping_globs(self, mocked_modules, notify_mock): self.reloader.watch_dir(self.tempdir, '*.py') self.reloader.watch_dir(self.tempdir, '*.p*') with self.tick_twice(): self.increment_mtime(self.existing_file) self.assertEqual(notify_mock.call_count, 1) self.assertCountEqual(notify_mock.call_args[0], [self.existing_file]) @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_glob_recursive(self, mocked_modules, notify_mock): non_py_file = self.ensure_file(self.tempdir / 'dir' / 'non_py_file') py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py') self.reloader.watch_dir(self.tempdir, '**/*.py') with self.tick_twice(): self.increment_mtime(non_py_file) self.increment_mtime(py_file) self.assertEqual(notify_mock.call_count, 1) self.assertCountEqual(notify_mock.call_args[0], [py_file]) @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_multiple_recursive_globs(self, mocked_modules, notify_mock): non_py_file = self.ensure_file(self.tempdir / 'dir' / 'test.txt') py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py') self.reloader.watch_dir(self.tempdir, '**/*.txt') self.reloader.watch_dir(self.tempdir, '**/*.py') with self.tick_twice(): self.increment_mtime(non_py_file) self.increment_mtime(py_file) self.assertEqual(notify_mock.call_count, 2) self.assertCountEqual(notify_mock.call_args_list, [mock.call(py_file), mock.call(non_py_file)]) @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_nested_glob_recursive(self, mocked_modules, notify_mock): inner_py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py') 
self.reloader.watch_dir(self.tempdir, '**/*.py') self.reloader.watch_dir(inner_py_file.parent, '**/*.py') with self.tick_twice(): self.increment_mtime(inner_py_file) self.assertEqual(notify_mock.call_count, 1) self.assertCountEqual(notify_mock.call_args[0], [inner_py_file]) @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_overlapping_glob_recursive(self, mocked_modules, notify_mock): py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py') self.reloader.watch_dir(self.tempdir, '**/*.p*') self.reloader.watch_dir(self.tempdir, '**/*.py*') with self.tick_twice(): self.increment_mtime(py_file) self.assertEqual(notify_mock.call_count, 1) self.assertCountEqual(notify_mock.call_args[0], [py_file]) class BaseReloaderTests(ReloaderTests): RELOADER_CLS = autoreload.BaseReloader def test_watch_dir_with_unresolvable_path(self): path = Path('unresolvable_directory') with mock.patch.object(Path, 'absolute', side_effect=FileNotFoundError): self.reloader.watch_dir(path, '**/*.mo') self.assertEqual(list(self.reloader.directory_globs), []) def test_watch_with_glob(self): self.reloader.watch_dir(self.tempdir, '*.py') watched_files = list(self.reloader.watched_files()) self.assertIn(self.existing_file, watched_files) def test_watch_files_with_recursive_glob(self): inner_file = self.ensure_file(self.tempdir / 'test' / 'test.py') self.reloader.watch_dir(self.tempdir, '**/*.py') watched_files = list(self.reloader.watched_files()) self.assertIn(self.existing_file, watched_files) self.assertIn(inner_file, watched_files) def test_run_loop_catches_stopiteration(self): def mocked_tick(): yield with mock.patch.object(self.reloader, 'tick', side_effect=mocked_tick) as tick: self.reloader.run_loop() self.assertEqual(tick.call_count, 1) def test_run_loop_stop_and_return(self): def mocked_tick(*args): yield self.reloader.stop() return # Raises StopIteration with mock.patch.object(self.reloader, 'tick', side_effect=mocked_tick) as tick: self.reloader.run_loop() self.assertEqual(tick.call_count, 1) def test_wait_for_apps_ready_checks_for_exception(self): app_reg = Apps() app_reg.ready_event.set() # thread.is_alive() is False if it's not started. dead_thread = threading.Thread() self.assertFalse(self.reloader.wait_for_apps_ready(app_reg, dead_thread)) def test_wait_for_apps_ready_without_exception(self): app_reg = Apps() app_reg.ready_event.set() thread = mock.MagicMock() thread.is_alive.return_value = True self.assertTrue(self.reloader.wait_for_apps_ready(app_reg, thread)) def skip_unless_watchman_available(): try: autoreload.WatchmanReloader.check_availability() except WatchmanUnavailable as e: return skip('Watchman unavailable: %s' % e) return lambda func: func @skip_unless_watchman_available() class WatchmanReloaderTests(ReloaderTests, IntegrationTests): RELOADER_CLS = autoreload.WatchmanReloader def setUp(self): super().setUp() # Shorten the timeout to speed up tests. 
self.reloader.client_timeout = 0.1 def test_watch_glob_ignores_non_existing_directories_two_levels(self): with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe: self.reloader._watch_glob(self.tempdir / 'does_not_exist' / 'more', ['*']) self.assertFalse(mocked_subscribe.called) def test_watch_glob_uses_existing_parent_directories(self): with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe: self.reloader._watch_glob(self.tempdir / 'does_not_exist', ['*']) self.assertSequenceEqual( mocked_subscribe.call_args[0], [ self.tempdir, 'glob-parent-does_not_exist:%s' % self.tempdir, ['anyof', ['match', 'does_not_exist/*', 'wholename']] ] ) def test_watch_glob_multiple_patterns(self): with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe: self.reloader._watch_glob(self.tempdir, ['*', '*.py']) self.assertSequenceEqual( mocked_subscribe.call_args[0], [ self.tempdir, 'glob:%s' % self.tempdir, ['anyof', ['match', '*', 'wholename'], ['match', '*.py', 'wholename']] ] ) def test_watched_roots_contains_files(self): paths = self.reloader.watched_roots([self.existing_file]) self.assertIn(self.existing_file.parent, paths) def test_watched_roots_contains_directory_globs(self): self.reloader.watch_dir(self.tempdir, '*.py') paths = self.reloader.watched_roots([]) self.assertIn(self.tempdir, paths) def test_watched_roots_contains_sys_path(self): with extend_sys_path(str(self.tempdir)): paths = self.reloader.watched_roots([]) self.assertIn(self.tempdir, paths) def test_check_server_status(self): self.assertTrue(self.reloader.check_server_status()) def test_check_server_status_raises_error(self): with mock.patch.object(self.reloader.client, 'query') as mocked_query: mocked_query.side_effect = Exception() with self.assertRaises(autoreload.WatchmanUnavailable): self.reloader.check_server_status() @mock.patch('pywatchman.client') def test_check_availability(self, mocked_client): mocked_client().capabilityCheck.side_effect = Exception() with self.assertRaisesMessage(WatchmanUnavailable, 'Cannot connect to the watchman service'): self.RELOADER_CLS.check_availability() @mock.patch('pywatchman.client') def test_check_availability_lower_version(self, mocked_client): mocked_client().capabilityCheck.return_value = {'version': '4.8.10'} with self.assertRaisesMessage(WatchmanUnavailable, 'Watchman 4.9 or later is required.'): self.RELOADER_CLS.check_availability() def test_pywatchman_not_available(self): with mock.patch.object(autoreload, 'pywatchman') as mocked: mocked.__bool__.return_value = False with self.assertRaisesMessage(WatchmanUnavailable, 'pywatchman not installed.'): self.RELOADER_CLS.check_availability() def test_update_watches_raises_exceptions(self): class TestException(Exception): pass with mock.patch.object(self.reloader, '_update_watches') as mocked_watches: with mock.patch.object(self.reloader, 'check_server_status') as mocked_server_status: mocked_watches.side_effect = TestException() mocked_server_status.return_value = True with self.assertRaises(TestException): self.reloader.update_watches() self.assertIsInstance(mocked_server_status.call_args[0][0], TestException) @mock.patch.dict(os.environ, {'DJANGO_WATCHMAN_TIMEOUT': '10'}) def test_setting_timeout_from_environment_variable(self): self.assertEqual(self.RELOADER_CLS().client_timeout, 10) @skipIf(on_macos_with_hfs(), "These tests do not work with HFS+ as a filesystem") class StatReloaderTests(ReloaderTests, IntegrationTests): RELOADER_CLS = autoreload.StatReloader def setUp(self): super().setUp() # 
Shorten the sleep time to speed up tests. self.reloader.SLEEP_TIME = 0.01 @mock.patch('django.utils.autoreload.StatReloader.notify_file_changed') def test_tick_does_not_trigger_twice(self, mock_notify_file_changed): with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file]): ticker = self.reloader.tick() next(ticker) self.increment_mtime(self.existing_file) next(ticker) next(ticker) self.assertEqual(mock_notify_file_changed.call_count, 1) def test_snapshot_files_ignores_missing_files(self): with mock.patch.object(self.reloader, 'watched_files', return_value=[self.nonexistent_file]): self.assertEqual(dict(self.reloader.snapshot_files()), {}) def test_snapshot_files_updates(self): with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file]): snapshot1 = dict(self.reloader.snapshot_files()) self.assertIn(self.existing_file, snapshot1) self.increment_mtime(self.existing_file) snapshot2 = dict(self.reloader.snapshot_files()) self.assertNotEqual(snapshot1[self.existing_file], snapshot2[self.existing_file]) def test_snapshot_files_with_duplicates(self): with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file, self.existing_file]): snapshot = list(self.reloader.snapshot_files()) self.assertEqual(len(snapshot), 1) self.assertEqual(snapshot[0][0], self.existing_file)
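

# ----------------------------------------------------------------------------
# Illustrative sketch (not part of Django's autoreload implementation): the
# StatReloader tests above exercise an mtime-snapshot strategy -- record each
# watched file's modification time, then compare a later snapshot against it.
# The helper names below (snapshot_mtimes, changed_paths) are hypothetical and
# only mirror the behaviour the tests assert, e.g. that missing files are
# skipped and that duplicate paths collapse to a single entry.
def snapshot_mtimes(paths):
    """Return a dict mapping each existing path to its current mtime."""
    import os
    snapshot = {}
    for path in paths:
        try:
            snapshot[path] = os.stat(str(path)).st_mtime
        except OSError:
            # Files that vanish between listing and stat() are ignored, as in
            # test_snapshot_files_ignores_missing_files.
            continue
    return snapshot


def changed_paths(previous, current):
    """Yield paths whose mtime is new or differs between two snapshots."""
    for path, mtime in current.items():
        if previous.get(path) != mtime:
            yield path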
abdce12fea7b8bb81b5ce9af69e43bf5bd3672a152a33bfe25e532fa95354d73
import os import unittest.loader from argparse import ArgumentParser from contextlib import contextmanager from importlib import import_module from unittest import TestSuite, TextTestRunner, defaultTestLoader, mock from django.db import connections from django.test import SimpleTestCase from django.test.runner import DiscoverRunner from django.test.utils import ( NullTimeKeeper, TimeKeeper, captured_stderr, captured_stdout, ) @contextmanager def change_cwd(directory): current_dir = os.path.abspath(os.path.dirname(__file__)) new_dir = os.path.join(current_dir, directory) old_cwd = os.getcwd() os.chdir(new_dir) try: yield finally: os.chdir(old_cwd) class DiscoverRunnerTests(SimpleTestCase): @staticmethod def get_test_methods_names(suite): return [ t.__class__.__name__ + '.' + t._testMethodName for t in suite._tests ] def test_init_debug_mode(self): runner = DiscoverRunner() self.assertFalse(runner.debug_mode) def test_add_arguments_debug_mode(self): parser = ArgumentParser() DiscoverRunner.add_arguments(parser) ns = parser.parse_args([]) self.assertFalse(ns.debug_mode) ns = parser.parse_args(["--debug-mode"]) self.assertTrue(ns.debug_mode) def test_load_tests_for_label_file_path(self): with change_cwd('.'): msg = ( "One of the test labels is a path to a file: " "'test_discover_runner.py', which is not supported. Use a " "dotted module name instead." ) with self.assertRaisesMessage(RuntimeError, msg): DiscoverRunner().load_tests_for_label('test_discover_runner.py', {}) def test_dotted_test_module(self): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.tests_sample'], ).countTestCases() self.assertEqual(count, 4) def test_dotted_test_class_vanilla_unittest(self): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.tests_sample.TestVanillaUnittest'], ).countTestCases() self.assertEqual(count, 1) def test_dotted_test_class_django_testcase(self): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.tests_sample.TestDjangoTestCase'], ).countTestCases() self.assertEqual(count, 1) def test_dotted_test_method_django_testcase(self): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.tests_sample.TestDjangoTestCase.test_sample'], ).countTestCases() self.assertEqual(count, 1) def test_pattern(self): count = DiscoverRunner( pattern="*_tests.py", verbosity=0, ).build_suite(['test_runner_apps.sample']).countTestCases() self.assertEqual(count, 1) def test_name_patterns(self): all_test_1 = [ 'DjangoCase1.test_1', 'DjangoCase2.test_1', 'SimpleCase1.test_1', 'SimpleCase2.test_1', 'UnittestCase1.test_1', 'UnittestCase2.test_1', ] all_test_2 = [ 'DjangoCase1.test_2', 'DjangoCase2.test_2', 'SimpleCase1.test_2', 'SimpleCase2.test_2', 'UnittestCase1.test_2', 'UnittestCase2.test_2', ] all_tests = sorted([*all_test_1, *all_test_2, 'UnittestCase2.test_3_test']) for pattern, expected in [ [['test_1'], all_test_1], [['UnittestCase1'], ['UnittestCase1.test_1', 'UnittestCase1.test_2']], [['*test'], ['UnittestCase2.test_3_test']], [['test*'], all_tests], [['test'], all_tests], [['test_1', 'test_2'], sorted([*all_test_1, *all_test_2])], [['test*1'], all_test_1], ]: with self.subTest(pattern): suite = DiscoverRunner( test_name_patterns=pattern, verbosity=0, ).build_suite(['test_runner_apps.simple']) self.assertEqual(expected, self.get_test_methods_names(suite)) def test_file_path(self): with change_cwd(".."): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps/sample/'], ).countTestCases() self.assertEqual(count, 5) 
def test_empty_label(self): """ If the test label is empty, discovery should happen on the current working directory. """ with change_cwd("."): suite = DiscoverRunner(verbosity=0).build_suite([]) self.assertEqual( suite._tests[0].id().split(".")[0], os.path.basename(os.getcwd()), ) def test_empty_test_case(self): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.tests_sample.EmptyTestCase'], ).countTestCases() self.assertEqual(count, 0) def test_discovery_on_package(self): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.tests'], ).countTestCases() self.assertEqual(count, 1) def test_ignore_adjacent(self): """ When given a dotted path to a module, unittest discovery searches not just the module, but also the directory containing the module. This results in tests from adjacent modules being run when they should not. The discover runner avoids this behavior. """ count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.empty'], ).countTestCases() self.assertEqual(count, 0) def test_testcase_ordering(self): with change_cwd(".."): suite = DiscoverRunner(verbosity=0).build_suite(['test_runner_apps/sample/']) self.assertEqual( suite._tests[0].__class__.__name__, 'TestDjangoTestCase', msg="TestDjangoTestCase should be the first test case") self.assertEqual( suite._tests[1].__class__.__name__, 'TestZimpleTestCase', msg="TestZimpleTestCase should be the second test case") # All others can follow in unspecified order, including doctests self.assertIn('DocTestCase', [t.__class__.__name__ for t in suite._tests[2:]]) def test_duplicates_ignored(self): """ Tests shouldn't be discovered twice when discovering on overlapping paths. """ base_app = 'forms_tests' sub_app = 'forms_tests.field_tests' runner = DiscoverRunner(verbosity=0) with self.modify_settings(INSTALLED_APPS={'append': sub_app}): single = runner.build_suite([base_app]).countTestCases() dups = runner.build_suite([base_app, sub_app]).countTestCases() self.assertEqual(single, dups) def test_reverse(self): """ Reverse should reorder tests while maintaining the grouping specified by ``DiscoverRunner.reorder_by``. """ runner = DiscoverRunner(reverse=True, verbosity=0) suite = runner.build_suite( test_labels=('test_runner_apps.sample', 'test_runner_apps.simple')) self.assertIn('test_runner_apps.simple', next(iter(suite)).id(), msg="Test labels should be reversed.") suite = runner.build_suite(test_labels=('test_runner_apps.simple',)) suite = tuple(suite) self.assertIn('DjangoCase', suite[0].id(), msg="Test groups should not be reversed.") self.assertIn('SimpleCase', suite[4].id(), msg="Test groups order should be preserved.") self.assertIn('DjangoCase2', suite[0].id(), msg="Django test cases should be reversed.") self.assertIn('SimpleCase2', suite[4].id(), msg="Simple test cases should be reversed.") self.assertIn('UnittestCase2', suite[8].id(), msg="Unittest test cases should be reversed.") self.assertIn('test_2', suite[0].id(), msg="Methods of Django cases should be reversed.") self.assertIn('test_2', suite[4].id(), msg="Methods of simple cases should be reversed.") self.assertIn('test_2', suite[9].id(), msg="Methods of unittest cases should be reversed.") def test_build_suite_failed_tests_first(self): # The "doesnotexist" label results in a _FailedTest instance. 
suite = DiscoverRunner(verbosity=0).build_suite( test_labels=['test_runner_apps.sample', 'doesnotexist'], ) tests = list(suite) self.assertIsInstance(tests[0], unittest.loader._FailedTest) self.assertNotIsInstance(tests[-1], unittest.loader._FailedTest) def test_overridable_get_test_runner_kwargs(self): self.assertIsInstance(DiscoverRunner().get_test_runner_kwargs(), dict) def test_overridable_test_suite(self): self.assertEqual(DiscoverRunner().test_suite, TestSuite) def test_overridable_test_runner(self): self.assertEqual(DiscoverRunner().test_runner, TextTestRunner) def test_overridable_test_loader(self): self.assertEqual(DiscoverRunner().test_loader, defaultTestLoader) def test_tags(self): runner = DiscoverRunner(tags=['core'], verbosity=0) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 1) runner = DiscoverRunner(tags=['fast'], verbosity=0) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 2) runner = DiscoverRunner(tags=['slow'], verbosity=0) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 2) def test_exclude_tags(self): runner = DiscoverRunner(tags=['fast'], exclude_tags=['core'], verbosity=0) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 1) runner = DiscoverRunner(tags=['fast'], exclude_tags=['slow'], verbosity=0) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 0) runner = DiscoverRunner(exclude_tags=['slow'], verbosity=0) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 0) def test_tag_inheritance(self): def count_tests(**kwargs): kwargs.setdefault('verbosity', 0) suite = DiscoverRunner(**kwargs).build_suite(['test_runner_apps.tagged.tests_inheritance']) return suite.countTestCases() self.assertEqual(count_tests(tags=['foo']), 4) self.assertEqual(count_tests(tags=['bar']), 2) self.assertEqual(count_tests(tags=['baz']), 2) self.assertEqual(count_tests(tags=['foo'], exclude_tags=['bar']), 2) self.assertEqual(count_tests(tags=['foo'], exclude_tags=['bar', 'baz']), 1) self.assertEqual(count_tests(exclude_tags=['foo']), 0) def test_tag_fail_to_load(self): with self.assertRaises(SyntaxError): import_module('test_runner_apps.tagged.tests_syntax_error') runner = DiscoverRunner(tags=['syntax_error'], verbosity=0) # A label that doesn't exist or cannot be loaded due to syntax errors # is always considered matching. 
suite = runner.build_suite(['doesnotexist', 'test_runner_apps.tagged']) self.assertEqual([test.id() for test in suite], [ 'unittest.loader._FailedTest.doesnotexist', 'unittest.loader._FailedTest.test_runner_apps.tagged.tests_syntax_error', ]) def test_included_tags_displayed(self): runner = DiscoverRunner(tags=['foo', 'bar'], verbosity=2) with captured_stdout() as stdout: runner.build_suite(['test_runner_apps.tagged.tests']) self.assertIn('Including test tag(s): bar, foo.\n', stdout.getvalue()) def test_excluded_tags_displayed(self): runner = DiscoverRunner(exclude_tags=['foo', 'bar'], verbosity=3) with captured_stdout() as stdout: runner.build_suite(['test_runner_apps.tagged.tests']) self.assertIn('Excluding test tag(s): bar, foo.\n', stdout.getvalue()) def test_number_of_tests_found_displayed(self): runner = DiscoverRunner() with captured_stdout() as stdout: runner.build_suite([ 'test_runner_apps.sample.tests_sample.TestDjangoTestCase', 'test_runner_apps.simple', ]) self.assertIn('Found 14 tests.\n', stdout.getvalue()) def test_pdb_with_parallel(self): msg = ( 'You cannot use --pdb with parallel tests; pass --parallel=1 to ' 'use it.' ) with self.assertRaisesMessage(ValueError, msg): DiscoverRunner(pdb=True, parallel=2) def test_buffer_mode_test_pass(self): runner = DiscoverRunner(buffer=True, verbose=0) with captured_stdout() as stdout, captured_stderr() as stderr: suite = runner.build_suite([ 'test_runner_apps.buffer.tests_buffer.WriteToStdoutStderrTestCase.test_pass', ]) runner.run_suite(suite) self.assertNotIn('Write to stderr.', stderr.getvalue()) self.assertNotIn('Write to stdout.', stdout.getvalue()) def test_buffer_mode_test_fail(self): runner = DiscoverRunner(buffer=True, verbose=0) with captured_stdout() as stdout, captured_stderr() as stderr: suite = runner.build_suite([ 'test_runner_apps.buffer.tests_buffer.WriteToStdoutStderrTestCase.test_fail', ]) runner.run_suite(suite) self.assertIn('Write to stderr.', stderr.getvalue()) self.assertIn('Write to stdout.', stdout.getvalue()) @mock.patch('faulthandler.enable') def test_faulthandler_enabled(self, mocked_enable): with mock.patch('faulthandler.is_enabled', return_value=False): DiscoverRunner(enable_faulthandler=True) mocked_enable.assert_called() @mock.patch('faulthandler.enable') def test_faulthandler_already_enabled(self, mocked_enable): with mock.patch('faulthandler.is_enabled', return_value=True): DiscoverRunner(enable_faulthandler=True) mocked_enable.assert_not_called() @mock.patch('faulthandler.enable') def test_faulthandler_enabled_fileno(self, mocked_enable): # sys.stderr that is not an actual file. 
with mock.patch('faulthandler.is_enabled', return_value=False), captured_stderr(): DiscoverRunner(enable_faulthandler=True) mocked_enable.assert_called() @mock.patch('faulthandler.enable') def test_faulthandler_disabled(self, mocked_enable): with mock.patch('faulthandler.is_enabled', return_value=False): DiscoverRunner(enable_faulthandler=False) mocked_enable.assert_not_called() def test_timings_not_captured(self): runner = DiscoverRunner(timing=False) with captured_stderr() as stderr: with runner.time_keeper.timed('test'): pass runner.time_keeper.print_results() self.assertTrue(isinstance(runner.time_keeper, NullTimeKeeper)) self.assertNotIn('test', stderr.getvalue()) def test_timings_captured(self): runner = DiscoverRunner(timing=True) with captured_stderr() as stderr: with runner.time_keeper.timed('test'): pass runner.time_keeper.print_results() self.assertTrue(isinstance(runner.time_keeper, TimeKeeper)) self.assertIn('test', stderr.getvalue()) class DiscoverRunnerGetDatabasesTests(SimpleTestCase): runner = DiscoverRunner(verbosity=2) skip_msg = 'Skipping setup of unused database(s): ' def get_databases(self, test_labels): with captured_stdout() as stdout: suite = self.runner.build_suite(test_labels) databases = self.runner.get_databases(suite) return databases, stdout.getvalue() def assertSkippedDatabases(self, test_labels, expected_databases): databases, output = self.get_databases(test_labels) self.assertEqual(databases, expected_databases) skipped_databases = set(connections) - set(expected_databases) if skipped_databases: self.assertIn(self.skip_msg + ', '.join(sorted(skipped_databases)), output) else: self.assertNotIn(self.skip_msg, output) def test_mixed(self): databases, output = self.get_databases(['test_runner_apps.databases.tests']) self.assertEqual(databases, {'default': True, 'other': False}) self.assertNotIn(self.skip_msg, output) def test_all(self): databases, output = self.get_databases(['test_runner_apps.databases.tests.AllDatabasesTests']) self.assertEqual(databases, {alias: False for alias in connections}) self.assertNotIn(self.skip_msg, output) def test_default_and_other(self): self.assertSkippedDatabases([ 'test_runner_apps.databases.tests.DefaultDatabaseTests', 'test_runner_apps.databases.tests.OtherDatabaseTests', ], {'default': False, 'other': False}) def test_default_only(self): self.assertSkippedDatabases([ 'test_runner_apps.databases.tests.DefaultDatabaseTests', ], {'default': False}) def test_other_only(self): self.assertSkippedDatabases([ 'test_runner_apps.databases.tests.OtherDatabaseTests' ], {'other': False}) def test_no_databases_required(self): self.assertSkippedDatabases([ 'test_runner_apps.databases.tests.NoDatabaseTests' ], {}) def test_serialize(self): databases, _ = self.get_databases([ 'test_runner_apps.databases.tests.DefaultDatabaseSerializedTests' ]) self.assertEqual(databases, {'default': True})
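

# ----------------------------------------------------------------------------
# Illustrative sketch (not part of Django's test runner): a rough model of the
# tag-selection rule asserted by the tag tests above. A test is dropped if it
# carries any excluded tag; otherwise, when an include list is given, it is
# kept only if it carries at least one included tag. The function name and
# signature are hypothetical and exist only to make the rule explicit.
def matches_tags(test_tags, include_tags=(), exclude_tags=()):
    test_tags = set(test_tags)
    if test_tags & set(exclude_tags):
        return False
    if include_tags:
        return bool(test_tags & set(include_tags))
    return True


# Usage example: a test tagged {'foo', 'bar'} survives tags=['foo'] but is
# removed by exclude_tags=['bar'], the same kind of selection checked in
# test_tags and test_exclude_tags.
assert matches_tags({'foo', 'bar'}, include_tags=['foo']) is True
assert matches_tags({'foo', 'bar'}, exclude_tags=['bar']) is False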
e31c2925b8832256f38379a45730f7912dca7b50f8cf3d9f846fc2975bea0938
import datetime import pickle import sys import unittest from operator import attrgetter from threading import Lock from django.core.exceptions import EmptyResultSet, FieldError from django.db import DEFAULT_DB_ALIAS, connection from django.db.models import Count, Exists, F, Max, OuterRef, Q from django.db.models.expressions import RawSQL from django.db.models.sql.constants import LOUTER from django.db.models.sql.where import NothingNode, WhereNode from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from django.test.utils import CaptureQueriesContext from .models import ( FK1, Annotation, Article, Author, BaseA, Book, CategoryItem, CategoryRelationship, Celebrity, Channel, Chapter, Child, ChildObjectA, Classroom, CommonMixedCaseForeignKeys, Company, Cover, CustomPk, CustomPkTag, DateTimePK, Detail, DumbCategory, Eaten, Employment, ExtraInfo, Fan, Food, Identifier, Individual, Item, Job, JobResponsibilities, Join, LeafA, LeafB, LoopX, LoopZ, ManagedModel, Member, MixedCaseDbColumnCategoryItem, MixedCaseFieldCategoryItem, ModelA, ModelB, ModelC, ModelD, MyObject, NamedCategory, Node, Note, NullableName, Number, ObjectA, ObjectB, ObjectC, OneToOneCategory, Order, OrderItem, Page, Paragraph, Person, Plaything, PointerA, Program, ProxyCategory, ProxyObjectA, ProxyObjectB, Ranking, Related, RelatedIndividual, RelatedObject, Report, ReportComment, ReservedName, Responsibility, School, SharedConnection, SimpleCategory, SingleObject, SpecialCategory, Staff, StaffUser, Student, Tag, Task, Teacher, Ticket21203Child, Ticket21203Parent, Ticket23605A, Ticket23605B, Ticket23605C, TvChef, Valid, X, ) class Queries1Tests(TestCase): @classmethod def setUpTestData(cls): cls.nc1 = generic = NamedCategory.objects.create(name="Generic") cls.t1 = Tag.objects.create(name='t1', category=generic) cls.t2 = Tag.objects.create(name='t2', parent=cls.t1, category=generic) cls.t3 = Tag.objects.create(name='t3', parent=cls.t1) cls.t4 = Tag.objects.create(name='t4', parent=cls.t3) cls.t5 = Tag.objects.create(name='t5', parent=cls.t3) cls.n1 = Note.objects.create(note='n1', misc='foo', id=1) cls.n2 = Note.objects.create(note='n2', misc='bar', id=2) cls.n3 = Note.objects.create(note='n3', misc='foo', id=3, negate=False) cls.ann1 = Annotation.objects.create(name='a1', tag=cls.t1) cls.ann1.notes.add(cls.n1) ann2 = Annotation.objects.create(name='a2', tag=cls.t4) ann2.notes.add(cls.n2, cls.n3) # Create these out of order so that sorting by 'id' will be different to sorting # by 'info'. Helps detect some problems later. 
cls.e2 = ExtraInfo.objects.create(info='e2', note=cls.n2, value=41, filterable=False) e1 = ExtraInfo.objects.create(info='e1', note=cls.n1, value=42) cls.a1 = Author.objects.create(name='a1', num=1001, extra=e1) cls.a2 = Author.objects.create(name='a2', num=2002, extra=e1) cls.a3 = Author.objects.create(name='a3', num=3003, extra=cls.e2) cls.a4 = Author.objects.create(name='a4', num=4004, extra=cls.e2) cls.time1 = datetime.datetime(2007, 12, 19, 22, 25, 0) cls.time2 = datetime.datetime(2007, 12, 19, 21, 0, 0) time3 = datetime.datetime(2007, 12, 20, 22, 25, 0) time4 = datetime.datetime(2007, 12, 20, 21, 0, 0) cls.i1 = Item.objects.create(name='one', created=cls.time1, modified=cls.time1, creator=cls.a1, note=cls.n3) cls.i1.tags.set([cls.t1, cls.t2]) cls.i2 = Item.objects.create(name='two', created=cls.time2, creator=cls.a2, note=cls.n2) cls.i2.tags.set([cls.t1, cls.t3]) cls.i3 = Item.objects.create(name='three', created=time3, creator=cls.a2, note=cls.n3) cls.i4 = Item.objects.create(name='four', created=time4, creator=cls.a4, note=cls.n3) cls.i4.tags.set([cls.t4]) cls.r1 = Report.objects.create(name='r1', creator=cls.a1) cls.r2 = Report.objects.create(name='r2', creator=cls.a3) cls.r3 = Report.objects.create(name='r3') # Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the Meta.ordering # will be rank3, rank2, rank1. cls.rank1 = Ranking.objects.create(rank=2, author=cls.a2) cls.c1 = Cover.objects.create(title="first", item=cls.i4) cls.c2 = Cover.objects.create(title="second", item=cls.i2) def test_subquery_condition(self): qs1 = Tag.objects.filter(pk__lte=0) qs2 = Tag.objects.filter(parent__in=qs1) qs3 = Tag.objects.filter(parent__in=qs2) self.assertEqual(qs3.query.subq_aliases, {'T', 'U', 'V'}) self.assertIn('v0', str(qs3.query).lower()) qs4 = qs3.filter(parent__in=qs1) self.assertEqual(qs4.query.subq_aliases, {'T', 'U', 'V'}) # It is possible to reuse U for the second subquery, no need to use W. self.assertNotIn('w0', str(qs4.query).lower()) # So, 'U0."id"' is referenced in SELECT and WHERE twice. self.assertEqual(str(qs4.query).lower().count('u0.'), 4) def test_ticket1050(self): self.assertSequenceEqual( Item.objects.filter(tags__isnull=True), [self.i3], ) self.assertSequenceEqual( Item.objects.filter(tags__id__isnull=True), [self.i3], ) def test_ticket1801(self): self.assertSequenceEqual( Author.objects.filter(item=self.i2), [self.a2], ) self.assertSequenceEqual( Author.objects.filter(item=self.i3), [self.a2], ) self.assertSequenceEqual( Author.objects.filter(item=self.i2) & Author.objects.filter(item=self.i3), [self.a2], ) def test_ticket2306(self): # Checking that no join types are "left outer" joins. query = Item.objects.filter(tags=self.t2).query self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()]) self.assertSequenceEqual( Item.objects.filter(Q(tags=self.t1)).order_by('name'), [self.i1, self.i2], ) self.assertSequenceEqual( Item.objects.filter(Q(tags=self.t1)).filter(Q(tags=self.t2)), [self.i1], ) self.assertSequenceEqual( Item.objects.filter(Q(tags=self.t1)).filter(Q(creator__name='fred') | Q(tags=self.t2)), [self.i1], ) # Each filter call is processed "at once" against a single table, so this is # different from the previous example as it tries to find tags that are two # things at once (rather than two tags). 
self.assertSequenceEqual( Item.objects.filter(Q(tags=self.t1) & Q(tags=self.t2)), [] ) self.assertSequenceEqual( Item.objects.filter(Q(tags=self.t1), Q(creator__name='fred') | Q(tags=self.t2)), [] ) qs = Author.objects.filter(ranking__rank=2, ranking__id=self.rank1.id) self.assertSequenceEqual(list(qs), [self.a2]) self.assertEqual(2, qs.query.count_active_tables(), 2) qs = Author.objects.filter(ranking__rank=2).filter(ranking__id=self.rank1.id) self.assertEqual(qs.query.count_active_tables(), 3) def test_ticket4464(self): self.assertSequenceEqual( Item.objects.filter(tags=self.t1).filter(tags=self.t2), [self.i1], ) self.assertSequenceEqual( Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name'), [self.i1, self.i2], ) self.assertSequenceEqual( Item.objects.filter(tags__in=[self.t1, self.t2]).filter(tags=self.t3), [self.i2], ) # Make sure .distinct() works with slicing (this was broken in Oracle). self.assertSequenceEqual( Item.objects.filter(tags__in=[self.t1, self.t2]).order_by('name')[:3], [self.i1, self.i1, self.i2], ) self.assertSequenceEqual( Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name')[:3], [self.i1, self.i2], ) def test_tickets_2080_3592(self): self.assertSequenceEqual( Author.objects.filter(item__name='one') | Author.objects.filter(name='a3'), [self.a1, self.a3], ) self.assertSequenceEqual( Author.objects.filter(Q(item__name='one') | Q(name='a3')), [self.a1, self.a3], ) self.assertSequenceEqual( Author.objects.filter(Q(name='a3') | Q(item__name='one')), [self.a1, self.a3], ) self.assertSequenceEqual( Author.objects.filter(Q(item__name='three') | Q(report__name='r3')), [self.a2], ) def test_ticket6074(self): # Merging two empty result sets shouldn't leave a queryset with no constraints # (which would match everything). self.assertSequenceEqual(Author.objects.filter(Q(id__in=[])), []) self.assertSequenceEqual( Author.objects.filter(Q(id__in=[]) | Q(id__in=[])), [] ) def test_tickets_1878_2939(self): self.assertEqual(Item.objects.values('creator').distinct().count(), 3) # Create something with a duplicate 'name' so that we can test multi-column # cases (which require some tricky SQL transformations under the covers). 
xx = Item(name='four', created=self.time1, creator=self.a2, note=self.n1) xx.save() self.assertEqual( Item.objects.exclude(name='two').values('creator', 'name').distinct().count(), 4 ) self.assertEqual( ( Item.objects .exclude(name='two') .extra(select={'foo': '%s'}, select_params=(1,)) .values('creator', 'name', 'foo') .distinct() .count() ), 4 ) self.assertEqual( ( Item.objects .exclude(name='two') .extra(select={'foo': '%s'}, select_params=(1,)) .values('creator', 'name') .distinct() .count() ), 4 ) xx.delete() def test_ticket7323(self): self.assertEqual(Item.objects.values('creator', 'name').count(), 4) def test_ticket2253(self): q1 = Item.objects.order_by('name') q2 = Item.objects.filter(id=self.i1.id) self.assertSequenceEqual(q1, [self.i4, self.i1, self.i3, self.i2]) self.assertSequenceEqual(q2, [self.i1]) self.assertSequenceEqual( (q1 | q2).order_by('name'), [self.i4, self.i1, self.i3, self.i2], ) self.assertSequenceEqual((q1 & q2).order_by('name'), [self.i1]) q1 = Item.objects.filter(tags=self.t1) q2 = Item.objects.filter(note=self.n3, tags=self.t2) q3 = Item.objects.filter(creator=self.a4) self.assertSequenceEqual( ((q1 & q2) | q3).order_by('name'), [self.i4, self.i1], ) def test_order_by_tables(self): q1 = Item.objects.order_by('name') q2 = Item.objects.filter(id=self.i1.id) list(q2) combined_query = (q1 & q2).order_by('name').query self.assertEqual(len([ t for t in combined_query.alias_map if combined_query.alias_refcount[t] ]), 1) def test_order_by_join_unref(self): """ This test is related to the above one, testing that there aren't old JOINs in the query. """ qs = Celebrity.objects.order_by('greatest_fan__fan_of') self.assertIn('OUTER JOIN', str(qs.query)) qs = qs.order_by('id') self.assertNotIn('OUTER JOIN', str(qs.query)) def test_get_clears_ordering(self): """ get() should clear ordering for optimization purposes. """ with CaptureQueriesContext(connection) as captured_queries: Author.objects.order_by('name').get(pk=self.a1.pk) self.assertNotIn('order by', captured_queries[0]['sql'].lower()) def test_tickets_4088_4306(self): self.assertSequenceEqual(Report.objects.filter(creator=1001), [self.r1]) self.assertSequenceEqual( Report.objects.filter(creator__num=1001), [self.r1] ) self.assertSequenceEqual(Report.objects.filter(creator__id=1001), []) self.assertSequenceEqual( Report.objects.filter(creator__id=self.a1.id), [self.r1] ) self.assertSequenceEqual( Report.objects.filter(creator__name='a1'), [self.r1] ) def test_ticket4510(self): self.assertSequenceEqual( Author.objects.filter(report__name='r1'), [self.a1], ) def test_ticket7378(self): self.assertSequenceEqual(self.a1.report_set.all(), [self.r1]) def test_tickets_5324_6704(self): self.assertSequenceEqual( Item.objects.filter(tags__name='t4'), [self.i4], ) self.assertSequenceEqual( Item.objects.exclude(tags__name='t4').order_by('name').distinct(), [self.i1, self.i3, self.i2], ) self.assertSequenceEqual( Item.objects.exclude(tags__name='t4').order_by('name').distinct().reverse(), [self.i2, self.i3, self.i1], ) self.assertSequenceEqual( Author.objects.exclude(item__name='one').distinct().order_by('name'), [self.a2, self.a3, self.a4], ) # Excluding across a m2m relation when there is more than one related # object associated was problematic. self.assertSequenceEqual( Item.objects.exclude(tags__name='t1').order_by('name'), [self.i4, self.i3], ) self.assertSequenceEqual( Item.objects.exclude(tags__name='t1').exclude(tags__name='t4'), [self.i3], ) # Excluding from a relation that cannot be NULL should not use outer joins. 
query = Item.objects.exclude(creator__in=[self.a1, self.a2]).query self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()]) # Similarly, when one of the joins cannot possibly, ever, involve NULL # values (Author -> ExtraInfo, in the following), it should never be # promoted to a left outer join. So the following query should only # involve one "left outer" join (Author -> Item is 0-to-many). qs = Author.objects.filter(id=self.a1.id).filter(Q(extra__note=self.n1) | Q(item__note=self.n3)) self.assertEqual( len([ x for x in qs.query.alias_map.values() if x.join_type == LOUTER and qs.query.alias_refcount[x.table_alias] ]), 1 ) # The previous changes shouldn't affect nullable foreign key joins. self.assertSequenceEqual( Tag.objects.filter(parent__isnull=True).order_by('name'), [self.t1] ) self.assertSequenceEqual( Tag.objects.exclude(parent__isnull=True).order_by('name'), [self.t2, self.t3, self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.exclude(Q(parent__name='t1') | Q(parent__isnull=True)).order_by('name'), [self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.exclude(Q(parent__isnull=True) | Q(parent__name='t1')).order_by('name'), [self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.exclude(Q(parent__parent__isnull=True)).order_by('name'), [self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.filter(~Q(parent__parent__isnull=True)).order_by('name'), [self.t4, self.t5], ) def test_ticket2091(self): t = Tag.objects.get(name='t4') self.assertSequenceEqual(Item.objects.filter(tags__in=[t]), [self.i4]) def test_avoid_infinite_loop_on_too_many_subqueries(self): x = Tag.objects.filter(pk=1) local_recursion_limit = sys.getrecursionlimit() // 16 msg = 'Maximum recursion depth exceeded: too many subqueries.' with self.assertRaisesMessage(RecursionError, msg): for i in range(local_recursion_limit + 2): x = Tag.objects.filter(pk__in=x) def test_reasonable_number_of_subq_aliases(self): x = Tag.objects.filter(pk=1) for _ in range(20): x = Tag.objects.filter(pk__in=x) self.assertEqual( x.query.subq_aliases, { 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'AA', 'AB', 'AC', 'AD', 'AE', 'AF', 'AG', 'AH', 'AI', 'AJ', 'AK', 'AL', 'AM', 'AN', } ) def test_heterogeneous_qs_combination(self): # Combining querysets built on different models should behave in a well-defined # fashion. We raise an error. with self.assertRaisesMessage(AssertionError, 'Cannot combine queries on two different base models.'): Author.objects.all() & Tag.objects.all() with self.assertRaisesMessage(AssertionError, 'Cannot combine queries on two different base models.'): Author.objects.all() | Tag.objects.all() def test_ticket3141(self): self.assertEqual(Author.objects.extra(select={'foo': '1'}).count(), 4) self.assertEqual( Author.objects.extra(select={'foo': '%s'}, select_params=(1,)).count(), 4 ) def test_ticket2400(self): self.assertSequenceEqual( Author.objects.filter(item__isnull=True), [self.a3], ) self.assertSequenceEqual( Tag.objects.filter(item__isnull=True), [self.t5], ) def test_ticket2496(self): self.assertSequenceEqual( Item.objects.extra(tables=['queries_author']).select_related().order_by('name')[:1], [self.i4], ) def test_error_raised_on_filter_with_dictionary(self): with self.assertRaisesMessage(FieldError, 'Cannot parse keyword query as dict'): Note.objects.filter({'note': 'n1', 'misc': 'foo'}) def test_tickets_2076_7256(self): # Ordering on related tables should be possible, even if the table is # not otherwise involved. 
self.assertSequenceEqual( Item.objects.order_by('note__note', 'name'), [self.i2, self.i4, self.i1, self.i3], ) # Ordering on a related field should use the remote model's default # ordering as a final step. self.assertSequenceEqual( Author.objects.order_by('extra', '-name'), [self.a2, self.a1, self.a4, self.a3], ) # Using remote model default ordering can span multiple models (in this # case, Cover is ordered by Item's default, which uses Note's default). self.assertSequenceEqual(Cover.objects.all(), [self.c1, self.c2]) # If the remote model does not have a default ordering, we order by its 'id' # field. self.assertSequenceEqual( Item.objects.order_by('creator', 'name'), [self.i1, self.i3, self.i2, self.i4], ) # Ordering by a many-valued attribute (e.g. a many-to-many or reverse # ForeignKey) is legal, but the results might not make sense. That # isn't Django's problem. Garbage in, garbage out. self.assertSequenceEqual( Item.objects.filter(tags__isnull=False).order_by('tags', 'id'), [self.i1, self.i2, self.i1, self.i2, self.i4], ) # If we replace the default ordering, Django adjusts the required # tables automatically. Item normally requires a join with Note to do # the default ordering, but that isn't needed here. qs = Item.objects.order_by('name') self.assertSequenceEqual(qs, [self.i4, self.i1, self.i3, self.i2]) self.assertEqual(len(qs.query.alias_map), 1) def test_tickets_2874_3002(self): qs = Item.objects.select_related().order_by('note__note', 'name') self.assertQuerysetEqual(qs, [self.i2, self.i4, self.i1, self.i3]) # This is also a good select_related() test because there are multiple # Note entries in the SQL. The two Note items should be different. self.assertEqual(repr(qs[0].note), '<Note: n2>') self.assertEqual(repr(qs[0].creator.extra.note), '<Note: n1>') def test_ticket3037(self): self.assertSequenceEqual( Item.objects.filter(Q(creator__name='a3', name='two') | Q(creator__name='a4', name='four')), [self.i4], ) def test_tickets_5321_7070(self): # Ordering columns must be included in the output columns. Note that # this means results that might otherwise be distinct are not (if there # are multiple values in the ordering cols), as in this example. This # isn't a bug; it's a warning to be careful with the selection of # ordering columns. self.assertSequenceEqual( Note.objects.values('misc').distinct().order_by('note', '-misc'), [{'misc': 'foo'}, {'misc': 'bar'}, {'misc': 'foo'}] ) def test_ticket4358(self): # If you don't pass any fields to values(), relation fields are # returned as "foo_id" keys, not "foo". For consistency, you should be # able to pass "foo_id" in the fields list and have it work, too. We # actually allow both "foo" and "foo_id". # The *_id version is returned by default. self.assertIn('note_id', ExtraInfo.objects.values()[0]) # You can also pass it in explicitly. self.assertSequenceEqual(ExtraInfo.objects.values('note_id'), [{'note_id': 1}, {'note_id': 2}]) # ...or use the field name. self.assertSequenceEqual(ExtraInfo.objects.values('note'), [{'note': 1}, {'note': 2}]) def test_ticket6154(self): # Multiple filter statements are joined using "AND" all the time. 
self.assertSequenceEqual( Author.objects.filter(id=self.a1.id).filter(Q(extra__note=self.n1) | Q(item__note=self.n3)), [self.a1], ) self.assertSequenceEqual( Author.objects.filter(Q(extra__note=self.n1) | Q(item__note=self.n3)).filter(id=self.a1.id), [self.a1], ) def test_ticket6981(self): self.assertSequenceEqual( Tag.objects.select_related('parent').order_by('name'), [self.t1, self.t2, self.t3, self.t4, self.t5], ) def test_ticket9926(self): self.assertSequenceEqual( Tag.objects.select_related("parent", "category").order_by('name'), [self.t1, self.t2, self.t3, self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.select_related('parent', "parent__category").order_by('name'), [self.t1, self.t2, self.t3, self.t4, self.t5], ) def test_tickets_6180_6203(self): # Dates with limits and/or counts self.assertEqual(Item.objects.count(), 4) self.assertEqual(Item.objects.datetimes('created', 'month').count(), 1) self.assertEqual(Item.objects.datetimes('created', 'day').count(), 2) self.assertEqual(len(Item.objects.datetimes('created', 'day')), 2) self.assertEqual(Item.objects.datetimes('created', 'day')[0], datetime.datetime(2007, 12, 19, 0, 0)) def test_tickets_7087_12242(self): # Dates with extra select columns self.assertSequenceEqual( Item.objects.datetimes('created', 'day').extra(select={'a': 1}), [datetime.datetime(2007, 12, 19, 0, 0), datetime.datetime(2007, 12, 20, 0, 0)], ) self.assertSequenceEqual( Item.objects.extra(select={'a': 1}).datetimes('created', 'day'), [datetime.datetime(2007, 12, 19, 0, 0), datetime.datetime(2007, 12, 20, 0, 0)], ) name = "one" self.assertSequenceEqual( Item.objects.datetimes('created', 'day').extra(where=['name=%s'], params=[name]), [datetime.datetime(2007, 12, 19, 0, 0)], ) self.assertSequenceEqual( Item.objects.extra(where=['name=%s'], params=[name]).datetimes('created', 'day'), [datetime.datetime(2007, 12, 19, 0, 0)], ) def test_ticket7155(self): # Nullable dates self.assertSequenceEqual( Item.objects.datetimes('modified', 'day'), [datetime.datetime(2007, 12, 19, 0, 0)], ) def test_order_by_rawsql(self): self.assertSequenceEqual( Item.objects.values('note__note').order_by( RawSQL('queries_note.note', ()), 'id', ), [ {'note__note': 'n2'}, {'note__note': 'n3'}, {'note__note': 'n3'}, {'note__note': 'n3'}, ], ) def test_ticket7096(self): # Make sure exclude() with multiple conditions continues to work. self.assertSequenceEqual( Tag.objects.filter(parent=self.t1, name='t3').order_by('name'), [self.t3], ) self.assertSequenceEqual( Tag.objects.exclude(parent=self.t1, name='t3').order_by('name'), [self.t1, self.t2, self.t4, self.t5], ) self.assertSequenceEqual( Item.objects.exclude(tags__name='t1', name='one').order_by('name').distinct(), [self.i4, self.i3, self.i2], ) self.assertSequenceEqual( Item.objects.filter(name__in=['three', 'four']).exclude(tags__name='t1').order_by('name'), [self.i4, self.i3], ) # More twisted cases, involving nested negations. self.assertSequenceEqual( Item.objects.exclude(~Q(tags__name='t1', name='one')), [self.i1], ) self.assertSequenceEqual( Item.objects.filter(~Q(tags__name='t1', name='one'), name='two'), [self.i2], ) self.assertSequenceEqual( Item.objects.exclude(~Q(tags__name='t1', name='one'), name='two'), [self.i4, self.i1, self.i3], ) def test_tickets_7204_7506(self): # Make sure querysets with related fields can be pickled. If this # doesn't crash, it's a Good Thing. pickle.dumps(Item.objects.all()) def test_ticket7813(self): # We should also be able to pickle things that use select_related(). 
# The only tricky thing here is to ensure that we do the related # selections properly after unpickling. qs = Item.objects.select_related() query = qs.query.get_compiler(qs.db).as_sql()[0] query2 = pickle.loads(pickle.dumps(qs.query)) self.assertEqual( query2.get_compiler(qs.db).as_sql()[0], query ) def test_deferred_load_qs_pickling(self): # Check pickling of deferred-loading querysets qs = Item.objects.defer('name', 'creator') q2 = pickle.loads(pickle.dumps(qs)) self.assertEqual(list(qs), list(q2)) q3 = pickle.loads(pickle.dumps(qs, pickle.HIGHEST_PROTOCOL)) self.assertEqual(list(qs), list(q3)) def test_ticket7277(self): self.assertSequenceEqual( self.n1.annotation_set.filter( Q(tag=self.t5) | Q(tag__children=self.t5) | Q(tag__children__children=self.t5) ), [self.ann1], ) def test_tickets_7448_7707(self): # Complex objects should be converted to strings before being used in # lookups. self.assertSequenceEqual( Item.objects.filter(created__in=[self.time1, self.time2]), [self.i1, self.i2], ) def test_ticket7235(self): # An EmptyQuerySet should not raise exceptions if it is filtered. Eaten.objects.create(meal='m') q = Eaten.objects.none() with self.assertNumQueries(0): self.assertQuerysetEqual(q.all(), []) self.assertQuerysetEqual(q.filter(meal='m'), []) self.assertQuerysetEqual(q.exclude(meal='m'), []) self.assertQuerysetEqual(q.complex_filter({'pk': 1}), []) self.assertQuerysetEqual(q.select_related('food'), []) self.assertQuerysetEqual(q.annotate(Count('food')), []) self.assertQuerysetEqual(q.order_by('meal', 'food'), []) self.assertQuerysetEqual(q.distinct(), []) self.assertQuerysetEqual( q.extra(select={'foo': "1"}), [] ) self.assertQuerysetEqual(q.reverse(), []) q.query.low_mark = 1 msg = 'Cannot change a query once a slice has been taken.' with self.assertRaisesMessage(TypeError, msg): q.extra(select={'foo': "1"}) self.assertQuerysetEqual(q.defer('meal'), []) self.assertQuerysetEqual(q.only('meal'), []) def test_ticket7791(self): # There were "issues" when ordering and distinct-ing on fields related # via ForeignKeys. self.assertEqual( len(Note.objects.order_by('extrainfo__info').distinct()), 3 ) # Pickling of QuerySets using datetimes() should work. qs = Item.objects.datetimes('created', 'month') pickle.loads(pickle.dumps(qs)) def test_ticket9997(self): # If a ValuesList or Values queryset is passed as an inner query, we # make sure it's only requesting a single value and use that as the # thing to select. self.assertSequenceEqual( Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values('name')), [self.t2, self.t3], ) # Multi-valued values() and values_list() querysets should raise errors. with self.assertRaisesMessage(TypeError, 'Cannot use multi-field values as a filter value.'): Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values('name', 'id')) with self.assertRaisesMessage(TypeError, 'Cannot use multi-field values as a filter value.'): Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values_list('name', 'id')) def test_ticket9985(self): # qs.values_list(...).values(...) combinations should work. 
self.assertSequenceEqual( Note.objects.values_list("note", flat=True).values("id").order_by("id"), [{'id': 1}, {'id': 2}, {'id': 3}] ) self.assertSequenceEqual( Annotation.objects.filter(notes__in=Note.objects.filter(note="n1").values_list('note').values('id')), [self.ann1], ) def test_ticket10205(self): # When bailing out early because of an empty "__in" filter, we need # to set things up correctly internally so that subqueries can continue properly. self.assertEqual(Tag.objects.filter(name__in=()).update(name="foo"), 0) def test_ticket10432(self): # Testing an empty "__in" filter with a generator as the value. def f(): return iter([]) n_obj = Note.objects.all()[0] def g(): yield n_obj.pk self.assertQuerysetEqual(Note.objects.filter(pk__in=f()), []) self.assertEqual(list(Note.objects.filter(pk__in=g())), [n_obj]) def test_ticket10742(self): # Queries used in an __in clause don't execute subqueries subq = Author.objects.filter(num__lt=3000) qs = Author.objects.filter(pk__in=subq) self.assertSequenceEqual(qs, [self.a1, self.a2]) # The subquery result cache should not be populated self.assertIsNone(subq._result_cache) subq = Author.objects.filter(num__lt=3000) qs = Author.objects.exclude(pk__in=subq) self.assertSequenceEqual(qs, [self.a3, self.a4]) # The subquery result cache should not be populated self.assertIsNone(subq._result_cache) subq = Author.objects.filter(num__lt=3000) self.assertSequenceEqual( Author.objects.filter(Q(pk__in=subq) & Q(name='a1')), [self.a1], ) # The subquery result cache should not be populated self.assertIsNone(subq._result_cache) def test_ticket7076(self): # Excluding shouldn't eliminate NULL entries. self.assertSequenceEqual( Item.objects.exclude(modified=self.time1).order_by('name'), [self.i4, self.i3, self.i2], ) self.assertSequenceEqual( Tag.objects.exclude(parent__name=self.t1.name), [self.t1, self.t4, self.t5], ) def test_ticket7181(self): # Ordering by related tables should accommodate nullable fields (this # test is a little tricky, since NULL ordering is database dependent. # Instead, we just count the number of results). self.assertEqual(len(Tag.objects.order_by('parent__name')), 5) # Empty querysets can be merged with others. self.assertSequenceEqual( Note.objects.none() | Note.objects.all(), [self.n1, self.n2, self.n3], ) self.assertSequenceEqual( Note.objects.all() | Note.objects.none(), [self.n1, self.n2, self.n3], ) self.assertSequenceEqual(Note.objects.none() & Note.objects.all(), []) self.assertSequenceEqual(Note.objects.all() & Note.objects.none(), []) def test_ticket8439(self): # Complex combinations of conjunctions, disjunctions and nullable # relations. 
self.assertSequenceEqual( Author.objects.filter(Q(item__note__extrainfo=self.e2) | Q(report=self.r1, name='xyz')), [self.a2], ) self.assertSequenceEqual( Author.objects.filter(Q(report=self.r1, name='xyz') | Q(item__note__extrainfo=self.e2)), [self.a2], ) self.assertSequenceEqual( Annotation.objects.filter(Q(tag__parent=self.t1) | Q(notes__note='n1', name='a1')), [self.ann1], ) xx = ExtraInfo.objects.create(info='xx', note=self.n3) self.assertSequenceEqual( Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)), [self.n1, self.n3], ) q = Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)).query self.assertEqual( len([x for x in q.alias_map.values() if x.join_type == LOUTER and q.alias_refcount[x.table_alias]]), 1 ) def test_ticket17429(self): """ Meta.ordering=None works the same as Meta.ordering=[] """ original_ordering = Tag._meta.ordering Tag._meta.ordering = None try: self.assertCountEqual( Tag.objects.all(), [self.t1, self.t2, self.t3, self.t4, self.t5], ) finally: Tag._meta.ordering = original_ordering def test_exclude(self): self.assertQuerysetEqual( Item.objects.exclude(tags__name='t4'), Item.objects.filter(~Q(tags__name='t4'))) self.assertQuerysetEqual( Item.objects.exclude(Q(tags__name='t4') | Q(tags__name='t3')), Item.objects.filter(~(Q(tags__name='t4') | Q(tags__name='t3')))) self.assertQuerysetEqual( Item.objects.exclude(Q(tags__name='t4') | ~Q(tags__name='t3')), Item.objects.filter(~(Q(tags__name='t4') | ~Q(tags__name='t3')))) def test_nested_exclude(self): self.assertQuerysetEqual( Item.objects.exclude(~Q(tags__name='t4')), Item.objects.filter(~~Q(tags__name='t4'))) def test_double_exclude(self): self.assertQuerysetEqual( Item.objects.filter(Q(tags__name='t4')), Item.objects.filter(~~Q(tags__name='t4'))) self.assertQuerysetEqual( Item.objects.filter(Q(tags__name='t4')), Item.objects.filter(~Q(~Q(tags__name='t4')))) def test_exclude_in(self): self.assertQuerysetEqual( Item.objects.exclude(Q(tags__name__in=['t4', 't3'])), Item.objects.filter(~Q(tags__name__in=['t4', 't3']))) self.assertQuerysetEqual( Item.objects.filter(Q(tags__name__in=['t4', 't3'])), Item.objects.filter(~~Q(tags__name__in=['t4', 't3']))) def test_ticket_10790_1(self): # Querying direct fields with isnull should trim the left outer join. # It also should not create INNER JOIN. q = Tag.objects.filter(parent__isnull=True) self.assertSequenceEqual(q, [self.t1]) self.assertNotIn('JOIN', str(q.query)) q = Tag.objects.filter(parent__isnull=False) self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5]) self.assertNotIn('JOIN', str(q.query)) q = Tag.objects.exclude(parent__isnull=True) self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5]) self.assertNotIn('JOIN', str(q.query)) q = Tag.objects.exclude(parent__isnull=False) self.assertSequenceEqual(q, [self.t1]) self.assertNotIn('JOIN', str(q.query)) q = Tag.objects.exclude(parent__parent__isnull=False) self.assertSequenceEqual(q, [self.t1, self.t2, self.t3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1) self.assertNotIn('INNER JOIN', str(q.query)) def test_ticket_10790_2(self): # Querying across several tables should strip only the last outer join, # while preserving the preceding inner joins. q = Tag.objects.filter(parent__parent__isnull=False) self.assertSequenceEqual(q, [self.t4, self.t5]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 1) # Querying without isnull should not convert anything to left outer join. 
q = Tag.objects.filter(parent__parent=self.t1) self.assertSequenceEqual(q, [self.t4, self.t5]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 1) def test_ticket_10790_3(self): # Querying via indirect fields should populate the left outer join q = NamedCategory.objects.filter(tag__isnull=True) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1) # join to dumbcategory ptr_id self.assertEqual(str(q.query).count('INNER JOIN'), 1) self.assertSequenceEqual(q, []) # Querying across several tables should strip only the last join, while # preserving the preceding left outer joins. q = NamedCategory.objects.filter(tag__parent__isnull=True) self.assertEqual(str(q.query).count('INNER JOIN'), 1) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1) self.assertSequenceEqual(q, [self.nc1]) def test_ticket_10790_4(self): # Querying across m2m field should not strip the m2m table from join. q = Author.objects.filter(item__tags__isnull=True) self.assertSequenceEqual(q, [self.a2, self.a3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 2) self.assertNotIn('INNER JOIN', str(q.query)) q = Author.objects.filter(item__tags__parent__isnull=True) self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 3) self.assertNotIn('INNER JOIN', str(q.query)) def test_ticket_10790_5(self): # Querying with isnull=False across m2m field should not create outer joins q = Author.objects.filter(item__tags__isnull=False) self.assertSequenceEqual(q, [self.a1, self.a1, self.a2, self.a2, self.a4]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 2) q = Author.objects.filter(item__tags__parent__isnull=False) self.assertSequenceEqual(q, [self.a1, self.a2, self.a4]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 3) q = Author.objects.filter(item__tags__parent__parent__isnull=False) self.assertSequenceEqual(q, [self.a4]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 4) def test_ticket_10790_6(self): # Querying with isnull=True across m2m field should not create inner joins # and strip last outer join q = Author.objects.filter(item__tags__parent__parent__isnull=True) self.assertSequenceEqual( q, [self.a1, self.a1, self.a2, self.a2, self.a2, self.a3], ) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 4) self.assertEqual(str(q.query).count('INNER JOIN'), 0) q = Author.objects.filter(item__tags__parent__isnull=True) self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 3) self.assertEqual(str(q.query).count('INNER JOIN'), 0) def test_ticket_10790_7(self): # Reverse querying with isnull should not strip the join q = Author.objects.filter(item__isnull=True) self.assertSequenceEqual(q, [self.a3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1) self.assertEqual(str(q.query).count('INNER JOIN'), 0) q = Author.objects.filter(item__isnull=False) self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a4]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 1) def test_ticket_10790_8(self): # Querying with combined q-objects should also strip the left outer join q = Tag.objects.filter(Q(parent__isnull=True) | Q(parent=self.t1)) self.assertSequenceEqual(q, [self.t1, 
self.t2, self.t3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 0) def test_ticket_10790_combine(self): # Combining queries should not re-populate the left outer join q1 = Tag.objects.filter(parent__isnull=True) q2 = Tag.objects.filter(parent__isnull=False) q3 = q1 | q2 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3, self.t4, self.t5]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q3 = q1 & q2 self.assertSequenceEqual(q3, []) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q2 = Tag.objects.filter(parent=self.t1) q3 = q1 | q2 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q3 = q2 | q1 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q1 = Tag.objects.filter(parent__isnull=True) q2 = Tag.objects.filter(parent__parent__isnull=True) q3 = q1 | q2 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 1) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q3 = q2 | q1 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 1) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) def test_ticket19672(self): self.assertSequenceEqual( Report.objects.filter(Q(creator__isnull=False) & ~Q(creator__extra__value=41)), [self.r1], ) def test_ticket_20250(self): # A negated Q along with an annotated queryset failed in Django 1.4 qs = Author.objects.annotate(Count('item')) qs = qs.filter(~Q(extra__value=0)).order_by('name') self.assertIn('SELECT', str(qs.query)) self.assertSequenceEqual(qs, [self.a1, self.a2, self.a3, self.a4]) def test_lookup_constraint_fielderror(self): msg = ( "Cannot resolve keyword 'unknown_field' into field. Choices are: " "annotation, category, category_id, children, id, item, " "managedmodel, name, note, parent, parent_id" ) with self.assertRaisesMessage(FieldError, msg): Tag.objects.filter(unknown_field__name='generic') def test_common_mixed_case_foreign_keys(self): """ Valid query should be generated when fields fetched from joined tables include FKs whose names only differ by case. 
""" c1 = SimpleCategory.objects.create(name='c1') c2 = SimpleCategory.objects.create(name='c2') c3 = SimpleCategory.objects.create(name='c3') category = CategoryItem.objects.create(category=c1) mixed_case_field_category = MixedCaseFieldCategoryItem.objects.create(CaTeGoRy=c2) mixed_case_db_column_category = MixedCaseDbColumnCategoryItem.objects.create(category=c3) CommonMixedCaseForeignKeys.objects.create( category=category, mixed_case_field_category=mixed_case_field_category, mixed_case_db_column_category=mixed_case_db_column_category, ) qs = CommonMixedCaseForeignKeys.objects.values( 'category', 'mixed_case_field_category', 'mixed_case_db_column_category', 'category__category', 'mixed_case_field_category__CaTeGoRy', 'mixed_case_db_column_category__category', ) self.assertTrue(qs.first()) def test_excluded_intermediary_m2m_table_joined(self): self.assertSequenceEqual( Note.objects.filter(~Q(tag__annotation__name=F('note'))), [self.n1, self.n2, self.n3], ) self.assertSequenceEqual( Note.objects.filter(tag__annotation__name='a1').filter(~Q(tag__annotation__name=F('note'))), [], ) def test_field_with_filterable(self): self.assertSequenceEqual( Author.objects.filter(extra=self.e2), [self.a3, self.a4], ) def test_negate_field(self): self.assertSequenceEqual( Note.objects.filter(negate=True), [self.n1, self.n2], ) self.assertSequenceEqual(Note.objects.exclude(negate=True), [self.n3]) class Queries2Tests(TestCase): @classmethod def setUpTestData(cls): cls.num4 = Number.objects.create(num=4) cls.num8 = Number.objects.create(num=8) cls.num12 = Number.objects.create(num=12) def test_ticket4289(self): # A slight variation on the restricting the filtering choices by the # lookup constraints. self.assertSequenceEqual(Number.objects.filter(num__lt=4), []) self.assertSequenceEqual(Number.objects.filter(num__gt=8, num__lt=12), []) self.assertSequenceEqual( Number.objects.filter(num__gt=8, num__lt=13), [self.num12], ) self.assertSequenceEqual( Number.objects.filter(Q(num__lt=4) | Q(num__gt=8, num__lt=12)), [] ) self.assertSequenceEqual( Number.objects.filter(Q(num__gt=8, num__lt=12) | Q(num__lt=4)), [] ) self.assertSequenceEqual( Number.objects.filter(Q(num__gt=8) & Q(num__lt=12) | Q(num__lt=4)), [] ) self.assertSequenceEqual( Number.objects.filter(Q(num__gt=7) & Q(num__lt=12) | Q(num__lt=4)), [self.num8], ) def test_ticket12239(self): # Custom lookups are registered to round float values correctly on gte # and lt IntegerField queries. 
self.assertSequenceEqual( Number.objects.filter(num__gt=11.9), [self.num12], ) self.assertSequenceEqual(Number.objects.filter(num__gt=12), []) self.assertSequenceEqual(Number.objects.filter(num__gt=12.0), []) self.assertSequenceEqual(Number.objects.filter(num__gt=12.1), []) self.assertCountEqual( Number.objects.filter(num__lt=12), [self.num4, self.num8], ) self.assertCountEqual( Number.objects.filter(num__lt=12.0), [self.num4, self.num8], ) self.assertCountEqual( Number.objects.filter(num__lt=12.1), [self.num4, self.num8, self.num12], ) self.assertCountEqual( Number.objects.filter(num__gte=11.9), [self.num12], ) self.assertCountEqual( Number.objects.filter(num__gte=12), [self.num12], ) self.assertCountEqual( Number.objects.filter(num__gte=12.0), [self.num12], ) self.assertSequenceEqual(Number.objects.filter(num__gte=12.1), []) self.assertSequenceEqual(Number.objects.filter(num__gte=12.9), []) self.assertCountEqual( Number.objects.filter(num__lte=11.9), [self.num4, self.num8], ) self.assertCountEqual( Number.objects.filter(num__lte=12), [self.num4, self.num8, self.num12], ) self.assertCountEqual( Number.objects.filter(num__lte=12.0), [self.num4, self.num8, self.num12], ) self.assertCountEqual( Number.objects.filter(num__lte=12.1), [self.num4, self.num8, self.num12], ) self.assertCountEqual( Number.objects.filter(num__lte=12.9), [self.num4, self.num8, self.num12], ) def test_ticket7759(self): # Count should work with a partially read result set. count = Number.objects.count() qs = Number.objects.all() def run(): for obj in qs: return qs.count() == count self.assertTrue(run()) class Queries3Tests(TestCase): def test_ticket7107(self): # This shouldn't create an infinite loop. self.assertQuerysetEqual(Valid.objects.all(), []) def test_ticket8683(self): # An error should be raised when QuerySet.datetimes() is passed the # wrong type of field. 
with self.assertRaisesMessage(AssertionError, "'name' isn't a DateField, TimeField, or DateTimeField."): Item.objects.datetimes('name', 'month') def test_ticket22023(self): with self.assertRaisesMessage(TypeError, "Cannot call only() after .values() or .values_list()"): Valid.objects.values().only() with self.assertRaisesMessage(TypeError, "Cannot call defer() after .values() or .values_list()"): Valid.objects.values().defer() class Queries4Tests(TestCase): @classmethod def setUpTestData(cls): generic = NamedCategory.objects.create(name="Generic") cls.t1 = Tag.objects.create(name='t1', category=generic) n1 = Note.objects.create(note='n1', misc='foo') n2 = Note.objects.create(note='n2', misc='bar') e1 = ExtraInfo.objects.create(info='e1', note=n1) e2 = ExtraInfo.objects.create(info='e2', note=n2) cls.a1 = Author.objects.create(name='a1', num=1001, extra=e1) cls.a3 = Author.objects.create(name='a3', num=3003, extra=e2) cls.r1 = Report.objects.create(name='r1', creator=cls.a1) cls.r2 = Report.objects.create(name='r2', creator=cls.a3) cls.r3 = Report.objects.create(name='r3') cls.i1 = Item.objects.create(name='i1', created=datetime.datetime.now(), note=n1, creator=cls.a1) cls.i2 = Item.objects.create(name='i2', created=datetime.datetime.now(), note=n1, creator=cls.a3) def test_ticket24525(self): tag = Tag.objects.create() anth100 = tag.note_set.create(note='ANTH', misc='100') math101 = tag.note_set.create(note='MATH', misc='101') s1 = tag.annotation_set.create(name='1') s2 = tag.annotation_set.create(name='2') s1.notes.set([math101, anth100]) s2.notes.set([math101]) result = math101.annotation_set.all() & tag.annotation_set.exclude(notes__in=[anth100]) self.assertEqual(list(result), [s2]) def test_ticket11811(self): unsaved_category = NamedCategory(name="Other") msg = 'Unsaved model instance <NamedCategory: Other> cannot be used in an ORM query.' with self.assertRaisesMessage(ValueError, msg): Tag.objects.filter(pk=self.t1.pk).update(category=unsaved_category) def test_ticket14876(self): # Note: when combining the query we need to have information available # about the join type of the trimmed "creator__isnull" join. If we # don't have that information, then the join is created as INNER JOIN # and results will be incorrect. 
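        # Illustrative sketch (assumption, added for clarity): report r3 has no
        # creator, so both forms below must keep a LEFT OUTER JOIN to the author
        # table for the isnull branch, e.g.
        #     Report.objects.filter(Q(creator__isnull=True) | Q(creator__extra__info='e1'))
        #     Report.objects.filter(Q(creator__isnull=True)) | Report.objects.filter(...)
        # If the trimmed join were recreated as an INNER JOIN, r3 would be dropped.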
q1 = Report.objects.filter(Q(creator__isnull=True) | Q(creator__extra__info='e1')) q2 = Report.objects.filter(Q(creator__isnull=True)) | Report.objects.filter(Q(creator__extra__info='e1')) self.assertCountEqual(q1, [self.r1, self.r3]) self.assertEqual(str(q1.query), str(q2.query)) q1 = Report.objects.filter(Q(creator__extra__info='e1') | Q(creator__isnull=True)) q2 = Report.objects.filter(Q(creator__extra__info='e1')) | Report.objects.filter(Q(creator__isnull=True)) self.assertCountEqual(q1, [self.r1, self.r3]) self.assertEqual(str(q1.query), str(q2.query)) q1 = Item.objects.filter(Q(creator=self.a1) | Q(creator__report__name='r1')).order_by() q2 = ( Item.objects .filter(Q(creator=self.a1)).order_by() | Item.objects.filter(Q(creator__report__name='r1')) .order_by() ) self.assertCountEqual(q1, [self.i1]) self.assertEqual(str(q1.query), str(q2.query)) q1 = Item.objects.filter(Q(creator__report__name='e1') | Q(creator=self.a1)).order_by() q2 = ( Item.objects.filter(Q(creator__report__name='e1')).order_by() | Item.objects.filter(Q(creator=self.a1)).order_by() ) self.assertCountEqual(q1, [self.i1]) self.assertEqual(str(q1.query), str(q2.query)) def test_combine_join_reuse(self): # Joins having identical connections are correctly recreated in the # rhs query, in case the query is ORed together (#18748). Report.objects.create(name='r4', creator=self.a1) q1 = Author.objects.filter(report__name='r5') q2 = Author.objects.filter(report__name='r4').filter(report__name='r1') combined = q1 | q2 self.assertEqual(str(combined.query).count('JOIN'), 2) self.assertEqual(len(combined), 1) self.assertEqual(combined[0].name, 'a1') def test_combine_or_filter_reuse(self): combined = Author.objects.filter(name='a1') | Author.objects.filter(name='a3') self.assertEqual(combined.get(name='a1'), self.a1) def test_join_reuse_order(self): # Join aliases are reused in order. This shouldn't raise AssertionError # because change_map contains a circular reference (#26522). s1 = School.objects.create() s2 = School.objects.create() s3 = School.objects.create() t1 = Teacher.objects.create() otherteachers = Teacher.objects.exclude(pk=t1.pk).exclude(friends=t1) qs1 = otherteachers.filter(schools=s1).filter(schools=s2) qs2 = otherteachers.filter(schools=s1).filter(schools=s3) self.assertQuerysetEqual(qs1 | qs2, []) def test_ticket7095(self): # Updates that are filtered on the model being updated are somewhat # tricky in MySQL. ManagedModel.objects.create(data='mm1', tag=self.t1, public=True) self.assertEqual(ManagedModel.objects.update(data='mm'), 1) # A values() or values_list() query across joined models must use outer # joins appropriately. # Note: In Oracle, we expect a null CharField to return '' instead of # None. if connection.features.interprets_empty_strings_as_nulls: expected_null_charfield_repr = '' else: expected_null_charfield_repr = None self.assertSequenceEqual( Report.objects.values_list("creator__extra__info", flat=True).order_by("name"), ['e1', 'e2', expected_null_charfield_repr], ) # Similarly for select_related(), joins beyond an initial nullable join # must use outer joins so that all results are included. self.assertSequenceEqual( Report.objects.select_related("creator", "creator__extra").order_by("name"), [self.r1, self.r2, self.r3] ) # When there are multiple paths to a table from another table, we have # to be careful not to accidentally reuse an inappropriate join when # using select_related(). We used to return the parent's Detail record # here by mistake. 
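        # Reading aid (assumption about the test models): Child references Member
        # through both `person` and `parent`, each Member having its own Detail, so
        #     m1.children.select_related("person__details")
        # involves two distinct join paths to the detail table and must resolve
        # person__details through the child's person (m2 -> d2), not reuse the join
        # belonging to the parent (m1 -> d1).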
d1 = Detail.objects.create(data="d1") d2 = Detail.objects.create(data="d2") m1 = Member.objects.create(name="m1", details=d1) m2 = Member.objects.create(name="m2", details=d2) Child.objects.create(person=m2, parent=m1) obj = m1.children.select_related("person__details")[0] self.assertEqual(obj.person.details.data, 'd2') def test_order_by_resetting(self): # Calling order_by() with no parameters removes any existing ordering on the # model. But it should still be possible to add new ordering after that. qs = Author.objects.order_by().order_by('name') self.assertIn('ORDER BY', qs.query.get_compiler(qs.db).as_sql()[0]) def test_order_by_reverse_fk(self): # It is possible to order by reverse of foreign key, although that can lead # to duplicate results. c1 = SimpleCategory.objects.create(name="category1") c2 = SimpleCategory.objects.create(name="category2") CategoryItem.objects.create(category=c1) CategoryItem.objects.create(category=c2) CategoryItem.objects.create(category=c1) self.assertSequenceEqual(SimpleCategory.objects.order_by('categoryitem', 'pk'), [c1, c2, c1]) def test_filter_reverse_non_integer_pk(self): date_obj = DateTimePK.objects.create() extra_obj = ExtraInfo.objects.create(info='extra', date=date_obj) self.assertEqual( DateTimePK.objects.filter(extrainfo=extra_obj).get(), date_obj, ) def test_ticket10181(self): # Avoid raising an EmptyResultSet if an inner query is probably # empty (and hence, not executed). self.assertQuerysetEqual( Tag.objects.filter(id__in=Tag.objects.filter(id__in=[])), [] ) def test_ticket15316_filter_false(self): c1 = SimpleCategory.objects.create(name="category1") c2 = SpecialCategory.objects.create(name="named category1", special_name="special1") c3 = SpecialCategory.objects.create(name="named category2", special_name="special2") CategoryItem.objects.create(category=c1) ci2 = CategoryItem.objects.create(category=c2) ci3 = CategoryItem.objects.create(category=c3) qs = CategoryItem.objects.filter(category__specialcategory__isnull=False) self.assertEqual(qs.count(), 2) self.assertSequenceEqual(qs, [ci2, ci3]) def test_ticket15316_exclude_false(self): c1 = SimpleCategory.objects.create(name="category1") c2 = SpecialCategory.objects.create(name="named category1", special_name="special1") c3 = SpecialCategory.objects.create(name="named category2", special_name="special2") ci1 = CategoryItem.objects.create(category=c1) CategoryItem.objects.create(category=c2) CategoryItem.objects.create(category=c3) qs = CategoryItem.objects.exclude(category__specialcategory__isnull=False) self.assertEqual(qs.count(), 1) self.assertSequenceEqual(qs, [ci1]) def test_ticket15316_filter_true(self): c1 = SimpleCategory.objects.create(name="category1") c2 = SpecialCategory.objects.create(name="named category1", special_name="special1") c3 = SpecialCategory.objects.create(name="named category2", special_name="special2") ci1 = CategoryItem.objects.create(category=c1) CategoryItem.objects.create(category=c2) CategoryItem.objects.create(category=c3) qs = CategoryItem.objects.filter(category__specialcategory__isnull=True) self.assertEqual(qs.count(), 1) self.assertSequenceEqual(qs, [ci1]) def test_ticket15316_exclude_true(self): c1 = SimpleCategory.objects.create(name="category1") c2 = SpecialCategory.objects.create(name="named category1", special_name="special1") c3 = SpecialCategory.objects.create(name="named category2", special_name="special2") CategoryItem.objects.create(category=c1) ci2 = CategoryItem.objects.create(category=c2) ci3 = CategoryItem.objects.create(category=c3) qs = 
CategoryItem.objects.exclude(category__specialcategory__isnull=True) self.assertEqual(qs.count(), 2) self.assertSequenceEqual(qs, [ci2, ci3]) def test_ticket15316_one2one_filter_false(self): c = SimpleCategory.objects.create(name="cat") c0 = SimpleCategory.objects.create(name="cat0") c1 = SimpleCategory.objects.create(name="category1") OneToOneCategory.objects.create(category=c1, new_name="new1") OneToOneCategory.objects.create(category=c0, new_name="new2") CategoryItem.objects.create(category=c) ci2 = CategoryItem.objects.create(category=c0) ci3 = CategoryItem.objects.create(category=c1) qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=False).order_by('pk') self.assertEqual(qs.count(), 2) self.assertSequenceEqual(qs, [ci2, ci3]) def test_ticket15316_one2one_exclude_false(self): c = SimpleCategory.objects.create(name="cat") c0 = SimpleCategory.objects.create(name="cat0") c1 = SimpleCategory.objects.create(name="category1") OneToOneCategory.objects.create(category=c1, new_name="new1") OneToOneCategory.objects.create(category=c0, new_name="new2") ci1 = CategoryItem.objects.create(category=c) CategoryItem.objects.create(category=c0) CategoryItem.objects.create(category=c1) qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=False) self.assertEqual(qs.count(), 1) self.assertSequenceEqual(qs, [ci1]) def test_ticket15316_one2one_filter_true(self): c = SimpleCategory.objects.create(name="cat") c0 = SimpleCategory.objects.create(name="cat0") c1 = SimpleCategory.objects.create(name="category1") OneToOneCategory.objects.create(category=c1, new_name="new1") OneToOneCategory.objects.create(category=c0, new_name="new2") ci1 = CategoryItem.objects.create(category=c) CategoryItem.objects.create(category=c0) CategoryItem.objects.create(category=c1) qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=True) self.assertEqual(qs.count(), 1) self.assertSequenceEqual(qs, [ci1]) def test_ticket15316_one2one_exclude_true(self): c = SimpleCategory.objects.create(name="cat") c0 = SimpleCategory.objects.create(name="cat0") c1 = SimpleCategory.objects.create(name="category1") OneToOneCategory.objects.create(category=c1, new_name="new1") OneToOneCategory.objects.create(category=c0, new_name="new2") CategoryItem.objects.create(category=c) ci2 = CategoryItem.objects.create(category=c0) ci3 = CategoryItem.objects.create(category=c1) qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=True).order_by('pk') self.assertEqual(qs.count(), 2) self.assertSequenceEqual(qs, [ci2, ci3]) class Queries5Tests(TestCase): @classmethod def setUpTestData(cls): # Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the # Meta.ordering will be rank3, rank2, rank1. cls.n1 = Note.objects.create(note='n1', misc='foo', id=1) cls.n2 = Note.objects.create(note='n2', misc='bar', id=2) e1 = ExtraInfo.objects.create(info='e1', note=cls.n1) e2 = ExtraInfo.objects.create(info='e2', note=cls.n2) a1 = Author.objects.create(name='a1', num=1001, extra=e1) a2 = Author.objects.create(name='a2', num=2002, extra=e1) a3 = Author.objects.create(name='a3', num=3003, extra=e2) cls.rank2 = Ranking.objects.create(rank=2, author=a2) cls.rank1 = Ranking.objects.create(rank=1, author=a3) cls.rank3 = Ranking.objects.create(rank=3, author=a1) def test_ordering(self): # Cross model ordering is possible in Meta, too. 
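        # Note added for clarity (assumption about the Ranking model): its
        # Meta.ordering presumably spans the author relation (something like
        # ordering following author fields), which is why the default order below
        # comes out as rank3 (author a1), rank2 (a2), rank1 (a3) rather than by rank.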
self.assertSequenceEqual( Ranking.objects.all(), [self.rank3, self.rank2, self.rank1], ) self.assertSequenceEqual( Ranking.objects.all().order_by('rank'), [self.rank1, self.rank2, self.rank3], ) # Ordering of extra() pieces is possible, too and you can mix extra # fields and model fields in the ordering. self.assertSequenceEqual( Ranking.objects.extra(tables=['django_site'], order_by=['-django_site.id', 'rank']), [self.rank1, self.rank2, self.rank3], ) sql = 'case when %s > 2 then 1 else 0 end' % connection.ops.quote_name('rank') qs = Ranking.objects.extra(select={'good': sql}) self.assertEqual( [o.good for o in qs.extra(order_by=('-good',))], [True, False, False] ) self.assertSequenceEqual( qs.extra(order_by=('-good', 'id')), [self.rank3, self.rank2, self.rank1], ) # Despite having some extra aliases in the query, we can still omit # them in a values() query. dicts = qs.values('id', 'rank').order_by('id') self.assertEqual( [d['rank'] for d in dicts], [2, 1, 3] ) def test_ticket7256(self): # An empty values() call includes all aliases, including those from an # extra() sql = 'case when %s > 2 then 1 else 0 end' % connection.ops.quote_name('rank') qs = Ranking.objects.extra(select={'good': sql}) dicts = qs.values().order_by('id') for d in dicts: del d['id'] del d['author_id'] self.assertEqual( [sorted(d.items()) for d in dicts], [[('good', 0), ('rank', 2)], [('good', 0), ('rank', 1)], [('good', 1), ('rank', 3)]] ) def test_ticket7045(self): # Extra tables used to crash SQL construction on the second use. qs = Ranking.objects.extra(tables=['django_site']) qs.query.get_compiler(qs.db).as_sql() # test passes if this doesn't raise an exception. qs.query.get_compiler(qs.db).as_sql() def test_ticket9848(self): # Make sure that updates which only filter on sub-tables don't # inadvertently update the wrong records (bug #9848). author_start = Author.objects.get(name='a1') ranking_start = Ranking.objects.get(author__name='a1') # Make sure that the IDs from different tables don't happen to match. self.assertSequenceEqual( Ranking.objects.filter(author__name='a1'), [self.rank3], ) self.assertEqual( Ranking.objects.filter(author__name='a1').update(rank=4636), 1 ) r = Ranking.objects.get(author__name='a1') self.assertEqual(r.id, ranking_start.id) self.assertEqual(r.author.id, author_start.id) self.assertEqual(r.rank, 4636) r.rank = 3 r.save() self.assertSequenceEqual( Ranking.objects.all(), [self.rank3, self.rank2, self.rank1], ) def test_ticket5261(self): # Test different empty excludes. self.assertSequenceEqual( Note.objects.exclude(Q()), [self.n1, self.n2], ) self.assertSequenceEqual( Note.objects.filter(~Q()), [self.n1, self.n2], ) self.assertSequenceEqual( Note.objects.filter(~Q() | ~Q()), [self.n1, self.n2], ) self.assertSequenceEqual( Note.objects.exclude(~Q() & ~Q()), [self.n1, self.n2], ) def test_extra_select_literal_percent_s(self): # Allow %%s to escape select clauses self.assertEqual( Note.objects.extra(select={'foo': "'%%s'"})[0].foo, '%s' ) self.assertEqual( Note.objects.extra(select={'foo': "'%%s bar %%s'"})[0].foo, '%s bar %s' ) self.assertEqual( Note.objects.extra(select={'foo': "'bar %%s'"})[0].foo, 'bar %s' ) class SelectRelatedTests(TestCase): def test_tickets_3045_3288(self): # Once upon a time, select_related() with circular relations would loop # infinitely if you forgot to specify "depth". Now we set an arbitrary # default upper bound. 
self.assertQuerysetEqual(X.objects.all(), []) self.assertQuerysetEqual(X.objects.select_related(), []) class SubclassFKTests(TestCase): def test_ticket7778(self): # Model subclasses could not be deleted if a nullable foreign key # relates to a model that relates back. num_celebs = Celebrity.objects.count() tvc = TvChef.objects.create(name="Huey") self.assertEqual(Celebrity.objects.count(), num_celebs + 1) Fan.objects.create(fan_of=tvc) Fan.objects.create(fan_of=tvc) tvc.delete() # The parent object should have been deleted as well. self.assertEqual(Celebrity.objects.count(), num_celebs) class CustomPkTests(TestCase): def test_ticket7371(self): self.assertQuerysetEqual(Related.objects.order_by('custom'), []) class NullableRelOrderingTests(TestCase): def test_ticket10028(self): # Ordering by model related to nullable relations(!) should use outer # joins, so that all results are included. p1 = Plaything.objects.create(name="p1") self.assertSequenceEqual(Plaything.objects.all(), [p1]) def test_join_already_in_query(self): # Ordering by model related to nullable relations should not change # the join type of already existing joins. Plaything.objects.create(name="p1") s = SingleObject.objects.create(name='s') r = RelatedObject.objects.create(single=s, f=1) p2 = Plaything.objects.create(name="p2", others=r) qs = Plaything.objects.all().filter(others__isnull=False).order_by('pk') self.assertNotIn('JOIN', str(qs.query)) qs = Plaything.objects.all().filter(others__f__isnull=False).order_by('pk') self.assertIn('INNER', str(qs.query)) qs = qs.order_by('others__single__name') # The ordering by others__single__pk will add one new join (to single) # and that join must be LEFT join. The already existing join to related # objects must be kept INNER. So, we have both an INNER and a LEFT join # in the query. self.assertEqual(str(qs.query).count('LEFT'), 1) self.assertEqual(str(qs.query).count('INNER'), 1) self.assertSequenceEqual(qs, [p2]) class DisjunctiveFilterTests(TestCase): @classmethod def setUpTestData(cls): cls.n1 = Note.objects.create(note='n1', misc='foo', id=1) cls.e1 = ExtraInfo.objects.create(info='e1', note=cls.n1) def test_ticket7872(self): # Another variation on the disjunctive filtering theme. # For the purposes of this regression test, it's important that there is no # Join object related to the LeafA we create. l1 = LeafA.objects.create(data='first') self.assertSequenceEqual(LeafA.objects.all(), [l1]) self.assertSequenceEqual( LeafA.objects.filter(Q(data='first') | Q(join__b__data='second')), [l1], ) def test_ticket8283(self): # Checking that applying filters after a disjunction works correctly. 
self.assertSequenceEqual( (ExtraInfo.objects.filter(note=self.n1) | ExtraInfo.objects.filter(info='e2')).filter(note=self.n1), [self.e1], ) self.assertSequenceEqual( (ExtraInfo.objects.filter(info='e2') | ExtraInfo.objects.filter(note=self.n1)).filter(note=self.n1), [self.e1], ) class Queries6Tests(TestCase): @classmethod def setUpTestData(cls): generic = NamedCategory.objects.create(name="Generic") cls.t1 = Tag.objects.create(name='t1', category=generic) cls.t2 = Tag.objects.create(name='t2', parent=cls.t1, category=generic) cls.t3 = Tag.objects.create(name='t3', parent=cls.t1) cls.t4 = Tag.objects.create(name='t4', parent=cls.t3) cls.t5 = Tag.objects.create(name='t5', parent=cls.t3) n1 = Note.objects.create(note='n1', misc='foo', id=1) cls.ann1 = Annotation.objects.create(name='a1', tag=cls.t1) cls.ann1.notes.add(n1) cls.ann2 = Annotation.objects.create(name='a2', tag=cls.t4) def test_parallel_iterators(self): # Parallel iterators work. qs = Tag.objects.all() i1, i2 = iter(qs), iter(qs) self.assertEqual(repr(next(i1)), '<Tag: t1>') self.assertEqual(repr(next(i1)), '<Tag: t2>') self.assertEqual(repr(next(i2)), '<Tag: t1>') self.assertEqual(repr(next(i2)), '<Tag: t2>') self.assertEqual(repr(next(i2)), '<Tag: t3>') self.assertEqual(repr(next(i1)), '<Tag: t3>') qs = X.objects.all() self.assertFalse(qs) self.assertFalse(qs) def test_nested_queries_sql(self): # Nested queries should not evaluate the inner query as part of constructing the # SQL (so we should see a nested query here, indicated by two "SELECT" calls). qs = Annotation.objects.filter(notes__in=Note.objects.filter(note="xyzzy")) self.assertEqual( qs.query.get_compiler(qs.db).as_sql()[0].count('SELECT'), 2 ) def test_tickets_8921_9188(self): # Incorrect SQL was being generated for certain types of exclude() # queries that crossed multi-valued relations (#8921, #9188 and some # preemptively discovered cases). self.assertSequenceEqual( PointerA.objects.filter(connection__pointerb__id=1), [] ) self.assertSequenceEqual( PointerA.objects.exclude(connection__pointerb__id=1), [] ) self.assertSequenceEqual( Tag.objects.exclude(children=None), [self.t1, self.t3], ) # This example is tricky because the parent could be NULL, so only checking # parents with annotations omits some results (tag t1, in this case). self.assertSequenceEqual( Tag.objects.exclude(parent__annotation__name="a1"), [self.t1, self.t4, self.t5], ) # The annotation->tag link is single values and tag->children links is # multi-valued. So we have to split the exclude filter in the middle # and then optimize the inner query without losing results. self.assertSequenceEqual( Annotation.objects.exclude(tag__children__name="t2"), [self.ann2], ) # Nested queries are possible (although should be used with care, since # they have performance problems on backends like MySQL. self.assertSequenceEqual( Annotation.objects.filter(notes__in=Note.objects.filter(note="n1")), [self.ann1], ) def test_ticket3739(self): # The all() method on querysets returns a copy of the queryset. 
q1 = Tag.objects.order_by('name') self.assertIsNot(q1, q1.all()) def test_ticket_11320(self): qs = Tag.objects.exclude(category=None).exclude(category__name='foo') self.assertEqual(str(qs.query).count(' INNER JOIN '), 1) def test_distinct_ordered_sliced_subquery_aggregation(self): self.assertEqual(Tag.objects.distinct().order_by('category__name')[:3].count(), 3) def test_multiple_columns_with_the_same_name_slice(self): self.assertEqual( list(Tag.objects.order_by('name').values_list('name', 'category__name')[:2]), [('t1', 'Generic'), ('t2', 'Generic')], ) self.assertSequenceEqual( Tag.objects.order_by('name').select_related('category')[:2], [self.t1, self.t2], ) self.assertEqual( list(Tag.objects.order_by('-name').values_list('name', 'parent__name')[:2]), [('t5', 't3'), ('t4', 't3')], ) self.assertSequenceEqual( Tag.objects.order_by('-name').select_related('parent')[:2], [self.t5, self.t4], ) def test_col_alias_quoted(self): with CaptureQueriesContext(connection) as captured_queries: self.assertEqual( Tag.objects.values('parent').annotate( tag_per_parent=Count('pk'), ).aggregate(Max('tag_per_parent')), {'tag_per_parent__max': 2}, ) sql = captured_queries[0]['sql'] self.assertIn('AS %s' % connection.ops.quote_name('col1'), sql) class RawQueriesTests(TestCase): @classmethod def setUpTestData(cls): Note.objects.create(note='n1', misc='foo', id=1) def test_ticket14729(self): # Test representation of raw query with one or few parameters passed as list query = "SELECT * FROM queries_note WHERE note = %s" params = ['n1'] qs = Note.objects.raw(query, params=params) self.assertEqual(repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1>") query = "SELECT * FROM queries_note WHERE note = %s and misc = %s" params = ['n1', 'foo'] qs = Note.objects.raw(query, params=params) self.assertEqual(repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1 and misc = foo>") class GeneratorExpressionTests(SimpleTestCase): def test_ticket10432(self): # Using an empty iterator as the rvalue for an "__in" # lookup is legal. self.assertCountEqual(Note.objects.filter(pk__in=iter(())), []) class ComparisonTests(TestCase): @classmethod def setUpTestData(cls): cls.n1 = Note.objects.create(note='n1', misc='foo', id=1) e1 = ExtraInfo.objects.create(info='e1', note=cls.n1) cls.a2 = Author.objects.create(name='a2', num=2002, extra=e1) def test_ticket8597(self): # Regression tests for case-insensitive comparisons item_ab = Item.objects.create(name="a_b", created=datetime.datetime.now(), creator=self.a2, note=self.n1) item_xy = Item.objects.create(name="x%y", created=datetime.datetime.now(), creator=self.a2, note=self.n1) self.assertSequenceEqual( Item.objects.filter(name__iexact="A_b"), [item_ab], ) self.assertSequenceEqual( Item.objects.filter(name__iexact="x%Y"), [item_xy], ) self.assertSequenceEqual( Item.objects.filter(name__istartswith="A_b"), [item_ab], ) self.assertSequenceEqual( Item.objects.filter(name__iendswith="A_b"), [item_ab], ) class ExistsSql(TestCase): def test_exists(self): with CaptureQueriesContext(connection) as captured_queries: self.assertFalse(Tag.objects.exists()) # Ok - so the exist query worked - but did it include too many columns? 
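        # Background (hedged, not asserting the exact SQL): .exists() is expected to
        # select a constant rather than model columns, roughly
        #     SELECT 1 AS "a" FROM "queries_tag" LIMIT 1
        # so the checks below verify that the captured statement does not mention
        # the "id" or "name" columns.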
self.assertEqual(len(captured_queries), 1) qstr = captured_queries[0]['sql'] id, name = connection.ops.quote_name('id'), connection.ops.quote_name('name') self.assertNotIn(id, qstr) self.assertNotIn(name, qstr) def test_ticket_18414(self): Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='two', created=datetime.datetime.now()) self.assertTrue(Article.objects.exists()) self.assertTrue(Article.objects.distinct().exists()) self.assertTrue(Article.objects.distinct()[1:3].exists()) self.assertFalse(Article.objects.distinct()[1:1].exists()) @skipUnlessDBFeature('can_distinct_on_fields') def test_ticket_18414_distinct_on(self): Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='two', created=datetime.datetime.now()) self.assertTrue(Article.objects.distinct('name').exists()) self.assertTrue(Article.objects.distinct('name')[1:2].exists()) self.assertFalse(Article.objects.distinct('name')[2:3].exists()) class QuerysetOrderedTests(unittest.TestCase): """ Tests for the Queryset.ordered attribute. """ def test_no_default_or_explicit_ordering(self): self.assertIs(Annotation.objects.all().ordered, False) def test_cleared_default_ordering(self): self.assertIs(Tag.objects.all().ordered, True) self.assertIs(Tag.objects.all().order_by().ordered, False) def test_explicit_ordering(self): self.assertIs(Annotation.objects.all().order_by('id').ordered, True) def test_empty_queryset(self): self.assertIs(Annotation.objects.none().ordered, True) def test_order_by_extra(self): self.assertIs(Annotation.objects.all().extra(order_by=['id']).ordered, True) def test_annotated_ordering(self): qs = Annotation.objects.annotate(num_notes=Count('notes')) self.assertIs(qs.ordered, False) self.assertIs(qs.order_by('num_notes').ordered, True) def test_annotated_default_ordering(self): qs = Tag.objects.annotate(num_notes=Count('pk')) self.assertIs(qs.ordered, False) self.assertIs(qs.order_by('name').ordered, True) def test_annotated_values_default_ordering(self): qs = Tag.objects.values('name').annotate(num_notes=Count('pk')) self.assertIs(qs.ordered, False) self.assertIs(qs.order_by('name').ordered, True) @skipUnlessDBFeature('allow_sliced_subqueries_with_in') class SubqueryTests(TestCase): @classmethod def setUpTestData(cls): NamedCategory.objects.create(id=1, name='first') NamedCategory.objects.create(id=2, name='second') NamedCategory.objects.create(id=3, name='third') NamedCategory.objects.create(id=4, name='fourth') def test_ordered_subselect(self): "Subselects honor any manual ordering" query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:2]) self.assertEqual(set(query.values_list('id', flat=True)), {3, 4}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[:2]) self.assertEqual(set(query.values_list('id', flat=True)), {3, 4}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:2]) self.assertEqual(set(query.values_list('id', flat=True)), {3}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[2:]) self.assertEqual(set(query.values_list('id', flat=True)), {1, 2}) def test_slice_subquery_and_query(self): """ Slice a query that has a sliced subquery """ query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:2])[0:2] self.assertEqual({x.id for x in query}, {3, 
4}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:3])[1:3] self.assertEqual({x.id for x in query}, {3}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[2:])[1:] self.assertEqual({x.id for x in query}, {2}) def test_related_sliced_subquery(self): """ Related objects constraints can safely contain sliced subqueries. refs #22434 """ generic = NamedCategory.objects.create(id=5, name="Generic") t1 = Tag.objects.create(name='t1', category=generic) t2 = Tag.objects.create(name='t2', category=generic) ManagedModel.objects.create(data='mm1', tag=t1, public=True) mm2 = ManagedModel.objects.create(data='mm2', tag=t2, public=True) query = ManagedModel.normal_manager.filter( tag__in=Tag.objects.order_by('-id')[:1] ) self.assertEqual({x.id for x in query}, {mm2.id}) def test_sliced_delete(self): "Delete queries can safely contain sliced subqueries" DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:1]).delete() self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {1, 2, 3}) DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:2]).delete() self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {1, 3}) DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:]).delete() self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {3}) def test_distinct_ordered_sliced_subquery(self): # Implicit values('id'). self.assertSequenceEqual( NamedCategory.objects.filter( id__in=NamedCategory.objects.distinct().order_by('name')[0:2], ).order_by('name').values_list('name', flat=True), ['first', 'fourth'] ) # Explicit values('id'). self.assertSequenceEqual( NamedCategory.objects.filter( id__in=NamedCategory.objects.distinct().order_by('-name').values('id')[0:2], ).order_by('name').values_list('name', flat=True), ['second', 'third'] ) # Annotated value. 
self.assertSequenceEqual( DumbCategory.objects.filter( id__in=DumbCategory.objects.annotate( double_id=F('id') * 2 ).order_by('id').distinct().values('double_id')[0:2], ).order_by('id').values_list('id', flat=True), [2, 4] ) class QuerySetBitwiseOperationTests(TestCase): @classmethod def setUpTestData(cls): cls.school = School.objects.create() cls.room_1 = Classroom.objects.create(school=cls.school, has_blackboard=False, name='Room 1') cls.room_2 = Classroom.objects.create(school=cls.school, has_blackboard=True, name='Room 2') cls.room_3 = Classroom.objects.create(school=cls.school, has_blackboard=True, name='Room 3') cls.room_4 = Classroom.objects.create(school=cls.school, has_blackboard=False, name='Room 4') @skipUnlessDBFeature('allow_sliced_subqueries_with_in') def test_or_with_rhs_slice(self): qs1 = Classroom.objects.filter(has_blackboard=True) qs2 = Classroom.objects.filter(has_blackboard=False)[:1] self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_3]) @skipUnlessDBFeature('allow_sliced_subqueries_with_in') def test_or_with_lhs_slice(self): qs1 = Classroom.objects.filter(has_blackboard=True)[:1] qs2 = Classroom.objects.filter(has_blackboard=False) self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_4]) @skipUnlessDBFeature('allow_sliced_subqueries_with_in') def test_or_with_both_slice(self): qs1 = Classroom.objects.filter(has_blackboard=False)[:1] qs2 = Classroom.objects.filter(has_blackboard=True)[:1] self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2]) @skipUnlessDBFeature('allow_sliced_subqueries_with_in') def test_or_with_both_slice_and_ordering(self): qs1 = Classroom.objects.filter(has_blackboard=False).order_by('-pk')[:1] qs2 = Classroom.objects.filter(has_blackboard=True).order_by('-name')[:1] self.assertCountEqual(qs1 | qs2, [self.room_3, self.room_4]) def test_subquery_aliases(self): combined = School.objects.filter(pk__isnull=False) & School.objects.filter( Exists(Classroom.objects.filter( has_blackboard=True, school=OuterRef('pk'), )), ) self.assertSequenceEqual(combined, [self.school]) nested_combined = School.objects.filter(pk__in=combined.values('pk')) self.assertSequenceEqual(nested_combined, [self.school]) class CloneTests(TestCase): def test_evaluated_queryset_as_argument(self): "#13227 -- If a queryset is already evaluated, it can still be used as a query arg" n = Note(note='Test1', misc='misc') n.save() e = ExtraInfo(info='good', note=n) e.save() n_list = Note.objects.all() # Evaluate the Note queryset, populating the query cache list(n_list) # Make one of cached results unpickable. n_list._result_cache[0].lock = Lock() with self.assertRaises(TypeError): pickle.dumps(n_list) # Use the note queryset in a query, and evaluate # that query in a way that involves cloning. self.assertEqual(ExtraInfo.objects.filter(note__in=n_list)[0].info, 'good') def test_no_model_options_cloning(self): """ Cloning a queryset does not get out of hand. While complete testing is impossible, this is a sanity check against invalid use of deepcopy. refs #16759. """ opts_class = type(Note._meta) note_deepcopy = getattr(opts_class, "__deepcopy__", None) opts_class.__deepcopy__ = lambda obj, memo: self.fail("Model options shouldn't be cloned.") try: Note.objects.filter(pk__lte=F('pk') + 1).all() finally: if note_deepcopy is None: delattr(opts_class, "__deepcopy__") else: opts_class.__deepcopy__ = note_deepcopy def test_no_fields_cloning(self): """ Cloning a queryset does not get out of hand. 
While complete testing is impossible, this is a sanity check against invalid use of deepcopy. refs #16759. """ opts_class = type(Note._meta.get_field("misc")) note_deepcopy = getattr(opts_class, "__deepcopy__", None) opts_class.__deepcopy__ = lambda obj, memo: self.fail("Model fields shouldn't be cloned") try: Note.objects.filter(note=F('misc')).all() finally: if note_deepcopy is None: delattr(opts_class, "__deepcopy__") else: opts_class.__deepcopy__ = note_deepcopy class EmptyQuerySetTests(SimpleTestCase): def test_emptyqueryset_values(self): # #14366 -- Calling .values() on an empty QuerySet and then cloning # that should not cause an error self.assertCountEqual(Number.objects.none().values('num').order_by('num'), []) def test_values_subquery(self): self.assertCountEqual(Number.objects.filter(pk__in=Number.objects.none().values('pk')), []) self.assertCountEqual(Number.objects.filter(pk__in=Number.objects.none().values_list('pk')), []) def test_ticket_19151(self): # #19151 -- Calling .values() or .values_list() on an empty QuerySet # should return an empty QuerySet and not cause an error. q = Author.objects.none() self.assertCountEqual(q.values(), []) self.assertCountEqual(q.values_list(), []) class ValuesQuerysetTests(TestCase): @classmethod def setUpTestData(cls): Number.objects.create(num=72) def test_flat_values_list(self): qs = Number.objects.values_list("num") qs = qs.values_list("num", flat=True) self.assertSequenceEqual(qs, [72]) def test_extra_values(self): # testing for ticket 14930 issues qs = Number.objects.extra(select={'value_plus_x': 'num+%s', 'value_minus_x': 'num-%s'}, select_params=(1, 2)) qs = qs.order_by('value_minus_x') qs = qs.values('num') self.assertSequenceEqual(qs, [{'num': 72}]) def test_extra_values_order_twice(self): # testing for ticket 14930 issues qs = Number.objects.extra(select={'value_plus_one': 'num+1', 'value_minus_one': 'num-1'}) qs = qs.order_by('value_minus_one').order_by('value_plus_one') qs = qs.values('num') self.assertSequenceEqual(qs, [{'num': 72}]) def test_extra_values_order_multiple(self): # Postgres doesn't allow constants in order by, so check for that. 
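        # Background note (hedged): PostgreSQL rejects string constants in ORDER BY
        # (e.g. ORDER BY '1' raises "non-integer constant in ORDER BY"), so ordering
        # by the extra-select alias 'constant_value' below has to be expressed in a
        # form the backend accepts rather than as a bare literal.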
qs = Number.objects.extra(select={ 'value_plus_one': 'num+1', 'value_minus_one': 'num-1', 'constant_value': '1' }) qs = qs.order_by('value_plus_one', 'value_minus_one', 'constant_value') qs = qs.values('num') self.assertSequenceEqual(qs, [{'num': 72}]) def test_extra_values_order_in_extra(self): # testing for ticket 14930 issues qs = Number.objects.extra( select={'value_plus_one': 'num+1', 'value_minus_one': 'num-1'}, order_by=['value_minus_one'], ) qs = qs.values('num') def test_extra_select_params_values_order_in_extra(self): # testing for 23259 issue qs = Number.objects.extra( select={'value_plus_x': 'num+%s'}, select_params=[1], order_by=['value_plus_x'], ) qs = qs.filter(num=72) qs = qs.values('num') self.assertSequenceEqual(qs, [{'num': 72}]) def test_extra_multiple_select_params_values_order_by(self): # testing for 23259 issue qs = Number.objects.extra(select={'value_plus_x': 'num+%s', 'value_minus_x': 'num-%s'}, select_params=(72, 72)) qs = qs.order_by('value_minus_x') qs = qs.filter(num=1) qs = qs.values('num') self.assertSequenceEqual(qs, []) def test_extra_values_list(self): # testing for ticket 14930 issues qs = Number.objects.extra(select={'value_plus_one': 'num+1'}) qs = qs.order_by('value_plus_one') qs = qs.values_list('num') self.assertSequenceEqual(qs, [(72,)]) def test_flat_extra_values_list(self): # testing for ticket 14930 issues qs = Number.objects.extra(select={'value_plus_one': 'num+1'}) qs = qs.order_by('value_plus_one') qs = qs.values_list('num', flat=True) self.assertSequenceEqual(qs, [72]) def test_field_error_values_list(self): # see #23443 msg = "Cannot resolve keyword %r into field. Join on 'name' not permitted." % 'foo' with self.assertRaisesMessage(FieldError, msg): Tag.objects.values_list('name__foo') def test_named_values_list_flat(self): msg = "'flat' and 'named' can't be used together." 
with self.assertRaisesMessage(TypeError, msg): Number.objects.values_list('num', flat=True, named=True) def test_named_values_list_bad_field_name(self): msg = "Type names and field names must be valid identifiers: '1'" with self.assertRaisesMessage(ValueError, msg): Number.objects.extra(select={'1': 'num+1'}).values_list('1', named=True).first() def test_named_values_list_with_fields(self): qs = Number.objects.extra(select={'num2': 'num+1'}).annotate(Count('id')) values = qs.values_list('num', 'num2', named=True).first() self.assertEqual(type(values).__name__, 'Row') self.assertEqual(values._fields, ('num', 'num2')) self.assertEqual(values.num, 72) self.assertEqual(values.num2, 73) def test_named_values_list_without_fields(self): qs = Number.objects.extra(select={'num2': 'num+1'}).annotate(Count('id')) values = qs.values_list(named=True).first() self.assertEqual(type(values).__name__, 'Row') self.assertEqual( values._fields, ('num2', 'id', 'num', 'other_num', 'another_num', 'id__count'), ) self.assertEqual(values.num, 72) self.assertEqual(values.num2, 73) self.assertEqual(values.id__count, 1) def test_named_values_list_expression_with_default_alias(self): expr = Count('id') values = Number.objects.annotate(id__count1=expr).values_list(expr, 'id__count1', named=True).first() self.assertEqual(values._fields, ('id__count2', 'id__count1')) def test_named_values_list_expression(self): expr = F('num') + 1 qs = Number.objects.annotate(combinedexpression1=expr).values_list(expr, 'combinedexpression1', named=True) values = qs.first() self.assertEqual(values._fields, ('combinedexpression2', 'combinedexpression1')) def test_named_values_pickle(self): value = Number.objects.values_list('num', 'other_num', named=True).get() self.assertEqual(value, (72, None)) self.assertEqual(pickle.loads(pickle.dumps(value)), value) class QuerySetSupportsPythonIdioms(TestCase): @classmethod def setUpTestData(cls): some_date = datetime.datetime(2014, 5, 16, 12, 1) cls.articles = [ Article.objects.create(name=f'Article {i}', created=some_date) for i in range(1, 8) ] def get_ordered_articles(self): return Article.objects.all().order_by('name') def test_can_get_items_using_index_and_slice_notation(self): self.assertEqual(self.get_ordered_articles()[0].name, 'Article 1') self.assertSequenceEqual( self.get_ordered_articles()[1:3], [self.articles[1], self.articles[2]], ) def test_slicing_with_steps_can_be_used(self): self.assertSequenceEqual( self.get_ordered_articles()[::2], [ self.articles[0], self.articles[2], self.articles[4], self.articles[6], ] ) def test_slicing_without_step_is_lazy(self): with self.assertNumQueries(0): self.get_ordered_articles()[0:5] def test_slicing_with_tests_is_not_lazy(self): with self.assertNumQueries(1): self.get_ordered_articles()[0:5:3] def test_slicing_can_slice_again_after_slicing(self): self.assertSequenceEqual( self.get_ordered_articles()[0:5][0:2], [self.articles[0], self.articles[1]], ) self.assertSequenceEqual(self.get_ordered_articles()[0:5][4:], [self.articles[4]]) self.assertSequenceEqual(self.get_ordered_articles()[0:5][5:], []) # Some more tests! self.assertSequenceEqual( self.get_ordered_articles()[2:][0:2], [self.articles[2], self.articles[3]], ) self.assertSequenceEqual( self.get_ordered_articles()[2:][:2], [self.articles[2], self.articles[3]], ) self.assertSequenceEqual(self.get_ordered_articles()[2:][2:3], [self.articles[4]]) # Using an offset without a limit is also possible. 
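        # Editor's note (hedged): an offset without a limit is typically compiled as
        # an OFFSET clause alone, or with a backend-specific "no limit" placeholder
        # on databases that require LIMIT alongside OFFSET (e.g. SQLite's LIMIT -1),
        # which is what the slice below exercises.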
self.assertSequenceEqual( self.get_ordered_articles()[5:], [self.articles[5], self.articles[6]], ) def test_slicing_cannot_filter_queryset_once_sliced(self): msg = 'Cannot filter a query once a slice has been taken.' with self.assertRaisesMessage(TypeError, msg): Article.objects.all()[0:5].filter(id=1) def test_slicing_cannot_reorder_queryset_once_sliced(self): msg = 'Cannot reorder a query once a slice has been taken.' with self.assertRaisesMessage(TypeError, msg): Article.objects.all()[0:5].order_by('id') def test_slicing_cannot_combine_queries_once_sliced(self): msg = 'Cannot combine queries once a slice has been taken.' with self.assertRaisesMessage(TypeError, msg): Article.objects.all()[0:1] & Article.objects.all()[4:5] def test_slicing_negative_indexing_not_supported_for_single_element(self): """hint: inverting your ordering might do what you need""" with self.assertRaisesMessage(AssertionError, "Negative indexing is not supported."): Article.objects.all()[-1] def test_slicing_negative_indexing_not_supported_for_range(self): """hint: inverting your ordering might do what you need""" with self.assertRaisesMessage(AssertionError, "Negative indexing is not supported."): Article.objects.all()[0:-5] def test_invalid_index(self): msg = 'QuerySet indices must be integers or slices, not str.' with self.assertRaisesMessage(TypeError, msg): Article.objects.all()['foo'] def test_can_get_number_of_items_in_queryset_using_standard_len(self): self.assertEqual(len(Article.objects.filter(name__exact='Article 1')), 1) def test_can_combine_queries_using_and_and_or_operators(self): s1 = Article.objects.filter(name__exact='Article 1') s2 = Article.objects.filter(name__exact='Article 2') self.assertSequenceEqual( (s1 | s2).order_by('name'), [self.articles[0], self.articles[1]], ) self.assertSequenceEqual(s1 & s2, []) class WeirdQuerysetSlicingTests(TestCase): @classmethod def setUpTestData(cls): Number.objects.create(num=1) Number.objects.create(num=2) Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='two', created=datetime.datetime.now()) Article.objects.create(name='three', created=datetime.datetime.now()) Article.objects.create(name='four', created=datetime.datetime.now()) food = Food.objects.create(name='spam') Eaten.objects.create(meal='spam with eggs', food=food) def test_tickets_7698_10202(self): # People like to slice with '0' as the high-water mark. self.assertQuerysetEqual(Article.objects.all()[0:0], []) self.assertQuerysetEqual(Article.objects.all()[0:0][:10], []) self.assertEqual(Article.objects.all()[:0].count(), 0) msg = 'Cannot change a query once a slice has been taken.' 
with self.assertRaisesMessage(TypeError, msg): Article.objects.all()[:0].latest('created') def test_empty_resultset_sql(self): # ticket #12192 self.assertNumQueries(0, lambda: list(Number.objects.all()[1:1])) def test_empty_sliced_subquery(self): self.assertEqual(Eaten.objects.filter(food__in=Food.objects.all()[0:0]).count(), 0) def test_empty_sliced_subquery_exclude(self): self.assertEqual(Eaten.objects.exclude(food__in=Food.objects.all()[0:0]).count(), 1) def test_zero_length_values_slicing(self): n = 42 with self.assertNumQueries(0): self.assertQuerysetEqual(Article.objects.values()[n:n], []) self.assertQuerysetEqual(Article.objects.values_list()[n:n], []) class EscapingTests(TestCase): def test_ticket_7302(self): # Reserved names are appropriately escaped r_a = ReservedName.objects.create(name='a', order=42) r_b = ReservedName.objects.create(name='b', order=37) self.assertSequenceEqual( ReservedName.objects.all().order_by('order'), [r_b, r_a], ) self.assertSequenceEqual( ReservedName.objects.extra(select={'stuff': 'name'}, order_by=('order', 'stuff')), [r_b, r_a], ) class ToFieldTests(TestCase): def test_in_query(self): apple = Food.objects.create(name="apple") pear = Food.objects.create(name="pear") lunch = Eaten.objects.create(food=apple, meal="lunch") dinner = Eaten.objects.create(food=pear, meal="dinner") self.assertEqual( set(Eaten.objects.filter(food__in=[apple, pear])), {lunch, dinner}, ) def test_in_subquery(self): apple = Food.objects.create(name="apple") lunch = Eaten.objects.create(food=apple, meal="lunch") self.assertEqual( set(Eaten.objects.filter(food__in=Food.objects.filter(name='apple'))), {lunch} ) self.assertEqual( set(Eaten.objects.filter(food__in=Food.objects.filter(name='apple').values('eaten__meal'))), set() ) self.assertEqual( set(Food.objects.filter(eaten__in=Eaten.objects.filter(meal='lunch'))), {apple} ) def test_nested_in_subquery(self): extra = ExtraInfo.objects.create() author = Author.objects.create(num=42, extra=extra) report = Report.objects.create(creator=author) comment = ReportComment.objects.create(report=report) comments = ReportComment.objects.filter( report__in=Report.objects.filter( creator__in=extra.author_set.all(), ), ) self.assertSequenceEqual(comments, [comment]) def test_reverse_in(self): apple = Food.objects.create(name="apple") pear = Food.objects.create(name="pear") lunch_apple = Eaten.objects.create(food=apple, meal="lunch") lunch_pear = Eaten.objects.create(food=pear, meal="dinner") self.assertEqual( set(Food.objects.filter(eaten__in=[lunch_apple, lunch_pear])), {apple, pear} ) def test_single_object(self): apple = Food.objects.create(name="apple") lunch = Eaten.objects.create(food=apple, meal="lunch") dinner = Eaten.objects.create(food=apple, meal="dinner") self.assertEqual( set(Eaten.objects.filter(food=apple)), {lunch, dinner} ) def test_single_object_reverse(self): apple = Food.objects.create(name="apple") lunch = Eaten.objects.create(food=apple, meal="lunch") self.assertEqual( set(Food.objects.filter(eaten=lunch)), {apple} ) def test_recursive_fk(self): node1 = Node.objects.create(num=42) node2 = Node.objects.create(num=1, parent=node1) self.assertEqual( list(Node.objects.filter(parent=node1)), [node2] ) def test_recursive_fk_reverse(self): node1 = Node.objects.create(num=42) node2 = Node.objects.create(num=1, parent=node1) self.assertEqual( list(Node.objects.filter(node=node2)), [node1] ) class IsNullTests(TestCase): def test_primary_key(self): custom = CustomPk.objects.create(name='pk') null = Related.objects.create() notnull = 
Related.objects.create(custom=custom) self.assertSequenceEqual(Related.objects.filter(custom__isnull=False), [notnull]) self.assertSequenceEqual(Related.objects.filter(custom__isnull=True), [null]) def test_to_field(self): apple = Food.objects.create(name="apple") e1 = Eaten.objects.create(food=apple, meal="lunch") e2 = Eaten.objects.create(meal="lunch") self.assertSequenceEqual( Eaten.objects.filter(food__isnull=False), [e1], ) self.assertSequenceEqual( Eaten.objects.filter(food__isnull=True), [e2], ) class ConditionalTests(TestCase): """Tests whose execution depend on different environment conditions like Python version or DB backend features""" @classmethod def setUpTestData(cls): generic = NamedCategory.objects.create(name="Generic") t1 = Tag.objects.create(name='t1', category=generic) Tag.objects.create(name='t2', parent=t1, category=generic) t3 = Tag.objects.create(name='t3', parent=t1) Tag.objects.create(name='t4', parent=t3) Tag.objects.create(name='t5', parent=t3) def test_infinite_loop(self): # If you're not careful, it's possible to introduce infinite loops via # default ordering on foreign keys in a cycle. We detect that. with self.assertRaisesMessage(FieldError, 'Infinite loop caused by ordering.'): list(LoopX.objects.all()) # Force queryset evaluation with list() with self.assertRaisesMessage(FieldError, 'Infinite loop caused by ordering.'): list(LoopZ.objects.all()) # Force queryset evaluation with list() # Note that this doesn't cause an infinite loop, since the default # ordering on the Tag model is empty (and thus defaults to using "id" # for the related field). self.assertEqual(len(Tag.objects.order_by('parent')), 5) # ... but you can still order in a non-recursive fashion among linked # fields (the previous test failed because the default ordering was # recursive). self.assertQuerysetEqual( LoopX.objects.all().order_by('y__x__y__x__id'), [] ) # When grouping without specifying ordering, we add an explicit "ORDER BY NULL" # portion in MySQL to prevent unnecessary sorting. @skipUnlessDBFeature('requires_explicit_null_ordering_when_grouping') def test_null_ordering_added(self): query = Tag.objects.values_list('parent_id', flat=True).order_by().query query.group_by = ['parent_id'] sql = query.get_compiler(DEFAULT_DB_ALIAS).as_sql()[0] fragment = "ORDER BY " pos = sql.find(fragment) self.assertEqual(sql.find(fragment, pos + 1), -1) self.assertEqual(sql.find("NULL", pos + len(fragment)), pos + len(fragment)) def test_in_list_limit(self): # The "in" lookup works with lists of 1000 items or more. # The numbers amount is picked to force three different IN batches # for Oracle, yet to be less than 2100 parameter limit for MSSQL. numbers = list(range(2050)) max_query_params = connection.features.max_query_params if max_query_params is None or max_query_params >= len(numbers): Number.objects.bulk_create(Number(num=num) for num in numbers) for number in [1000, 1001, 2000, len(numbers)]: with self.subTest(number=number): self.assertEqual(Number.objects.filter(num__in=numbers[:number]).count(), number) class UnionTests(unittest.TestCase): """ Tests for the union of two querysets. Bug #12252. 
""" @classmethod def setUpTestData(cls): objectas = [] objectbs = [] objectcs = [] a_info = ['one', 'two', 'three'] for name in a_info: o = ObjectA(name=name) o.save() objectas.append(o) b_info = [('un', 1, objectas[0]), ('deux', 2, objectas[0]), ('trois', 3, objectas[2])] for name, number, objecta in b_info: o = ObjectB(name=name, num=number, objecta=objecta) o.save() objectbs.append(o) c_info = [('ein', objectas[2], objectbs[2]), ('zwei', objectas[1], objectbs[1])] for name, objecta, objectb in c_info: o = ObjectC(name=name, objecta=objecta, objectb=objectb) o.save() objectcs.append(o) def check_union(self, model, Q1, Q2): filter = model.objects.filter self.assertEqual(set(filter(Q1) | filter(Q2)), set(filter(Q1 | Q2))) self.assertEqual(set(filter(Q2) | filter(Q1)), set(filter(Q1 | Q2))) def test_A_AB(self): Q1 = Q(name='two') Q2 = Q(objectb__name='deux') self.check_union(ObjectA, Q1, Q2) def test_A_AB2(self): Q1 = Q(name='two') Q2 = Q(objectb__name='deux', objectb__num=2) self.check_union(ObjectA, Q1, Q2) def test_AB_ACB(self): Q1 = Q(objectb__name='deux') Q2 = Q(objectc__objectb__name='deux') self.check_union(ObjectA, Q1, Q2) def test_BAB_BAC(self): Q1 = Q(objecta__objectb__name='deux') Q2 = Q(objecta__objectc__name='ein') self.check_union(ObjectB, Q1, Q2) def test_BAB_BACB(self): Q1 = Q(objecta__objectb__name='deux') Q2 = Q(objecta__objectc__objectb__name='trois') self.check_union(ObjectB, Q1, Q2) def test_BA_BCA__BAB_BAC_BCA(self): Q1 = Q(objecta__name='one', objectc__objecta__name='two') Q2 = Q(objecta__objectc__name='ein', objectc__objecta__name='three', objecta__objectb__name='trois') self.check_union(ObjectB, Q1, Q2) class DefaultValuesInsertTest(TestCase): def test_no_extra_params(self): """ Can create an instance of a model with only the PK field (#17056)." 
""" DumbCategory.objects.create() class ExcludeTests(TestCase): @classmethod def setUpTestData(cls): f1 = Food.objects.create(name='apples') cls.f2 = Food.objects.create(name='oranges') Eaten.objects.create(food=f1, meal='dinner') cls.j1 = Job.objects.create(name='Manager') cls.r1 = Responsibility.objects.create(description='Playing golf') cls.j2 = Job.objects.create(name='Programmer') cls.r2 = Responsibility.objects.create(description='Programming') JobResponsibilities.objects.create(job=cls.j1, responsibility=cls.r1) JobResponsibilities.objects.create(job=cls.j2, responsibility=cls.r2) def test_to_field(self): self.assertSequenceEqual( Food.objects.exclude(eaten__meal='dinner'), [self.f2], ) self.assertSequenceEqual( Job.objects.exclude(responsibilities__description='Playing golf'), [self.j2], ) self.assertSequenceEqual( Responsibility.objects.exclude(jobs__name='Manager'), [self.r2], ) def test_ticket14511(self): alex = Person.objects.get_or_create(name='Alex')[0] jane = Person.objects.get_or_create(name='Jane')[0] oracle = Company.objects.get_or_create(name='Oracle')[0] google = Company.objects.get_or_create(name='Google')[0] microsoft = Company.objects.get_or_create(name='Microsoft')[0] intel = Company.objects.get_or_create(name='Intel')[0] def employ(employer, employee, title): Employment.objects.get_or_create(employee=employee, employer=employer, title=title) employ(oracle, alex, 'Engineer') employ(oracle, alex, 'Developer') employ(google, alex, 'Engineer') employ(google, alex, 'Manager') employ(microsoft, alex, 'Manager') employ(intel, alex, 'Manager') employ(microsoft, jane, 'Developer') employ(intel, jane, 'Manager') alex_tech_employers = alex.employers.filter( employment__title__in=('Engineer', 'Developer')).distinct().order_by('name') self.assertSequenceEqual(alex_tech_employers, [google, oracle]) alex_nontech_employers = alex.employers.exclude( employment__title__in=('Engineer', 'Developer')).distinct().order_by('name') self.assertSequenceEqual(alex_nontech_employers, [google, intel, microsoft]) def test_exclude_reverse_fk_field_ref(self): tag = Tag.objects.create() Note.objects.create(tag=tag, note='note') annotation = Annotation.objects.create(name='annotation', tag=tag) self.assertEqual(Annotation.objects.exclude(tag__note__note=F('name')).get(), annotation) def test_exclude_with_circular_fk_relation(self): self.assertEqual(ObjectB.objects.exclude(objecta__objectb__name=F('name')).count(), 0) def test_subquery_exclude_outerref(self): qs = JobResponsibilities.objects.filter( Exists(Responsibility.objects.exclude(jobs=OuterRef('job'))), ) self.assertTrue(qs.exists()) self.r1.delete() self.assertFalse(qs.exists()) def test_exclude_nullable_fields(self): number = Number.objects.create(num=1, other_num=1) Number.objects.create(num=2, other_num=2, another_num=2) self.assertSequenceEqual( Number.objects.exclude(other_num=F('another_num')), [number], ) self.assertSequenceEqual( Number.objects.exclude(num=F('another_num')), [number], ) def test_exclude_multivalued_exists(self): with CaptureQueriesContext(connection) as captured_queries: self.assertSequenceEqual( Job.objects.exclude(responsibilities__description='Programming'), [self.j1], ) self.assertIn('exists', captured_queries[0]['sql'].lower()) def test_exclude_subquery(self): subquery = JobResponsibilities.objects.filter( responsibility__description='bar', ) | JobResponsibilities.objects.exclude( job__responsibilities__description='foo', ) self.assertCountEqual( Job.objects.annotate( responsibility=subquery.filter( 
job=OuterRef('name'), ).values('id')[:1] ), [self.j1, self.j2], ) class ExcludeTest17600(TestCase): """ Some regression tests for ticket #17600. Some of these likely duplicate other existing tests. """ @classmethod def setUpTestData(cls): # Create a few Orders. cls.o1 = Order.objects.create(pk=1) cls.o2 = Order.objects.create(pk=2) cls.o3 = Order.objects.create(pk=3) # Create some OrderItems for the first order with homogeneous # status_id values cls.oi1 = OrderItem.objects.create(order=cls.o1, status=1) cls.oi2 = OrderItem.objects.create(order=cls.o1, status=1) cls.oi3 = OrderItem.objects.create(order=cls.o1, status=1) # Create some OrderItems for the second order with heterogeneous # status_id values cls.oi4 = OrderItem.objects.create(order=cls.o2, status=1) cls.oi5 = OrderItem.objects.create(order=cls.o2, status=2) cls.oi6 = OrderItem.objects.create(order=cls.o2, status=3) # Create some OrderItems for the third order with heterogeneous # status_id values cls.oi7 = OrderItem.objects.create(order=cls.o3, status=2) cls.oi8 = OrderItem.objects.create(order=cls.o3, status=3) cls.oi9 = OrderItem.objects.create(order=cls.o3, status=4) def test_exclude_plain(self): """ This should exclude Orders which have some items with status 1 """ self.assertSequenceEqual( Order.objects.exclude(items__status=1), [self.o3], ) def test_exclude_plain_distinct(self): """ This should exclude Orders which have some items with status 1 """ self.assertSequenceEqual( Order.objects.exclude(items__status=1).distinct(), [self.o3], ) def test_exclude_with_q_object_distinct(self): """ This should exclude Orders which have some items with status 1 """ self.assertSequenceEqual( Order.objects.exclude(Q(items__status=1)).distinct(), [self.o3], ) def test_exclude_with_q_object_no_distinct(self): """ This should exclude Orders which have some items with status 1 """ self.assertSequenceEqual( Order.objects.exclude(Q(items__status=1)), [self.o3], ) def test_exclude_with_q_is_equal_to_plain_exclude(self): """ Using exclude(condition) and exclude(Q(condition)) should yield the same QuerySet """ self.assertEqual( list(Order.objects.exclude(items__status=1).distinct()), list(Order.objects.exclude(Q(items__status=1)).distinct())) def test_exclude_with_q_is_equal_to_plain_exclude_variation(self): """ Using exclude(condition) and exclude(Q(condition)) should yield the same QuerySet """ self.assertEqual( list(Order.objects.exclude(items__status=1)), list(Order.objects.exclude(Q(items__status=1)).distinct())) @unittest.expectedFailure def test_only_orders_with_all_items_having_status_1(self): """ This should only return orders having ALL items set to status 1, or those orders not having any items at all. The correct way to write this query in SQL seems to be using two nested subqueries.
""" self.assertQuerysetEqual( Order.objects.exclude(~Q(items__status=1)).distinct(), [self.o1], ) class Exclude15786(TestCase): """Regression test for #15786""" def test_ticket15786(self): c1 = SimpleCategory.objects.create(name='c1') c2 = SimpleCategory.objects.create(name='c2') OneToOneCategory.objects.create(category=c1) OneToOneCategory.objects.create(category=c2) rel = CategoryRelationship.objects.create(first=c1, second=c2) self.assertEqual( CategoryRelationship.objects.exclude( first__onetoonecategory=F('second__onetoonecategory') ).get(), rel ) class NullInExcludeTest(TestCase): @classmethod def setUpTestData(cls): NullableName.objects.create(name='i1') NullableName.objects.create() def test_null_in_exclude_qs(self): none_val = '' if connection.features.interprets_empty_strings_as_nulls else None self.assertQuerysetEqual( NullableName.objects.exclude(name__in=[]), ['i1', none_val], attrgetter('name')) self.assertQuerysetEqual( NullableName.objects.exclude(name__in=['i1']), [none_val], attrgetter('name')) self.assertQuerysetEqual( NullableName.objects.exclude(name__in=['i3']), ['i1', none_val], attrgetter('name')) inner_qs = NullableName.objects.filter(name='i1').values_list('name') self.assertQuerysetEqual( NullableName.objects.exclude(name__in=inner_qs), [none_val], attrgetter('name')) # The inner queryset wasn't executed - it should be turned # into subquery above self.assertIs(inner_qs._result_cache, None) @unittest.expectedFailure def test_col_not_in_list_containing_null(self): """ The following case is not handled properly because SQL's COL NOT IN (list containing null) handling is too weird to abstract away. """ self.assertQuerysetEqual( NullableName.objects.exclude(name__in=[None]), ['i1'], attrgetter('name')) def test_double_exclude(self): self.assertEqual( list(NullableName.objects.filter(~~Q(name='i1'))), list(NullableName.objects.filter(Q(name='i1')))) self.assertNotIn( 'IS NOT NULL', str(NullableName.objects.filter(~~Q(name='i1')).query)) class EmptyStringsAsNullTest(TestCase): """ Filtering on non-null character fields works as expected. The reason for these tests is that Oracle treats '' as NULL, and this can cause problems in query construction. Refs #17957. """ @classmethod def setUpTestData(cls): cls.nc = NamedCategory.objects.create(name='') def test_direct_exclude(self): self.assertQuerysetEqual( NamedCategory.objects.exclude(name__in=['nonexistent']), [self.nc.pk], attrgetter('pk') ) def test_joined_exclude(self): self.assertQuerysetEqual( DumbCategory.objects.exclude(namedcategory__name__in=['nonexistent']), [self.nc.pk], attrgetter('pk') ) def test_21001(self): foo = NamedCategory.objects.create(name='foo') self.assertQuerysetEqual( NamedCategory.objects.exclude(name=''), [foo.pk], attrgetter('pk') ) class ProxyQueryCleanupTest(TestCase): def test_evaluated_proxy_count(self): """ Generating the query string doesn't alter the query's state in irreversible ways. Refs #18248. 
""" ProxyCategory.objects.create() qs = ProxyCategory.objects.all() self.assertEqual(qs.count(), 1) str(qs.query) self.assertEqual(qs.count(), 1) class WhereNodeTest(SimpleTestCase): class DummyNode: def as_sql(self, compiler, connection): return 'dummy', [] class MockCompiler: def compile(self, node): return node.as_sql(self, connection) def __call__(self, name): return connection.ops.quote_name(name) def test_empty_full_handling_conjunction(self): compiler = WhereNodeTest.MockCompiler() w = WhereNode(children=[NothingNode()]) with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('', [])) w = WhereNode(children=[self.DummyNode(), self.DummyNode()]) self.assertEqual(w.as_sql(compiler, connection), ('(dummy AND dummy)', [])) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy AND dummy)', [])) w = WhereNode(children=[NothingNode(), self.DummyNode()]) with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('', [])) def test_empty_full_handling_disjunction(self): compiler = WhereNodeTest.MockCompiler() w = WhereNode(children=[NothingNode()], connector='OR') with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('', [])) w = WhereNode(children=[self.DummyNode(), self.DummyNode()], connector='OR') self.assertEqual(w.as_sql(compiler, connection), ('(dummy OR dummy)', [])) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy OR dummy)', [])) w = WhereNode(children=[NothingNode(), self.DummyNode()], connector='OR') self.assertEqual(w.as_sql(compiler, connection), ('dummy', [])) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy)', [])) def test_empty_nodes(self): compiler = WhereNodeTest.MockCompiler() empty_w = WhereNode() w = WhereNode(children=[empty_w, empty_w]) self.assertEqual(w.as_sql(compiler, connection), ('', [])) w.negate() with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.connector = 'OR' with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('', [])) w = WhereNode(children=[empty_w, NothingNode()], connector='OR') self.assertEqual(w.as_sql(compiler, connection), ('', [])) w = WhereNode(children=[empty_w, NothingNode()], connector='AND') with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) class QuerySetExceptionTests(SimpleTestCase): def test_iter_exceptions(self): qs = ExtraInfo.objects.only('author') msg = "'ManyToOneRel' object has no attribute 'attname'" with self.assertRaisesMessage(AttributeError, msg): list(qs) def test_invalid_order_by(self): msg = ( "Cannot resolve keyword '*' into field. Choices are: created, id, " "name" ) with self.assertRaisesMessage(FieldError, msg): Article.objects.order_by('*') def test_invalid_order_by_raw_column_alias(self): msg = ( "Cannot resolve keyword 'queries_author.name' into field. Choices " "are: cover, created, creator, creator_id, id, modified, name, " "note, note_id, tags" ) with self.assertRaisesMessage(FieldError, msg): Item.objects.values('creator__name').order_by('queries_author.name') def test_invalid_queryset_model(self): msg = 'Cannot use QuerySet for "Article": Use a QuerySet for "ExtraInfo".' 
with self.assertRaisesMessage(ValueError, msg): list(Author.objects.filter(extra=Article.objects.all())) class NullJoinPromotionOrTest(TestCase): @classmethod def setUpTestData(cls): cls.d1 = ModelD.objects.create(name='foo') d2 = ModelD.objects.create(name='bar') cls.a1 = ModelA.objects.create(name='a1', d=cls.d1) c = ModelC.objects.create(name='c') b = ModelB.objects.create(name='b', c=c) cls.a2 = ModelA.objects.create(name='a2', b=b, d=d2) def test_ticket_17886(self): # The first Q-object is generating the match, the rest of the filters # should not remove the match even if they do not match anything. The # problem here was that b__name generates a LOUTER JOIN, then # b__c__name generates join to c, which the ORM tried to promote but # failed as that join isn't nullable. q_obj = ( Q(d__name='foo') | Q(b__name='foo') | Q(b__c__name='foo') ) qset = ModelA.objects.filter(q_obj) self.assertEqual(list(qset), [self.a1]) # We generate one INNER JOIN to D. The join is direct and not nullable # so we can use INNER JOIN for it. However, we can NOT use INNER JOIN # for the b->c join, as a->b is nullable. self.assertEqual(str(qset.query).count('INNER JOIN'), 1) def test_isnull_filter_promotion(self): qs = ModelA.objects.filter(Q(b__name__isnull=True)) self.assertEqual(str(qs.query).count('LEFT OUTER'), 1) self.assertEqual(list(qs), [self.a1]) qs = ModelA.objects.filter(~Q(b__name__isnull=True)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(list(qs), [self.a2]) qs = ModelA.objects.filter(~~Q(b__name__isnull=True)) self.assertEqual(str(qs.query).count('LEFT OUTER'), 1) self.assertEqual(list(qs), [self.a1]) qs = ModelA.objects.filter(Q(b__name__isnull=False)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(list(qs), [self.a2]) qs = ModelA.objects.filter(~Q(b__name__isnull=False)) self.assertEqual(str(qs.query).count('LEFT OUTER'), 1) self.assertEqual(list(qs), [self.a1]) qs = ModelA.objects.filter(~~Q(b__name__isnull=False)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(list(qs), [self.a2]) def test_null_join_demotion(self): qs = ModelA.objects.filter(Q(b__name__isnull=False) & Q(b__name__isnull=True)) self.assertIn(' INNER JOIN ', str(qs.query)) qs = ModelA.objects.filter(Q(b__name__isnull=True) & Q(b__name__isnull=False)) self.assertIn(' INNER JOIN ', str(qs.query)) qs = ModelA.objects.filter(Q(b__name__isnull=False) | Q(b__name__isnull=True)) self.assertIn(' LEFT OUTER JOIN ', str(qs.query)) qs = ModelA.objects.filter(Q(b__name__isnull=True) | Q(b__name__isnull=False)) self.assertIn(' LEFT OUTER JOIN ', str(qs.query)) def test_ticket_21366(self): n = Note.objects.create(note='n', misc='m') e = ExtraInfo.objects.create(info='info', note=n) a = Author.objects.create(name='Author1', num=1, extra=e) Ranking.objects.create(rank=1, author=a) r1 = Report.objects.create(name='Foo', creator=a) r2 = Report.objects.create(name='Bar') Report.objects.create(name='Bar', creator=a) qs = Report.objects.filter( Q(creator__ranking__isnull=True) | Q(creator__ranking__rank=1, name='Foo') ) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) self.assertEqual(str(qs.query).count(' JOIN '), 2) self.assertSequenceEqual(qs.order_by('name'), [r2, r1]) def test_ticket_21748(self): i1 = Identifier.objects.create(name='i1') i2 = Identifier.objects.create(name='i2') i3 = Identifier.objects.create(name='i3') Program.objects.create(identifier=i1) Channel.objects.create(identifier=i1) Program.objects.create(identifier=i2) 
self.assertSequenceEqual(Identifier.objects.filter(program=None, channel=None), [i3]) self.assertSequenceEqual(Identifier.objects.exclude(program=None, channel=None).order_by('name'), [i1, i2]) def test_ticket_21748_double_negated_and(self): i1 = Identifier.objects.create(name='i1') i2 = Identifier.objects.create(name='i2') Identifier.objects.create(name='i3') p1 = Program.objects.create(identifier=i1) c1 = Channel.objects.create(identifier=i1) Program.objects.create(identifier=i2) # Check the ~~Q() (or equivalently .exclude(~Q)) works like Q() for # join promotion. qs1_doubleneg = Identifier.objects.exclude(~Q(program__id=p1.id, channel__id=c1.id)).order_by('pk') qs1_filter = Identifier.objects.filter(program__id=p1.id, channel__id=c1.id).order_by('pk') self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x) self.assertEqual(str(qs1_filter.query).count('JOIN'), str(qs1_doubleneg.query).count('JOIN')) self.assertEqual(2, str(qs1_doubleneg.query).count('INNER JOIN')) self.assertEqual(str(qs1_filter.query).count('INNER JOIN'), str(qs1_doubleneg.query).count('INNER JOIN')) def test_ticket_21748_double_negated_or(self): i1 = Identifier.objects.create(name='i1') i2 = Identifier.objects.create(name='i2') Identifier.objects.create(name='i3') p1 = Program.objects.create(identifier=i1) c1 = Channel.objects.create(identifier=i1) p2 = Program.objects.create(identifier=i2) # Test OR + doubleneg. The expected result is that channel is LOUTER # joined, program INNER joined qs1_filter = Identifier.objects.filter( Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id) ).order_by('pk') qs1_doubleneg = Identifier.objects.exclude( ~Q(Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id)) ).order_by('pk') self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x) self.assertEqual(str(qs1_filter.query).count('JOIN'), str(qs1_doubleneg.query).count('JOIN')) self.assertEqual(1, str(qs1_doubleneg.query).count('INNER JOIN')) self.assertEqual(str(qs1_filter.query).count('INNER JOIN'), str(qs1_doubleneg.query).count('INNER JOIN')) def test_ticket_21748_complex_filter(self): i1 = Identifier.objects.create(name='i1') i2 = Identifier.objects.create(name='i2') Identifier.objects.create(name='i3') p1 = Program.objects.create(identifier=i1) c1 = Channel.objects.create(identifier=i1) p2 = Program.objects.create(identifier=i2) # Finally, a more complex case, one time in a way where each # NOT is pushed to lowest level in the boolean tree, and # another query where this isn't done. qs1 = Identifier.objects.filter( ~Q(~Q(program__id=p2.id, channel__id=c1.id) & Q(program__id=p1.id)) ).order_by('pk') qs2 = Identifier.objects.filter( Q(Q(program__id=p2.id, channel__id=c1.id) | ~Q(program__id=p1.id)) ).order_by('pk') self.assertQuerysetEqual(qs1, qs2, lambda x: x) self.assertEqual(str(qs1.query).count('JOIN'), str(qs2.query).count('JOIN')) self.assertEqual(0, str(qs1.query).count('INNER JOIN')) self.assertEqual(str(qs1.query).count('INNER JOIN'), str(qs2.query).count('INNER JOIN')) class ReverseJoinTrimmingTest(TestCase): def test_reverse_trimming(self): # We don't accidentally trim reverse joins - we can't know if there is # anything on the other side of the join, so trimming reverse joins # can't be done, ever. t = Tag.objects.create() qs = Tag.objects.filter(annotation__tag=t.pk) self.assertIn('INNER JOIN', str(qs.query)) self.assertEqual(list(qs), []) class JoinReuseTest(TestCase): """ The queries reuse joins sensibly (for example, direct joins are always reused). 
""" def test_fk_reuse(self): qs = Annotation.objects.filter(tag__name='foo').filter(tag__name='bar') self.assertEqual(str(qs.query).count('JOIN'), 1) def test_fk_reuse_select_related(self): qs = Annotation.objects.filter(tag__name='foo').select_related('tag') self.assertEqual(str(qs.query).count('JOIN'), 1) def test_fk_reuse_annotation(self): qs = Annotation.objects.filter(tag__name='foo').annotate(cnt=Count('tag__name')) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_fk_reuse_disjunction(self): qs = Annotation.objects.filter(Q(tag__name='foo') | Q(tag__name='bar')) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_fk_reuse_order_by(self): qs = Annotation.objects.filter(tag__name='foo').order_by('tag__name') self.assertEqual(str(qs.query).count('JOIN'), 1) def test_revo2o_reuse(self): qs = Detail.objects.filter(member__name='foo').filter(member__name='foo') self.assertEqual(str(qs.query).count('JOIN'), 1) def test_revfk_noreuse(self): qs = Author.objects.filter(report__name='r4').filter(report__name='r1') self.assertEqual(str(qs.query).count('JOIN'), 2) def test_inverted_q_across_relations(self): """ When a trimmable join is specified in the query (here school__), the ORM detects it and removes unnecessary joins. The set of reusable joins are updated after trimming the query so that other lookups don't consider that the outer query's filters are in effect for the subquery (#26551). """ springfield_elementary = School.objects.create() hogward = School.objects.create() Student.objects.create(school=springfield_elementary) hp = Student.objects.create(school=hogward) Classroom.objects.create(school=hogward, name='Potion') Classroom.objects.create(school=springfield_elementary, name='Main') qs = Student.objects.filter( ~(Q(school__classroom__name='Main') & Q(school__classroom__has_blackboard=None)) ) self.assertSequenceEqual(qs, [hp]) class DisjunctionPromotionTests(TestCase): def test_disjunction_promotion_select_related(self): fk1 = FK1.objects.create(f1='f1', f2='f2') basea = BaseA.objects.create(a=fk1) qs = BaseA.objects.filter(Q(a=fk1) | Q(b=2)) self.assertEqual(str(qs.query).count(' JOIN '), 0) qs = qs.select_related('a', 'b') self.assertEqual(str(qs.query).count(' INNER JOIN '), 0) self.assertEqual(str(qs.query).count(' LEFT OUTER JOIN '), 2) with self.assertNumQueries(1): self.assertSequenceEqual(qs, [basea]) self.assertEqual(qs[0].a, fk1) self.assertIs(qs[0].b, None) def test_disjunction_promotion1(self): # Pre-existing join, add two ORed filters to the same join, # all joins can be INNER JOINS. qs = BaseA.objects.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) qs = qs.filter(Q(b__f1='foo') | Q(b__f2='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 2) # Reverse the order of AND and OR filters. qs = BaseA.objects.filter(Q(b__f1='foo') | Q(b__f2='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) qs = qs.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 2) def test_disjunction_promotion2(self): qs = BaseA.objects.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) # Now we have two different joins in an ORed condition, these # must be OUTER joins. The pre-existing join should remain INNER. qs = qs.filter(Q(b__f1='foo') | Q(c__f2='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) # Reverse case. 
qs = BaseA.objects.filter(Q(b__f1='foo') | Q(c__f2='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) qs = qs.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) def test_disjunction_promotion3(self): qs = BaseA.objects.filter(a__f2='bar') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) # The ANDed a__f2 filter allows us to keep using INNER JOIN # even inside the ORed case. If the join to a__ returns nothing, # the ANDed filter for a__f2 can't be true. qs = qs.filter(Q(a__f1='foo') | Q(b__f2='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) def test_disjunction_promotion3_demote(self): # This one needs demotion logic: the first filter causes a to be # outer joined, the second filter makes it inner join again. qs = BaseA.objects.filter( Q(a__f1='foo') | Q(b__f2='foo')).filter(a__f2='bar') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) def test_disjunction_promotion4_demote(self): qs = BaseA.objects.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('JOIN'), 0) # Demote needed for the "a" join. It is marked as outer join by the # above filter (even if it is trimmed away). qs = qs.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) def test_disjunction_promotion4(self): qs = BaseA.objects.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) qs = qs.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) def test_disjunction_promotion5_demote(self): qs = BaseA.objects.filter(Q(a=1) | Q(a=2)) # Note that the above filters on a force the join to an # inner join even if it is trimmed. self.assertEqual(str(qs.query).count('JOIN'), 0) qs = qs.filter(Q(a__f1='foo') | Q(b__f1='foo')) # So, now the a__f1 join doesn't need promotion. self.assertEqual(str(qs.query).count('INNER JOIN'), 1) # But b__f1 does.
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) qs = BaseA.objects.filter(Q(a__f1='foo') | Q(b__f1='foo')) # Now the join to a is created as LOUTER self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) qs = qs.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) def test_disjunction_promotion6(self): qs = BaseA.objects.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('JOIN'), 0) qs = BaseA.objects.filter(Q(a__f1='foo') & Q(b__f1='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 2) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0) qs = BaseA.objects.filter(Q(a__f1='foo') & Q(b__f1='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(qs.query).count('INNER JOIN'), 2) qs = qs.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('INNER JOIN'), 2) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0) def test_disjunction_promotion7(self): qs = BaseA.objects.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('JOIN'), 0) qs = BaseA.objects.filter(Q(a__f1='foo') | (Q(b__f1='foo') & Q(a__f1='bar'))) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) qs = BaseA.objects.filter( (Q(a__f1='foo') | Q(b__f1='foo')) & (Q(a__f1='bar') | Q(c__f1='foo')) ) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3) self.assertEqual(str(qs.query).count('INNER JOIN'), 0) qs = BaseA.objects.filter( Q(a__f1='foo') | Q(a__f1='bar') & (Q(b__f1='bar') | Q(c__f1='foo')) ) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) def test_disjunction_promotion_fexpression(self): qs = BaseA.objects.filter(Q(a__f1=F('b__f1')) | Q(b__f1='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) qs = BaseA.objects.filter(Q(a__f1=F('c__f1')) | Q(b__f1='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3) qs = BaseA.objects.filter(Q(a__f1=F('b__f1')) | Q(a__f2=F('b__f2')) | Q(c__f1='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3) qs = BaseA.objects.filter(Q(a__f1=F('c__f1')) | (Q(pk=1) & Q(pk=2))) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) self.assertEqual(str(qs.query).count('INNER JOIN'), 0) class ManyToManyExcludeTest(TestCase): def test_exclude_many_to_many(self): i_extra = Identifier.objects.create(name='extra') i_program = Identifier.objects.create(name='program') program = Program.objects.create(identifier=i_program) i_channel = Identifier.objects.create(name='channel') channel = Channel.objects.create(identifier=i_channel) channel.programs.add(program) # channel contains 'program1', so all Identifiers except that one # should be returned self.assertSequenceEqual( Identifier.objects.exclude(program__channel=channel).order_by('name'), [i_channel, i_extra], ) self.assertSequenceEqual( Identifier.objects.exclude(program__channel=None).order_by('name'), [i_program], ) def test_ticket_12823(self): pg3 = Page.objects.create(text='pg3') pg2 = Page.objects.create(text='pg2') pg1 = Page.objects.create(text='pg1') pa1 = Paragraph.objects.create(text='pa1') pa1.page.set([pg1, pg2]) pa2 = Paragraph.objects.create(text='pa2') pa2.page.set([pg2, pg3]) pa3 = Paragraph.objects.create(text='pa3') ch1 = Chapter.objects.create(title='ch1', paragraph=pa1) ch2 = Chapter.objects.create(title='ch2', paragraph=pa2) 
ch3 = Chapter.objects.create(title='ch3', paragraph=pa3) b1 = Book.objects.create(title='b1', chapter=ch1) b2 = Book.objects.create(title='b2', chapter=ch2) b3 = Book.objects.create(title='b3', chapter=ch3) q = Book.objects.exclude(chapter__paragraph__page__text='pg1') self.assertNotIn('IS NOT NULL', str(q.query)) self.assertEqual(len(q), 2) self.assertNotIn(b1, q) self.assertIn(b2, q) self.assertIn(b3, q) class RelabelCloneTest(TestCase): def test_ticket_19964(self): my1 = MyObject.objects.create(data='foo') my1.parent = my1 my1.save() my2 = MyObject.objects.create(data='bar', parent=my1) parents = MyObject.objects.filter(parent=F('id')) children = MyObject.objects.filter(parent__in=parents).exclude(parent=F('id')) self.assertEqual(list(parents), [my1]) # Evaluating the children query (which has parents as part of it) does # not change results for the parents query. self.assertEqual(list(children), [my2]) self.assertEqual(list(parents), [my1]) class Ticket20101Tests(TestCase): def test_ticket_20101(self): """ Tests QuerySet ORed combining in exclude subquery case. """ t = Tag.objects.create(name='foo') a1 = Annotation.objects.create(tag=t, name='a1') a2 = Annotation.objects.create(tag=t, name='a2') a3 = Annotation.objects.create(tag=t, name='a3') n = Note.objects.create(note='foo', misc='bar') qs1 = Note.objects.exclude(annotation__in=[a1, a2]) qs2 = Note.objects.filter(annotation__in=[a3]) self.assertIn(n, qs1) self.assertNotIn(n, qs2) self.assertIn(n, (qs1 | qs2)) class EmptyStringPromotionTests(SimpleTestCase): def test_empty_string_promotion(self): qs = RelatedObject.objects.filter(single__name='') if connection.features.interprets_empty_strings_as_nulls: self.assertIn('LEFT OUTER JOIN', str(qs.query)) else: self.assertNotIn('LEFT OUTER JOIN', str(qs.query)) class ValuesSubqueryTests(TestCase): def test_values_in_subquery(self): # If a values() queryset is used, then the given values # will be used instead of forcing use of the relation's field. o1 = Order.objects.create(id=-2) o2 = Order.objects.create(id=-1) oi1 = OrderItem.objects.create(order=o1, status=0) oi1.status = oi1.pk oi1.save() OrderItem.objects.create(order=o2, status=0) # The query below should match o1 as it has related order_item # with id == status. 
self.assertSequenceEqual(Order.objects.filter(items__in=OrderItem.objects.values_list('status')), [o1]) class DoubleInSubqueryTests(TestCase): def test_double_subquery_in(self): lfa1 = LeafA.objects.create(data='foo') lfa2 = LeafA.objects.create(data='bar') lfb1 = LeafB.objects.create(data='lfb1') lfb2 = LeafB.objects.create(data='lfb2') Join.objects.create(a=lfa1, b=lfb1) Join.objects.create(a=lfa2, b=lfb2) leaf_as = LeafA.objects.filter(data='foo').values_list('pk', flat=True) joins = Join.objects.filter(a__in=leaf_as).values_list('b__id', flat=True) qs = LeafB.objects.filter(pk__in=joins) self.assertSequenceEqual(qs, [lfb1]) class Ticket18785Tests(SimpleTestCase): def test_ticket_18785(self): # Test join trimming from ticket18785 qs = Item.objects.exclude( note__isnull=False ).filter( name='something', creator__extra__isnull=True ).order_by() self.assertEqual(1, str(qs.query).count('INNER JOIN')) self.assertEqual(0, str(qs.query).count('OUTER JOIN')) class Ticket20788Tests(TestCase): def test_ticket_20788(self): Paragraph.objects.create() paragraph = Paragraph.objects.create() page = paragraph.page.create() chapter = Chapter.objects.create(paragraph=paragraph) Book.objects.create(chapter=chapter) paragraph2 = Paragraph.objects.create() Page.objects.create() chapter2 = Chapter.objects.create(paragraph=paragraph2) book2 = Book.objects.create(chapter=chapter2) sentences_not_in_pub = Book.objects.exclude(chapter__paragraph__page=page) self.assertSequenceEqual(sentences_not_in_pub, [book2]) class Ticket12807Tests(TestCase): def test_ticket_12807(self): p1 = Paragraph.objects.create() p2 = Paragraph.objects.create() # The ORed condition below should have no effect on the query - the # ~Q(pk__in=[]) will always be True. qs = Paragraph.objects.filter((Q(pk=p2.pk) | ~Q(pk__in=[])) & Q(pk=p1.pk)) self.assertSequenceEqual(qs, [p1]) class RelatedLookupTypeTests(TestCase): error = 'Cannot query "%s": Must be "%s" instance.' @classmethod def setUpTestData(cls): cls.oa = ObjectA.objects.create(name="oa") cls.poa = ProxyObjectA.objects.get(name="oa") cls.coa = ChildObjectA.objects.create(name="coa") cls.wrong_type = Order.objects.create(id=cls.oa.pk) cls.ob = ObjectB.objects.create(name="ob", objecta=cls.oa, num=1) cls.pob1 = ProxyObjectB.objects.create(name="pob", objecta=cls.oa, num=2) cls.pob = ProxyObjectB.objects.all() cls.c = ObjectC.objects.create(childobjecta=cls.coa) def test_wrong_type_lookup(self): """ A ValueError is raised when the incorrect object type is passed to a query lookup. """ # Passing incorrect object type with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)): ObjectB.objects.get(objecta=self.wrong_type) with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)): ObjectB.objects.filter(objecta__in=[self.wrong_type]) with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)): ObjectB.objects.filter(objecta=self.wrong_type) with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)): ObjectA.objects.filter(objectb__in=[self.wrong_type, self.ob]) # Passing an object of the class on which query is done. 
with self.assertRaisesMessage(ValueError, self.error % (self.ob, ObjectA._meta.object_name)): ObjectB.objects.filter(objecta__in=[self.poa, self.ob]) with self.assertRaisesMessage(ValueError, self.error % (self.ob, ChildObjectA._meta.object_name)): ObjectC.objects.exclude(childobjecta__in=[self.coa, self.ob]) def test_wrong_backward_lookup(self): """ A ValueError is raised when the incorrect object type is passed to a query lookup for backward relations. """ with self.assertRaisesMessage(ValueError, self.error % (self.oa, ObjectB._meta.object_name)): ObjectA.objects.filter(objectb__in=[self.oa, self.ob]) with self.assertRaisesMessage(ValueError, self.error % (self.oa, ObjectB._meta.object_name)): ObjectA.objects.exclude(objectb=self.oa) with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)): ObjectA.objects.get(objectb=self.wrong_type) def test_correct_lookup(self): """ When passing proxy model objects, child objects, or parent objects, lookups work fine. """ out_a = [self.oa] out_b = [self.ob, self.pob1] out_c = [self.c] # proxy model objects self.assertSequenceEqual(ObjectB.objects.filter(objecta=self.poa).order_by('name'), out_b) self.assertSequenceEqual(ObjectA.objects.filter(objectb__in=self.pob).order_by('pk'), out_a * 2) # child objects self.assertSequenceEqual(ObjectB.objects.filter(objecta__in=[self.coa]), []) self.assertSequenceEqual(ObjectB.objects.filter(objecta__in=[self.poa, self.coa]).order_by('name'), out_b) self.assertSequenceEqual( ObjectB.objects.filter(objecta__in=iter([self.poa, self.coa])).order_by('name'), out_b ) # parent objects self.assertSequenceEqual(ObjectC.objects.exclude(childobjecta=self.oa), out_c) # QuerySet related object type checking shouldn't issue queries # (the querysets aren't evaluated here, hence zero queries) (#23266). with self.assertNumQueries(0): ObjectB.objects.filter(objecta__in=ObjectA.objects.all()) def test_values_queryset_lookup(self): """ #23396 - Ensure ValueQuerySets are not checked for compatibility with the lookup field """ # Make sure the num and objecta field values match. ob = ObjectB.objects.get(name='ob') ob.num = ob.objecta.pk ob.save() pob = ObjectB.objects.get(name='pob') pob.num = pob.objecta.pk pob.save() self.assertSequenceEqual(ObjectB.objects.filter( objecta__in=ObjectB.objects.all().values_list('num') ).order_by('pk'), [ob, pob]) class Ticket14056Tests(TestCase): def test_ticket_14056(self): s1 = SharedConnection.objects.create(data='s1') s2 = SharedConnection.objects.create(data='s2') s3 = SharedConnection.objects.create(data='s3') PointerA.objects.create(connection=s2) expected_ordering = ( [s1, s3, s2] if connection.features.nulls_order_largest else [s2, s1, s3] ) self.assertSequenceEqual(SharedConnection.objects.order_by('-pointera__connection', 'pk'), expected_ordering) class Ticket20955Tests(TestCase): def test_ticket_20955(self): jack = Staff.objects.create(name='jackstaff') jackstaff = StaffUser.objects.create(staff=jack) jill = Staff.objects.create(name='jillstaff') jillstaff = StaffUser.objects.create(staff=jill) task = Task.objects.create(creator=jackstaff, owner=jillstaff, title="task") task_get = Task.objects.get(pk=task.pk) # Load data so that assertNumQueries doesn't complain about the get # version's queries. 
task_get.creator.staffuser.staff task_get.owner.staffuser.staff qs = Task.objects.select_related( 'creator__staffuser__staff', 'owner__staffuser__staff') self.assertEqual(str(qs.query).count(' JOIN '), 6) task_select_related = qs.get(pk=task.pk) with self.assertNumQueries(0): self.assertEqual(task_select_related.creator.staffuser.staff, task_get.creator.staffuser.staff) self.assertEqual(task_select_related.owner.staffuser.staff, task_get.owner.staffuser.staff) class Ticket21203Tests(TestCase): def test_ticket_21203(self): p = Ticket21203Parent.objects.create(parent_bool=True) c = Ticket21203Child.objects.create(parent=p) qs = Ticket21203Child.objects.select_related('parent').defer('parent__created') self.assertSequenceEqual(qs, [c]) self.assertIs(qs[0].parent.parent_bool, True) class ValuesJoinPromotionTests(TestCase): def test_values_no_promotion_for_existing(self): qs = Node.objects.filter(parent__parent__isnull=False) self.assertIn(' INNER JOIN ', str(qs.query)) qs = qs.values('parent__parent__id') self.assertIn(' INNER JOIN ', str(qs.query)) # Make sure there is a left outer join without the filter. qs = Node.objects.values('parent__parent__id') self.assertIn(' LEFT OUTER JOIN ', str(qs.query)) def test_non_nullable_fk_not_promoted(self): qs = ObjectB.objects.values('objecta__name') self.assertIn(' INNER JOIN ', str(qs.query)) def test_ticket_21376(self): a = ObjectA.objects.create() ObjectC.objects.create(objecta=a) qs = ObjectC.objects.filter( Q(objecta=a) | Q(objectb__objecta=a), ) qs = qs.filter( Q(objectb=1) | Q(objecta=a), ) self.assertEqual(qs.count(), 1) tblname = connection.ops.quote_name(ObjectB._meta.db_table) self.assertIn(' LEFT OUTER JOIN %s' % tblname, str(qs.query)) class ForeignKeyToBaseExcludeTests(TestCase): def test_ticket_21787(self): sc1 = SpecialCategory.objects.create(special_name='sc1', name='sc1') sc2 = SpecialCategory.objects.create(special_name='sc2', name='sc2') sc3 = SpecialCategory.objects.create(special_name='sc3', name='sc3') c1 = CategoryItem.objects.create(category=sc1) CategoryItem.objects.create(category=sc2) self.assertSequenceEqual(SpecialCategory.objects.exclude(categoryitem__id=c1.pk).order_by('name'), [sc2, sc3]) self.assertSequenceEqual(SpecialCategory.objects.filter(categoryitem__id=c1.pk), [sc1]) class ReverseM2MCustomPkTests(TestCase): def test_ticket_21879(self): cpt1 = CustomPkTag.objects.create(id='cpt1', tag='cpt1') cp1 = CustomPk.objects.create(name='cp1', extra='extra') cp1.custompktag_set.add(cpt1) self.assertSequenceEqual(CustomPk.objects.filter(custompktag=cpt1), [cp1]) self.assertSequenceEqual(CustomPkTag.objects.filter(custom_pk=cp1), [cpt1]) class Ticket22429Tests(TestCase): def test_ticket_22429(self): sc1 = School.objects.create() st1 = Student.objects.create(school=sc1) sc2 = School.objects.create() st2 = Student.objects.create(school=sc2) cr = Classroom.objects.create(school=sc1) cr.students.add(st1) queryset = Student.objects.filter(~Q(classroom__school=F('school'))) self.assertSequenceEqual(queryset, [st2]) class Ticket23605Tests(TestCase): def test_ticket_23605(self): # Test filtering on a complicated q-object from ticket's report. # The query structure is such that we have multiple nested subqueries. # The original problem was that the inner queries weren't relabeled # correctly. # See also #24090. 
a1 = Ticket23605A.objects.create() a2 = Ticket23605A.objects.create() c1 = Ticket23605C.objects.create(field_c0=10000.0) Ticket23605B.objects.create( field_b0=10000.0, field_b1=True, modelc_fk=c1, modela_fk=a1) complex_q = Q(pk__in=Ticket23605A.objects.filter( Q( # True for a1 as field_b0 = 10000, field_c0=10000 # False for a2 as no ticket23605b found ticket23605b__field_b0__gte=1000000 / F("ticket23605b__modelc_fk__field_c0") ) & # True for a1 (field_b1=True) Q(ticket23605b__field_b1=True) & ~Q(ticket23605b__pk__in=Ticket23605B.objects.filter( ~( # Same filters as above commented filters, but # double-negated (one for Q() above, one for # parentheses). So, again a1 match, a2 not. Q(field_b1=True) & Q(field_b0__gte=1000000 / F("modelc_fk__field_c0")) ) ))).filter(ticket23605b__field_b1=True)) qs1 = Ticket23605A.objects.filter(complex_q) self.assertSequenceEqual(qs1, [a1]) qs2 = Ticket23605A.objects.exclude(complex_q) self.assertSequenceEqual(qs2, [a2]) class TestTicket24279(TestCase): def test_ticket_24278(self): School.objects.create() qs = School.objects.filter(Q(pk__in=()) | Q()) self.assertQuerysetEqual(qs, []) class TestInvalidValuesRelation(SimpleTestCase): def test_invalid_values(self): msg = "Field 'id' expected a number but got 'abc'." with self.assertRaisesMessage(ValueError, msg): Annotation.objects.filter(tag='abc') with self.assertRaisesMessage(ValueError, msg): Annotation.objects.filter(tag__in=[123, 'abc']) class TestTicket24605(TestCase): def test_ticket_24605(self): """ Subquery table names should be quoted. """ i1 = Individual.objects.create(alive=True) RelatedIndividual.objects.create(related=i1) i2 = Individual.objects.create(alive=False) RelatedIndividual.objects.create(related=i2) i3 = Individual.objects.create(alive=True) i4 = Individual.objects.create(alive=False) self.assertSequenceEqual(Individual.objects.filter(Q(alive=False), Q(related_individual__isnull=True)), [i4]) self.assertSequenceEqual( Individual.objects.exclude(Q(alive=False), Q(related_individual__isnull=True)).order_by('pk'), [i1, i2, i3] ) class Ticket23622Tests(TestCase): @skipUnlessDBFeature('can_distinct_on_fields') def test_ticket_23622(self): """ Make sure __pk__in and __in work the same for related fields when using a distinct on subquery. 
""" a1 = Ticket23605A.objects.create() a2 = Ticket23605A.objects.create() c1 = Ticket23605C.objects.create(field_c0=0.0) Ticket23605B.objects.create( modela_fk=a1, field_b0=123, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a1, field_b0=23, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a1, field_b0=234, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a1, field_b0=12, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a2, field_b0=567, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a2, field_b0=76, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a2, field_b0=7, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a2, field_b0=56, field_b1=True, modelc_fk=c1, ) qx = ( Q(ticket23605b__pk__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) & Q(ticket23605b__field_b0__gte=300) ) qy = ( Q(ticket23605b__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) & Q(ticket23605b__field_b0__gte=300) ) self.assertEqual( set(Ticket23605A.objects.filter(qx).values_list('pk', flat=True)), set(Ticket23605A.objects.filter(qy).values_list('pk', flat=True)) ) self.assertSequenceEqual(Ticket23605A.objects.filter(qx), [a2])
0209c62dc1832706093c4af0998111bd32308eda39189a05894df1279c82b465
import datetime import itertools import unittest from copy import copy from unittest import mock from django.core.exceptions import FieldError from django.core.management.color import no_style from django.db import ( DatabaseError, DataError, IntegrityError, OperationalError, connection, ) from django.db.models import ( CASCADE, PROTECT, AutoField, BigAutoField, BigIntegerField, BinaryField, BooleanField, CharField, CheckConstraint, DateField, DateTimeField, DecimalField, F, FloatField, ForeignKey, ForeignObject, Index, IntegerField, JSONField, ManyToManyField, Model, OneToOneField, OrderBy, PositiveIntegerField, Q, SlugField, SmallAutoField, SmallIntegerField, TextField, TimeField, UniqueConstraint, UUIDField, Value, ) from django.db.models.fields.json import KeyTextTransform from django.db.models.functions import Abs, Cast, Collate, Lower, Random, Upper from django.db.models.indexes import IndexExpression from django.db.transaction import TransactionManagementError, atomic from django.test import ( TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.utils import ( CaptureQueriesContext, isolate_apps, register_lookup, ) from django.utils import timezone from .fields import ( CustomManyToManyField, InheritedManyToManyField, MediumBlobField, ) from .models import ( Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex, AuthorWithDefaultHeight, AuthorWithEvenLongerName, AuthorWithIndexedName, AuthorWithIndexedNameAndBirthday, AuthorWithUniqueName, AuthorWithUniqueNameAndBirthday, Book, BookForeignObj, BookWeak, BookWithLongName, BookWithO2O, BookWithoutAuthor, BookWithSlug, IntegerPK, Node, Note, NoteRename, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest, new_apps, ) class SchemaTests(TransactionTestCase): """ Tests for the schema-alteration code. Be aware that these tests are more liable than most to false results, as sometimes the code to check if a test has worked is almost as complex as the code it is testing. """ available_apps = [] models = [ Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex, AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book, BookWeak, BookWithLongName, BookWithO2O, BookWithSlug, IntegerPK, Node, Note, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest, ] # Utility functions def setUp(self): # local_models should contain test dependent model classes that will be # automatically removed from the app cache on test tear down. self.local_models = [] # isolated_local_models contains models that are in test methods # decorated with @isolate_apps. 
self.isolated_local_models = [] def tearDown(self): # Delete any tables made for our models self.delete_tables() new_apps.clear_cache() for model in new_apps.get_models(): model._meta._expire_cache() if 'schema' in new_apps.all_models: for model in self.local_models: for many_to_many in model._meta.many_to_many: through = many_to_many.remote_field.through if through and through._meta.auto_created: del new_apps.all_models['schema'][through._meta.model_name] del new_apps.all_models['schema'][model._meta.model_name] if self.isolated_local_models: with connection.schema_editor() as editor: for model in self.isolated_local_models: editor.delete_model(model) def delete_tables(self): "Deletes all model tables for our models for a clean test environment" converter = connection.introspection.identifier_converter with connection.schema_editor() as editor: connection.disable_constraint_checking() table_names = connection.introspection.table_names() if connection.features.ignores_table_name_case: table_names = [table_name.lower() for table_name in table_names] for model in itertools.chain(SchemaTests.models, self.local_models): tbl = converter(model._meta.db_table) if connection.features.ignores_table_name_case: tbl = tbl.lower() if tbl in table_names: editor.delete_model(model) table_names.remove(tbl) connection.enable_constraint_checking() def column_classes(self, model): with connection.cursor() as cursor: columns = { d[0]: (connection.introspection.get_field_type(d[1], d), d) for d in connection.introspection.get_table_description( cursor, model._meta.db_table, ) } # SQLite has a different format for field_type for name, (type, desc) in columns.items(): if isinstance(type, tuple): columns[name] = (type[0], desc) # SQLite also doesn't error properly if not columns: raise DatabaseError("Table does not exist (empty pragma)") return columns def get_primary_key(self, table): with connection.cursor() as cursor: return connection.introspection.get_primary_key_column(cursor, table) def get_indexes(self, table): """ Get the indexes on the table using a new cursor. """ with connection.cursor() as cursor: return [ c['columns'][0] for c in connection.introspection.get_constraints(cursor, table).values() if c['index'] and len(c['columns']) == 1 ] def get_uniques(self, table): with connection.cursor() as cursor: return [ c['columns'][0] for c in connection.introspection.get_constraints(cursor, table).values() if c['unique'] and len(c['columns']) == 1 ] def get_constraints(self, table): """ Get the constraints on a table using a new cursor. 
""" with connection.cursor() as cursor: return connection.introspection.get_constraints(cursor, table) def get_constraints_for_column(self, model, column_name): constraints = self.get_constraints(model._meta.db_table) constraints_for_column = [] for name, details in constraints.items(): if details['columns'] == [column_name]: constraints_for_column.append(name) return sorted(constraints_for_column) def check_added_field_default(self, schema_editor, model, field, field_name, expected_default, cast_function=None): with connection.cursor() as cursor: schema_editor.add_field(model, field) cursor.execute("SELECT {} FROM {};".format(field_name, model._meta.db_table)) database_default = cursor.fetchall()[0][0] if cast_function and type(database_default) != type(expected_default): database_default = cast_function(database_default) self.assertEqual(database_default, expected_default) def get_constraints_count(self, table, column, fk_to): """ Return a dict with keys 'fks', 'uniques, and 'indexes' indicating the number of foreign keys, unique constraints, and indexes on `table`.`column`. The `fk_to` argument is a 2-tuple specifying the expected foreign key relationship's (table, column). """ with connection.cursor() as cursor: constraints = connection.introspection.get_constraints(cursor, table) counts = {'fks': 0, 'uniques': 0, 'indexes': 0} for c in constraints.values(): if c['columns'] == [column]: if c['foreign_key'] == fk_to: counts['fks'] += 1 if c['unique']: counts['uniques'] += 1 elif c['index']: counts['indexes'] += 1 return counts def get_column_collation(self, table, column): with connection.cursor() as cursor: return next( f.collation for f in connection.introspection.get_table_description(cursor, table) if f.name == column ) def assertIndexOrder(self, table, index, order): constraints = self.get_constraints(table) self.assertIn(index, constraints) index_orders = constraints[index]['orders'] self.assertTrue(all(val == expected for val, expected in zip(index_orders, order))) def assertForeignKeyExists(self, model, column, expected_fk_table, field='id'): """ Fail if the FK constraint on `model.Meta.db_table`.`column` to `expected_fk_table`.id doesn't exist. """ constraints = self.get_constraints(model._meta.db_table) constraint_fk = None for details in constraints.values(): if details['columns'] == [column] and details['foreign_key']: constraint_fk = details['foreign_key'] break self.assertEqual(constraint_fk, (expected_fk_table, field)) def assertForeignKeyNotExists(self, model, column, expected_fk_table): with self.assertRaises(AssertionError): self.assertForeignKeyExists(model, column, expected_fk_table) # Tests def test_creation_deletion(self): """ Tries creating a model's table, and then deleting it. """ with connection.schema_editor() as editor: # Create the table editor.create_model(Author) # The table is there list(Author.objects.all()) # Clean up that table editor.delete_model(Author) # No deferred SQL should be left over. 
self.assertEqual(editor.deferred_sql, []) # The table is gone with self.assertRaises(DatabaseError): list(Author.objects.all()) @skipUnlessDBFeature('supports_foreign_keys') def test_fk(self): "Creating tables out of FK order, then repointing, works" # Create the table with connection.schema_editor() as editor: editor.create_model(Book) editor.create_model(Author) editor.create_model(Tag) # Initial tables are there list(Author.objects.all()) list(Book.objects.all()) # Make sure the FK constraint is present with self.assertRaises(IntegrityError): Book.objects.create( author_id=1, title="Much Ado About Foreign Keys", pub_date=datetime.datetime.now(), ) # Repoint the FK constraint old_field = Book._meta.get_field("author") new_field = ForeignKey(Tag, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) self.assertForeignKeyExists(Book, 'author_id', 'schema_tag') @skipUnlessDBFeature('can_create_inline_fk') def test_inline_fk(self): # Create some tables. with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) editor.create_model(Note) self.assertForeignKeyNotExists(Note, 'book_id', 'schema_book') # Add a foreign key from one to the other. with connection.schema_editor() as editor: new_field = ForeignKey(Book, CASCADE) new_field.set_attributes_from_name('book') editor.add_field(Note, new_field) self.assertForeignKeyExists(Note, 'book_id', 'schema_book') # Creating a FK field with a constraint uses a single statement without # a deferred ALTER TABLE. self.assertFalse([ sql for sql in (str(statement) for statement in editor.deferred_sql) if sql.startswith('ALTER TABLE') and 'ADD CONSTRAINT' in sql ]) @skipUnlessDBFeature('can_create_inline_fk') def test_add_inline_fk_update_data(self): with connection.schema_editor() as editor: editor.create_model(Node) # Add an inline foreign key and update data in the same transaction. new_field = ForeignKey(Node, CASCADE, related_name='new_fk', null=True) new_field.set_attributes_from_name('new_parent_fk') parent = Node.objects.create() with connection.schema_editor() as editor: editor.add_field(Node, new_field) editor.execute('UPDATE schema_node SET new_parent_fk_id = %s;', [parent.pk]) assertIndex = ( self.assertIn if connection.features.indexes_foreign_keys else self.assertNotIn ) assertIndex('new_parent_fk_id', self.get_indexes(Node._meta.db_table)) @skipUnlessDBFeature( 'can_create_inline_fk', 'allows_multiple_constraints_on_same_fields', ) @isolate_apps('schema') def test_add_inline_fk_index_update_data(self): class Node(Model): class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Node) # Add an inline foreign key, update data, and an index in the same # transaction. 
new_field = ForeignKey(Node, CASCADE, related_name='new_fk', null=True) new_field.set_attributes_from_name('new_parent_fk') parent = Node.objects.create() with connection.schema_editor() as editor: editor.add_field(Node, new_field) Node._meta.add_field(new_field) editor.execute('UPDATE schema_node SET new_parent_fk_id = %s;', [parent.pk]) editor.add_index(Node, Index(fields=['new_parent_fk'], name='new_parent_inline_fk_idx')) self.assertIn('new_parent_fk_id', self.get_indexes(Node._meta.db_table)) @skipUnlessDBFeature('supports_foreign_keys') def test_char_field_with_db_index_to_fk(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorCharFieldWithIndex) # Change CharField to FK old_field = AuthorCharFieldWithIndex._meta.get_field('char_field') new_field = ForeignKey(Author, CASCADE, blank=True) new_field.set_attributes_from_name('char_field') with connection.schema_editor() as editor: editor.alter_field(AuthorCharFieldWithIndex, old_field, new_field, strict=True) self.assertForeignKeyExists(AuthorCharFieldWithIndex, 'char_field_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') @skipUnlessDBFeature('supports_index_on_text_field') def test_text_field_with_db_index_to_fk(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorTextFieldWithIndex) # Change TextField to FK old_field = AuthorTextFieldWithIndex._meta.get_field('text_field') new_field = ForeignKey(Author, CASCADE, blank=True) new_field.set_attributes_from_name('text_field') with connection.schema_editor() as editor: editor.alter_field(AuthorTextFieldWithIndex, old_field, new_field, strict=True) self.assertForeignKeyExists(AuthorTextFieldWithIndex, 'text_field_id', 'schema_author') @isolate_apps('schema') def test_char_field_pk_to_auto_field(self): class Foo(Model): id = CharField(max_length=255, primary_key=True) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('id') new_field = AutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Foo with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) @skipUnlessDBFeature('supports_foreign_keys') def test_fk_to_proxy(self): "Creating a FK to a proxy model creates database constraints." 
class AuthorProxy(Author): class Meta: app_label = 'schema' apps = new_apps proxy = True class AuthorRef(Model): author = ForeignKey(AuthorProxy, on_delete=CASCADE) class Meta: app_label = 'schema' apps = new_apps self.local_models = [AuthorProxy, AuthorRef] # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorRef) self.assertForeignKeyExists(AuthorRef, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_fk_db_constraint(self): "The db_constraint parameter is respected" # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(Author) editor.create_model(BookWeak) # Initial tables are there list(Author.objects.all()) list(Tag.objects.all()) list(BookWeak.objects.all()) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') # Make a db_constraint=False FK new_field = ForeignKey(Tag, CASCADE, db_constraint=False) new_field.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.add_field(Author, new_field) self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag') # Alter to one with a constraint new_field2 = ForeignKey(Tag, CASCADE) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) self.assertForeignKeyExists(Author, 'tag_id', 'schema_tag') # Alter to one without a constraint again new_field2 = ForeignKey(Tag, CASCADE) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field2, new_field, strict=True) self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag') @isolate_apps('schema') def test_no_db_constraint_added_during_primary_key_change(self): """ When a primary key that's pointed to by a ForeignKey with db_constraint=False is altered, a foreign key constraint isn't added. """ class Author(Model): class Meta: app_label = 'schema' class BookWeak(Model): author = ForeignKey(Author, CASCADE, db_constraint=False) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWeak) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') old_field = Author._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.model = Author new_field.set_attributes_from_name('id') # @isolate_apps() and inner models are needed to have the model # relations populated, otherwise this doesn't act as a regression test. 
self.assertEqual(len(new_field.model._meta.related_objects), 1) with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') def _test_m2m_db_constraint(self, M2MFieldClass): class LocalAuthorWithM2M(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorWithM2M] # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(LocalAuthorWithM2M) # Initial tables are there list(LocalAuthorWithM2M.objects.all()) list(Tag.objects.all()) # Make a db_constraint=False FK new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False) new_field.contribute_to_class(LocalAuthorWithM2M, "tags") # Add the field with connection.schema_editor() as editor: editor.add_field(LocalAuthorWithM2M, new_field) self.assertForeignKeyNotExists(new_field.remote_field.through, 'tag_id', 'schema_tag') @skipUnlessDBFeature('supports_foreign_keys') def test_m2m_db_constraint(self): self._test_m2m_db_constraint(ManyToManyField) @skipUnlessDBFeature('supports_foreign_keys') def test_m2m_db_constraint_custom(self): self._test_m2m_db_constraint(CustomManyToManyField) @skipUnlessDBFeature('supports_foreign_keys') def test_m2m_db_constraint_inherited(self): self._test_m2m_db_constraint(InheritedManyToManyField) def test_add_field(self): """ Tests adding fields to models """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add the new field new_field = IntegerField(null=True) new_field.set_attributes_from_name("age") with CaptureQueriesContext(connection) as ctx, connection.schema_editor() as editor: editor.add_field(Author, new_field) drop_default_sql = editor.sql_alter_column_no_default % { 'column': editor.quote_name(new_field.name), } self.assertFalse(any(drop_default_sql in query['sql'] for query in ctx.captured_queries)) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['age'][0], connection.features.introspected_field_types['IntegerField']) self.assertTrue(columns['age'][1][6]) def test_add_field_remove_field(self): """ Adding a field and removing it removes all deferred sql referring to it. """ with connection.schema_editor() as editor: # Create a table with a unique constraint on the slug field. editor.create_model(Tag) # Remove the slug column. 
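# Dropping the column should also discard any deferred SQL (e.g. index creation) that referenced it.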
editor.remove_field(Tag, Tag._meta.get_field('slug')) self.assertEqual(editor.deferred_sql, []) def test_add_field_temp_default(self): """ Tests adding fields to models with a temporary default """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = CharField(max_length=30, default="Godwin") new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['surname'][0], connection.features.introspected_field_types['CharField']) self.assertEqual(columns['surname'][1][6], connection.features.interprets_empty_strings_as_nulls) def test_add_field_temp_default_boolean(self): """ Tests adding fields to models with a temporary default where the default is False. (#21783) """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = BooleanField(default=False) new_field.set_attributes_from_name("awesome") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) # BooleanField are stored as TINYINT(1) on MySQL. field_type = columns['awesome'][0] self.assertEqual(field_type, connection.features.introspected_field_types['BooleanField']) def test_add_field_default_transform(self): """ Tests adding fields to models with a default that is not directly valid in the database (#22581) """ class TestTransformField(IntegerField): # Weird field that saves the count of items in its value def get_default(self): return self.default def get_prep_value(self, value): if value is None: return 0 return len(value) # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add the field with a default it needs to cast (to string in this case) new_field = TestTransformField(default={1: 2}) new_field.set_attributes_from_name("thing") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is there columns = self.column_classes(Author) field_type, field_info = columns['thing'] self.assertEqual(field_type, connection.features.introspected_field_types['IntegerField']) # Make sure the values were transformed correctly self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2) def test_add_field_binary(self): """ Tests binary fields get a sane default (#22851) """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add the new field new_field = BinaryField(blank=True) new_field.set_attributes_from_name("bits") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) # MySQL annoyingly uses the same backend, so it'll come back as one of # these two types. 
self.assertIn(columns['bits'][0], ("BinaryField", "TextField")) @unittest.skipUnless(connection.vendor == 'mysql', "MySQL specific") def test_add_binaryfield_mediumblob(self): """ Test adding a custom-sized binary field on MySQL (#24846). """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add the new field with default new_field = MediumBlobField(blank=True, default=b'123') new_field.set_attributes_from_name('bits') with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) # Introspection treats BLOBs as TextFields self.assertEqual(columns['bits'][0], "TextField") def test_alter(self): """ Tests simple altering of fields """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls)) # Alter the name field to a TextField old_field = Author._meta.get_field("name") new_field = TextField(null=True) new_field.set_attributes_from_name("name") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") self.assertTrue(columns['name'][1][6]) # Change nullability again new_field2 = TextField(null=False) new_field2.set_attributes_from_name("name") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls)) def test_alter_auto_field_to_integer_field(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change AutoField to IntegerField old_field = Author._meta.get_field('id') new_field = IntegerField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Now that ID is an IntegerField, the database raises an error if it # isn't provided. 
if not connection.features.supports_unspecified_pk: with self.assertRaises(DatabaseError): Author.objects.create() def test_alter_auto_field_to_char_field(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change AutoField to CharField old_field = Author._meta.get_field('id') new_field = CharField(primary_key=True, max_length=50) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) @isolate_apps('schema') def test_alter_auto_field_quoted_db_column(self): class Foo(Model): id = AutoField(primary_key=True, db_column='"quoted_id"') class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.model = Foo new_field.db_column = '"quoted_id"' new_field.set_attributes_from_name('id') with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) Foo.objects.create() def test_alter_not_unique_field_to_primary_key(self): # Create the table. with connection.schema_editor() as editor: editor.create_model(Author) # Change UUIDField to primary key. old_field = Author._meta.get_field('uuid') new_field = UUIDField(primary_key=True) new_field.set_attributes_from_name('uuid') new_field.model = Author with connection.schema_editor() as editor: editor.remove_field(Author, Author._meta.get_field('id')) editor.alter_field(Author, old_field, new_field, strict=True) @isolate_apps('schema') def test_alter_primary_key_quoted_db_table(self): class Foo(Model): class Meta: app_label = 'schema' db_table = '"foo"' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.model = Foo new_field.set_attributes_from_name('id') with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) Foo.objects.create() def test_alter_text_field(self): # Regression for "BLOB/TEXT column 'info' can't have a default value") # on MySQL. # Create the table with connection.schema_editor() as editor: editor.create_model(Note) old_field = Note._meta.get_field("info") new_field = TextField(blank=True) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) def test_alter_text_field_to_not_null_with_default_value(self): with connection.schema_editor() as editor: editor.create_model(Note) old_field = Note._meta.get_field('address') new_field = TextField(blank=True, default='', null=False) new_field.set_attributes_from_name('address') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) @skipUnlessDBFeature('can_defer_constraint_checks', 'can_rollback_ddl') def test_alter_fk_checks_deferred_constraints(self): """ #25492 - Altering a foreign key's structure and data in the same transaction. """ with connection.schema_editor() as editor: editor.create_model(Node) old_field = Node._meta.get_field('parent') new_field = ForeignKey(Node, CASCADE) new_field.set_attributes_from_name('parent') parent = Node.objects.create() with connection.schema_editor() as editor: # Update the parent FK to create a deferred constraint check. 
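# The self-referential FK check is deferred, so alter_field() can run after the data update without tripping the pending constraint.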
Node.objects.update(parent=parent) editor.alter_field(Node, old_field, new_field, strict=True) def test_alter_text_field_to_date_field(self): """ #25002 - Test conversion of text field to date field. """ with connection.schema_editor() as editor: editor.create_model(Note) Note.objects.create(info='1988-05-05') old_field = Note._meta.get_field('info') new_field = DateField(blank=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) # Make sure the field isn't nullable columns = self.column_classes(Note) self.assertFalse(columns['info'][1][6]) def test_alter_text_field_to_datetime_field(self): """ #25002 - Test conversion of text field to datetime field. """ with connection.schema_editor() as editor: editor.create_model(Note) Note.objects.create(info='1988-05-05 3:16:17.4567') old_field = Note._meta.get_field('info') new_field = DateTimeField(blank=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) # Make sure the field isn't nullable columns = self.column_classes(Note) self.assertFalse(columns['info'][1][6]) def test_alter_text_field_to_time_field(self): """ #25002 - Test conversion of text field to time field. """ with connection.schema_editor() as editor: editor.create_model(Note) Note.objects.create(info='3:16:17.4567') old_field = Note._meta.get_field('info') new_field = TimeField(blank=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) # Make sure the field isn't nullable columns = self.column_classes(Note) self.assertFalse(columns['info'][1][6]) @skipIfDBFeature('interprets_empty_strings_as_nulls') def test_alter_textual_field_keep_null_status(self): """ Changing a field type shouldn't affect the not null status. """ with connection.schema_editor() as editor: editor.create_model(Note) with self.assertRaises(IntegrityError): Note.objects.create(info=None) old_field = Note._meta.get_field("info") new_field = CharField(max_length=50) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) with self.assertRaises(IntegrityError): Note.objects.create(info=None) def test_alter_numeric_field_keep_null_status(self): """ Changing a field type shouldn't affect the not null status. """ with connection.schema_editor() as editor: editor.create_model(UniqueTest) with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=None, slug='aaa') old_field = UniqueTest._meta.get_field("year") new_field = BigIntegerField() new_field.set_attributes_from_name("year") with connection.schema_editor() as editor: editor.alter_field(UniqueTest, old_field, new_field, strict=True) with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=None, slug='bbb') def test_alter_null_to_not_null(self): """ #23609 - Tests handling of default values when altering from NULL to NOT NULL. 
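Rows that were NULL before the alteration are filled in with the new field's default.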
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertTrue(columns['height'][1][6]) # Create some test data Author.objects.create(name='Not null author', height=12) Author.objects.create(name='Null author') # Verify null value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertIsNone(Author.objects.get(name='Null author').height) # Alter the height field to NOT NULL with default old_field = Author._meta.get_field("height") new_field = PositiveIntegerField(default=42) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertFalse(columns['height'][1][6]) # Verify default value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertEqual(Author.objects.get(name='Null author').height, 42) def test_alter_charfield_to_null(self): """ #24307 - Should skip an alter statement on databases with interprets_empty_strings_as_null when changing a CharField to null. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change the CharField to null old_field = Author._meta.get_field('name') new_field = copy(old_field) new_field.null = True with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_char_field_decrease_length(self): # Create the table. with connection.schema_editor() as editor: editor.create_model(Author) Author.objects.create(name='x' * 255) # Change max_length of CharField. 
old_field = Author._meta.get_field('name') new_field = CharField(max_length=254) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: msg = 'value too long for type character varying(254)' with self.assertRaisesMessage(DataError, msg): editor.alter_field(Author, old_field, new_field, strict=True) @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_field_with_custom_db_type(self): from django.contrib.postgres.fields import ArrayField class Foo(Model): field = ArrayField(CharField(max_length=255)) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('field') new_field = ArrayField(CharField(max_length=16)) new_field.set_attributes_from_name('field') new_field.model = Foo with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) @isolate_apps('schema') @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_array_field_decrease_base_field_length(self): from django.contrib.postgres.fields import ArrayField class ArrayModel(Model): field = ArrayField(CharField(max_length=16)) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(ArrayModel) self.isolated_local_models = [ArrayModel] ArrayModel.objects.create(field=['x' * 16]) old_field = ArrayModel._meta.get_field('field') new_field = ArrayField(CharField(max_length=15)) new_field.set_attributes_from_name('field') new_field.model = ArrayModel with connection.schema_editor() as editor: msg = 'value too long for type character varying(15)' with self.assertRaisesMessage(DataError, msg): editor.alter_field(ArrayModel, old_field, new_field, strict=True) @isolate_apps('schema') @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_array_field_decrease_nested_base_field_length(self): from django.contrib.postgres.fields import ArrayField class ArrayModel(Model): field = ArrayField(ArrayField(CharField(max_length=16))) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(ArrayModel) self.isolated_local_models = [ArrayModel] ArrayModel.objects.create(field=[['x' * 16]]) old_field = ArrayModel._meta.get_field('field') new_field = ArrayField(ArrayField(CharField(max_length=15))) new_field.set_attributes_from_name('field') new_field.model = ArrayModel with connection.schema_editor() as editor: msg = 'value too long for type character varying(15)' with self.assertRaisesMessage(DataError, msg): editor.alter_field(ArrayModel, old_field, new_field, strict=True) def test_alter_textfield_to_null(self): """ #24307 - Should skip an alter statement on databases with interprets_empty_strings_as_null when changing a TextField to null. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Note) # Change the TextField to null old_field = Note._meta.get_field('info') new_field = copy(old_field) new_field.null = True with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) @skipUnlessDBFeature('supports_combined_alters') def test_alter_null_to_not_null_keeping_default(self): """ #23738 - Can change a nullable field with default to non-nullable with the same default. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(AuthorWithDefaultHeight) # Ensure the field is right to begin with columns = self.column_classes(AuthorWithDefaultHeight) self.assertTrue(columns['height'][1][6]) # Alter the height field to NOT NULL keeping the previous default old_field = AuthorWithDefaultHeight._meta.get_field("height") new_field = PositiveIntegerField(default=42) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(AuthorWithDefaultHeight) self.assertFalse(columns['height'][1][6]) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_fk(self): """ Tests altering of FKs """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the field is right to begin with columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) self.assertForeignKeyExists(Book, 'author_id', 'schema_author') # Alter the FK old_field = Book._meta.get_field("author") new_field = ForeignKey(Author, CASCADE, editable=False) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) self.assertForeignKeyExists(Book, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_to_fk(self): """ #24447 - Tests adding a FK constraint for an existing column """ class LocalBook(Model): author = IntegerField() title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBook] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(LocalBook) # Ensure no FK constraint exists constraints = self.get_constraints(LocalBook._meta.db_table) for details in constraints.values(): if details['foreign_key']: self.fail('Found an unexpected FK constraint to %s' % details['columns']) old_field = LocalBook._meta.get_field("author") new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(LocalBook, old_field, new_field, strict=True) self.assertForeignKeyExists(LocalBook, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_o2o_to_fk(self): """ #24163 - Tests altering of OneToOneField to ForeignKey """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) # Ensure the field is right to begin with columns = self.column_classes(BookWithO2O) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is unique author = Author.objects.create(name="Joe") BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) with self.assertRaises(IntegrityError): BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) BookWithO2O.objects.all().delete() self.assertForeignKeyExists(BookWithO2O, 'author_id', 
'schema_author') # Alter the OneToOneField to ForeignKey old_field = BookWithO2O._meta.get_field("author") new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(BookWithO2O, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is not unique anymore Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) self.assertForeignKeyExists(Book, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_fk_to_o2o(self): """ #24163 - Tests altering of ForeignKey to OneToOneField """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the field is right to begin with columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is not unique author = Author.objects.create(name="Joe") Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) Book.objects.all().delete() self.assertForeignKeyExists(Book, 'author_id', 'schema_author') # Alter the ForeignKey to OneToOneField old_field = Book._meta.get_field("author") new_field = OneToOneField(Author, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(BookWithO2O) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is unique now BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) with self.assertRaises(IntegrityError): BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author') def test_alter_field_fk_to_o2o(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) expected_fks = 1 if connection.features.supports_foreign_keys else 0 expected_indexes = 1 if connection.features.indexes_foreign_keys else 0 # Check the index is right to begin with. counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual( counts, {'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes}, ) old_field = Book._meta.get_field('author') new_field = OneToOneField(Author, CASCADE) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The index on ForeignKey is replaced with a unique constraint for OneToOneField. 
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) def test_alter_field_fk_keeps_index(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) expected_fks = 1 if connection.features.supports_foreign_keys else 0 expected_indexes = 1 if connection.features.indexes_foreign_keys else 0 # Check the index is right to begin with. counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual( counts, {'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes}, ) old_field = Book._meta.get_field('author') # on_delete changed from CASCADE. new_field = ForeignKey(Author, PROTECT) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The index remains. self.assertEqual( counts, {'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes}, ) def test_alter_field_o2o_to_fk(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) expected_fks = 1 if connection.features.supports_foreign_keys else 0 # Check the unique constraint is right to begin with. counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) old_field = BookWithO2O._meta.get_field('author') new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(BookWithO2O, old_field, new_field, strict=True) counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The unique constraint on OneToOneField is replaced with an index for ForeignKey. self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1}) def test_alter_field_o2o_keeps_unique(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) expected_fks = 1 if connection.features.supports_foreign_keys else 0 # Check the unique constraint is right to begin with. counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) old_field = BookWithO2O._meta.get_field('author') # on_delete changed from CASCADE. new_field = OneToOneField(Author, PROTECT) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(BookWithO2O, old_field, new_field, strict=True) counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The unique constraint remains. 
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) @skipUnlessDBFeature('ignores_table_name_case') def test_alter_db_table_case(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Alter the case of the table old_table_name = Author._meta.db_table with connection.schema_editor() as editor: editor.alter_db_table(Author, old_table_name, old_table_name.upper()) def test_alter_implicit_id_to_explicit(self): """ Should be able to convert an implicit "id" field to an explicit "id" primary key field. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field("id") new_field = AutoField(primary_key=True) new_field.set_attributes_from_name("id") new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # This will fail if DROP DEFAULT is inadvertently executed on this # field which drops the id sequence, at least on PostgreSQL. Author.objects.create(name='Foo') Author.objects.create(name='Bar') def test_alter_autofield_pk_to_bigautofield_pk_sequence_owner(self): """ Converting an implicit PK to BigAutoField(primary_key=True) should keep a sequence owner on PostgreSQL. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) Author.objects.create(name='Foo', pk=1) with connection.cursor() as cursor: sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author]) if sequence_reset_sqls: cursor.execute(sequence_reset_sqls[0]) # Fail on PostgreSQL if sequence is missing an owner. self.assertIsNotNone(Author.objects.create(name='Bar')) def test_alter_autofield_pk_to_smallautofield_pk_sequence_owner(self): """ Converting an implicit PK to SmallAutoField(primary_key=True) should keep a sequence owner on PostgreSQL. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field('id') new_field = SmallAutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) Author.objects.create(name='Foo', pk=1) with connection.cursor() as cursor: sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author]) if sequence_reset_sqls: cursor.execute(sequence_reset_sqls[0]) # Fail on PostgreSQL if sequence is missing an owner. self.assertIsNotNone(Author.objects.create(name='Bar')) def test_alter_int_pk_to_autofield_pk(self): """ Should be able to rename an IntegerField(primary_key=True) to AutoField(primary_key=True). """ with connection.schema_editor() as editor: editor.create_model(IntegerPK) old_field = IntegerPK._meta.get_field('i') new_field = AutoField(primary_key=True) new_field.model = IntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(IntegerPK, old_field, new_field, strict=True) # A model representing the updated model. class IntegerPKToAutoField(Model): i = AutoField(primary_key=True) j = IntegerField(unique=True) class Meta: app_label = 'schema' apps = new_apps db_table = IntegerPK._meta.db_table # An id (i) is generated by the database. 
obj = IntegerPKToAutoField.objects.create(j=1) self.assertIsNotNone(obj.i) def test_alter_int_pk_to_bigautofield_pk(self): """ Should be able to rename an IntegerField(primary_key=True) to BigAutoField(primary_key=True). """ with connection.schema_editor() as editor: editor.create_model(IntegerPK) old_field = IntegerPK._meta.get_field('i') new_field = BigAutoField(primary_key=True) new_field.model = IntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(IntegerPK, old_field, new_field, strict=True) # A model representing the updated model. class IntegerPKToBigAutoField(Model): i = BigAutoField(primary_key=True) j = IntegerField(unique=True) class Meta: app_label = 'schema' apps = new_apps db_table = IntegerPK._meta.db_table # An id (i) is generated by the database. obj = IntegerPKToBigAutoField.objects.create(j=1) self.assertIsNotNone(obj.i) @isolate_apps('schema') def test_alter_smallint_pk_to_smallautofield_pk(self): """ Should be able to rename an SmallIntegerField(primary_key=True) to SmallAutoField(primary_key=True). """ class SmallIntegerPK(Model): i = SmallIntegerField(primary_key=True) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(SmallIntegerPK) self.isolated_local_models = [SmallIntegerPK] old_field = SmallIntegerPK._meta.get_field('i') new_field = SmallAutoField(primary_key=True) new_field.model = SmallIntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(SmallIntegerPK, old_field, new_field, strict=True) def test_alter_int_pk_to_int_unique(self): """ Should be able to rename an IntegerField(primary_key=True) to IntegerField(unique=True). """ with connection.schema_editor() as editor: editor.create_model(IntegerPK) # Delete the old PK old_field = IntegerPK._meta.get_field('i') new_field = IntegerField(unique=True) new_field.model = IntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(IntegerPK, old_field, new_field, strict=True) # The primary key constraint is gone. Result depends on database: # 'id' for SQLite, None for others (must not be 'i'). self.assertIn(self.get_primary_key(IntegerPK._meta.db_table), ('id', None)) # Set up a model class as it currently stands. The original IntegerPK # class is now out of date and some backends make use of the whole # model class when modifying a field (such as sqlite3 when remaking a # table) so an outdated model class leads to incorrect results. class Transitional(Model): i = IntegerField(unique=True) j = IntegerField(unique=True) class Meta: app_label = 'schema' apps = new_apps db_table = 'INTEGERPK' # model requires a new PK old_field = Transitional._meta.get_field('j') new_field = IntegerField(primary_key=True) new_field.model = Transitional new_field.set_attributes_from_name('j') with connection.schema_editor() as editor: editor.alter_field(Transitional, old_field, new_field, strict=True) # Create a model class representing the updated model. class IntegerUnique(Model): i = IntegerField(unique=True) j = IntegerField(primary_key=True) class Meta: app_label = 'schema' apps = new_apps db_table = 'INTEGERPK' # Ensure unique constraint works. 
IntegerUnique.objects.create(i=1, j=1) with self.assertRaises(IntegrityError): IntegerUnique.objects.create(i=1, j=2) def test_rename(self): """ Tests simple altering of fields """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) self.assertNotIn("display_name", columns) # Alter the name field's name old_field = Author._meta.get_field("name") new_field = CharField(max_length=254) new_field.set_attributes_from_name("display_name") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['display_name'][0], connection.features.introspected_field_types['CharField']) self.assertNotIn("name", columns) @isolate_apps('schema') def test_rename_referenced_field(self): class Author(Model): name = CharField(max_length=255, unique=True) class Meta: app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE, to_field='name') class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('renamed') with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: editor.alter_field(Author, Author._meta.get_field('name'), new_field) # Ensure the foreign key reference was updated. self.assertForeignKeyExists(Book, 'author_id', 'schema_author', 'renamed') @skipIfDBFeature('interprets_empty_strings_as_nulls') def test_rename_keep_null_status(self): """ Renaming a field shouldn't affect the not null status. 
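The renamed column must still reject NULL values.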
""" with connection.schema_editor() as editor: editor.create_model(Note) with self.assertRaises(IntegrityError): Note.objects.create(info=None) old_field = Note._meta.get_field("info") new_field = TextField() new_field.set_attributes_from_name("detail_info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) columns = self.column_classes(Note) self.assertEqual(columns['detail_info'][0], "TextField") self.assertNotIn("info", columns) with self.assertRaises(IntegrityError): NoteRename.objects.create(detail_info=None) def _test_m2m_create(self, M2MFieldClass): """ Tests M2M fields on models during creation """ class LocalBookWithM2M(Model): author = ForeignKey(Author, CASCADE) title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() tags = M2MFieldClass("TagM2MTest", related_name="books") class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBookWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(TagM2MTest) editor.create_model(LocalBookWithM2M) # Ensure there is now an m2m table there columns = self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through) self.assertEqual(columns['tagm2mtest_id'][0], connection.features.introspected_field_types['IntegerField']) def test_m2m_create(self): self._test_m2m_create(ManyToManyField) def test_m2m_create_custom(self): self._test_m2m_create(CustomManyToManyField) def test_m2m_create_inherited(self): self._test_m2m_create(InheritedManyToManyField) def _test_m2m_create_through(self, M2MFieldClass): """ Tests M2M fields on models during creation with through models """ class LocalTagThrough(Model): book = ForeignKey("schema.LocalBookWithM2MThrough", CASCADE) tag = ForeignKey("schema.TagM2MTest", CASCADE) class Meta: app_label = 'schema' apps = new_apps class LocalBookWithM2MThrough(Model): tags = M2MFieldClass("TagM2MTest", related_name="books", through=LocalTagThrough) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalTagThrough, LocalBookWithM2MThrough] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalTagThrough) editor.create_model(TagM2MTest) editor.create_model(LocalBookWithM2MThrough) # Ensure there is now an m2m table there columns = self.column_classes(LocalTagThrough) self.assertEqual(columns['book_id'][0], connection.features.introspected_field_types['IntegerField']) self.assertEqual(columns['tag_id'][0], connection.features.introspected_field_types['IntegerField']) def test_m2m_create_through(self): self._test_m2m_create_through(ManyToManyField) def test_m2m_create_through_custom(self): self._test_m2m_create_through(CustomManyToManyField) def test_m2m_create_through_inherited(self): self._test_m2m_create_through(InheritedManyToManyField) def _test_m2m(self, M2MFieldClass): """ Tests adding/removing M2M fields on models """ class LocalAuthorWithM2M(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalAuthorWithM2M) editor.create_model(TagM2MTest) # Create an M2M field new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors") new_field.contribute_to_class(LocalAuthorWithM2M, "tags") # Ensure there's no m2m table there with self.assertRaises(DatabaseError): self.column_classes(new_field.remote_field.through) # Add the field with 
connection.schema_editor() as editor: editor.add_field(LocalAuthorWithM2M, new_field) # Ensure there is now an m2m table there columns = self.column_classes(new_field.remote_field.through) self.assertEqual(columns['tagm2mtest_id'][0], connection.features.introspected_field_types['IntegerField']) # "Alter" the field. This should not rename the DB table to itself. with connection.schema_editor() as editor: editor.alter_field(LocalAuthorWithM2M, new_field, new_field, strict=True) # Remove the M2M table again with connection.schema_editor() as editor: editor.remove_field(LocalAuthorWithM2M, new_field) # Ensure there's no m2m table there with self.assertRaises(DatabaseError): self.column_classes(new_field.remote_field.through) # Make sure the model state is coherent with the table one now that # we've removed the tags field. opts = LocalAuthorWithM2M._meta opts.local_many_to_many.remove(new_field) del new_apps.all_models['schema'][new_field.remote_field.through._meta.model_name] opts._expire_cache() def test_m2m(self): self._test_m2m(ManyToManyField) def test_m2m_custom(self): self._test_m2m(CustomManyToManyField) def test_m2m_inherited(self): self._test_m2m(InheritedManyToManyField) def _test_m2m_through_alter(self, M2MFieldClass): """ Tests altering M2Ms with explicit through models (should no-op) """ class LocalAuthorTag(Model): author = ForeignKey("schema.LocalAuthorWithM2MThrough", CASCADE) tag = ForeignKey("schema.TagM2MTest", CASCADE) class Meta: app_label = 'schema' apps = new_apps class LocalAuthorWithM2MThrough(Model): name = CharField(max_length=255) tags = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalAuthorTag) editor.create_model(LocalAuthorWithM2MThrough) editor.create_model(TagM2MTest) # Ensure the m2m table is there self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3) # "Alter" the field's blankness. This should not actually do anything. 
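# Both the old and the new field use the same explicit through model, so no schema changes are expected.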
old_field = LocalAuthorWithM2MThrough._meta.get_field("tags") new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag) new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags") with connection.schema_editor() as editor: editor.alter_field(LocalAuthorWithM2MThrough, old_field, new_field, strict=True) # Ensure the m2m table is still there self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3) def test_m2m_through_alter(self): self._test_m2m_through_alter(ManyToManyField) def test_m2m_through_alter_custom(self): self._test_m2m_through_alter(CustomManyToManyField) def test_m2m_through_alter_inherited(self): self._test_m2m_through_alter(InheritedManyToManyField) def _test_m2m_repoint(self, M2MFieldClass): """ Tests repointing M2M fields """ class LocalBookWithM2M(Model): author = ForeignKey(Author, CASCADE) title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() tags = M2MFieldClass("TagM2MTest", related_name="books") class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBookWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(LocalBookWithM2M) editor.create_model(TagM2MTest) editor.create_model(UniqueTest) # Ensure the M2M exists and points to TagM2MTest if connection.features.supports_foreign_keys: self.assertForeignKeyExists( LocalBookWithM2M._meta.get_field("tags").remote_field.through, 'tagm2mtest_id', 'schema_tagm2mtest', ) # Repoint the M2M old_field = LocalBookWithM2M._meta.get_field("tags") new_field = M2MFieldClass(UniqueTest) new_field.contribute_to_class(LocalBookWithM2M, "uniques") with connection.schema_editor() as editor: editor.alter_field(LocalBookWithM2M, old_field, new_field, strict=True) # Ensure old M2M is gone with self.assertRaises(DatabaseError): self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through) # This model looks like the new model and is used for teardown. opts = LocalBookWithM2M._meta opts.local_many_to_many.remove(old_field) # Ensure the new M2M exists and points to UniqueTest if connection.features.supports_foreign_keys: self.assertForeignKeyExists(new_field.remote_field.through, 'uniquetest_id', 'schema_uniquetest') def test_m2m_repoint(self): self._test_m2m_repoint(ManyToManyField) def test_m2m_repoint_custom(self): self._test_m2m_repoint(CustomManyToManyField) def test_m2m_repoint_inherited(self): self._test_m2m_repoint(InheritedManyToManyField) @isolate_apps('schema') def test_m2m_rename_field_in_target_model(self): class LocalTagM2MTest(Model): title = CharField(max_length=255) class Meta: app_label = 'schema' class LocalM2M(Model): tags = ManyToManyField(LocalTagM2MTest) class Meta: app_label = 'schema' # Create the tables. with connection.schema_editor() as editor: editor.create_model(LocalM2M) editor.create_model(LocalTagM2MTest) self.isolated_local_models = [LocalM2M, LocalTagM2MTest] # Ensure the m2m table is there. self.assertEqual(len(self.column_classes(LocalM2M)), 1) # Alter a field in LocalTagM2MTest. old_field = LocalTagM2MTest._meta.get_field('title') new_field = CharField(max_length=254) new_field.contribute_to_class(LocalTagM2MTest, 'title1') # @isolate_apps() and inner models are needed to have the model # relations populated, otherwise this doesn't act as a regression test. 
self.assertEqual(len(new_field.model._meta.related_objects), 1) with connection.schema_editor() as editor: editor.alter_field(LocalTagM2MTest, old_field, new_field, strict=True) # Ensure the m2m table is still there. self.assertEqual(len(self.column_classes(LocalM2M)), 1) @skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints') def test_check_constraints(self): """ Tests creating/deleting CHECK constraints """ # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the constraint exists constraints = self.get_constraints(Author._meta.db_table) if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()): self.fail("No check constraint for height found") # Alter the column to remove it old_field = Author._meta.get_field("height") new_field = IntegerField(null=True, blank=True) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) constraints = self.get_constraints(Author._meta.db_table) for details in constraints.values(): if details['columns'] == ["height"] and details['check']: self.fail("Check constraint for height found") # Alter the column to re-add it new_field2 = Author._meta.get_field("height") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) constraints = self.get_constraints(Author._meta.db_table) if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()): self.fail("No check constraint for height found") @skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints') def test_remove_field_check_does_not_remove_meta_constraints(self): with connection.schema_editor() as editor: editor.create_model(Author) # Add the custom check constraint constraint = CheckConstraint(check=Q(height__gte=0), name='author_height_gte_0_check') custom_constraint_name = constraint.name Author._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) # Ensure the constraints exist constraints = self.get_constraints(Author._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Alter the column to remove field check old_field = Author._meta.get_field('height') new_field = IntegerField(null=True, blank=True) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) constraints = self.get_constraints(Author._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 0) # Alter the column to re-add field check new_field2 = Author._meta.get_field('height') with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) constraints = self.get_constraints(Author._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['height'] and details['check'] and name 
!= custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Drop the check constraint with connection.schema_editor() as editor: Author._meta.constraints = [] editor.remove_constraint(Author, constraint) def test_unique(self): """ Tests removing and adding unique constraints to a single column. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure the field is unique to begin with Tag.objects.create(title="foo", slug="foo") with self.assertRaises(IntegrityError): Tag.objects.create(title="bar", slug="foo") Tag.objects.all().delete() # Alter the slug field to be non-unique old_field = Tag._meta.get_field("slug") new_field = SlugField(unique=False) new_field.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_field(Tag, old_field, new_field, strict=True) # Ensure the field is no longer unique Tag.objects.create(title="foo", slug="foo") Tag.objects.create(title="bar", slug="foo") Tag.objects.all().delete() # Alter the slug field to be unique new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_field(Tag, new_field, new_field2, strict=True) # Ensure the field is unique again Tag.objects.create(title="foo", slug="foo") with self.assertRaises(IntegrityError): Tag.objects.create(title="bar", slug="foo") Tag.objects.all().delete() # Rename the field new_field3 = SlugField(unique=True) new_field3.set_attributes_from_name("slug2") with connection.schema_editor() as editor: editor.alter_field(Tag, new_field2, new_field3, strict=True) # Ensure the field is still unique TagUniqueRename.objects.create(title="foo", slug2="foo") with self.assertRaises(IntegrityError): TagUniqueRename.objects.create(title="bar", slug2="foo") Tag.objects.all().delete() def test_unique_name_quoting(self): old_table_name = TagUniqueRename._meta.db_table try: with connection.schema_editor() as editor: editor.create_model(TagUniqueRename) editor.alter_db_table(TagUniqueRename, old_table_name, 'unique-table') TagUniqueRename._meta.db_table = 'unique-table' # This fails if the unique index name isn't quoted. editor.alter_unique_together(TagUniqueRename, [], (('title', 'slug2'),)) finally: TagUniqueRename._meta.db_table = old_table_name @isolate_apps('schema') @skipUnlessDBFeature('supports_foreign_keys') def test_unique_no_unnecessary_fk_drops(self): """ If AlterField isn't selective about dropping foreign key constraints when modifying a field with a unique constraint, the AlterField incorrectly drops and recreates the Book.author foreign key even though it doesn't restrict the field being changed (#29193). """ class Author(Model): name = CharField(max_length=254, unique=True) class Meta: app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) new_field = CharField(max_length=255, unique=True) new_field.model = Author new_field.set_attributes_from_name('name') with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm: with connection.schema_editor() as editor: editor.alter_field(Author, Author._meta.get_field('name'), new_field) # One SQL statement is executed to alter the field. self.assertEqual(len(cm.records), 1) @isolate_apps('schema') def test_unique_and_reverse_m2m(self): """ AlterField can modify a unique field when there's a reverse M2M relation on the model. 
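The alteration should be done in a single SQL statement and the unique constraint must remain enforced.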
""" class Tag(Model): title = CharField(max_length=255) slug = SlugField(unique=True) class Meta: app_label = 'schema' class Book(Model): tags = ManyToManyField(Tag, related_name='books') class Meta: app_label = 'schema' self.isolated_local_models = [Book._meta.get_field('tags').remote_field.through] with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(Book) new_field = SlugField(max_length=75, unique=True) new_field.model = Tag new_field.set_attributes_from_name('slug') with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm: with connection.schema_editor() as editor: editor.alter_field(Tag, Tag._meta.get_field('slug'), new_field) # One SQL statement is executed to alter the field. self.assertEqual(len(cm.records), 1) # Ensure that the field is still unique. Tag.objects.create(title='foo', slug='foo') with self.assertRaises(IntegrityError): Tag.objects.create(title='bar', slug='foo') @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_field_unique_does_not_remove_meta_constraints(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithUniqueName) # Add the custom unique constraint constraint = UniqueConstraint(fields=['name'], name='author_name_uniq') custom_constraint_name = constraint.name AuthorWithUniqueName._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(AuthorWithUniqueName, constraint) # Ensure the constraints exist constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Alter the column to remove field uniqueness old_field = AuthorWithUniqueName._meta.get_field('name') new_field = CharField(max_length=255) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(AuthorWithUniqueName, old_field, new_field, strict=True) constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 0) # Alter the column to re-add field uniqueness new_field2 = AuthorWithUniqueName._meta.get_field('name') with connection.schema_editor() as editor: editor.alter_field(AuthorWithUniqueName, new_field, new_field2, strict=True) constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Drop the unique constraint with connection.schema_editor() as editor: AuthorWithUniqueName._meta.constraints = [] editor.remove_constraint(AuthorWithUniqueName, constraint) def test_unique_together(self): """ Tests removing and adding unique_together constraints on a model. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(UniqueTest) # Ensure the fields are unique to begin with UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.create(year=2011, slug="foo") UniqueTest.objects.create(year=2011, slug="bar") with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.all().delete() # Alter the model to its non-unique-together companion with connection.schema_editor() as editor: editor.alter_unique_together(UniqueTest, UniqueTest._meta.unique_together, []) # Ensure the fields are no longer unique UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.all().delete() # Alter it back new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_unique_together(UniqueTest, [], UniqueTest._meta.unique_together) # Ensure the fields are unique again UniqueTest.objects.create(year=2012, slug="foo") with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.all().delete() def test_unique_together_with_fk(self): """ Tests removing and adding unique_together constraints that include a foreign key. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the fields are unique to begin with self.assertEqual(Book._meta.unique_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_unique_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_unique_together(Book, [['author', 'title']], []) def test_unique_together_with_fk_with_existing_index(self): """ Tests removing and adding unique_together constraints that include a foreign key, where the foreign key is added after the model is created. 
""" # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithoutAuthor) new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name('author') editor.add_field(BookWithoutAuthor, new_field) # Ensure the fields aren't unique to begin with self.assertEqual(Book._meta.unique_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_unique_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_unique_together(Book, [['author', 'title']], []) @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_unique_together_does_not_remove_meta_constraints(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithUniqueNameAndBirthday) # Add the custom unique constraint constraint = UniqueConstraint(fields=['name', 'birthday'], name='author_name_birthday_uniq') custom_constraint_name = constraint.name AuthorWithUniqueNameAndBirthday._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(AuthorWithUniqueNameAndBirthday, constraint) # Ensure the constraints exist constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Remove unique together unique_together = AuthorWithUniqueNameAndBirthday._meta.unique_together with connection.schema_editor() as editor: editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, unique_together, []) constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 0) # Re-add unique together with connection.schema_editor() as editor: editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, [], unique_together) constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Drop the unique constraint with connection.schema_editor() as editor: AuthorWithUniqueNameAndBirthday._meta.constraints = [] editor.remove_constraint(AuthorWithUniqueNameAndBirthday, constraint) def test_unique_constraint(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint(fields=['name'], name='name_uq') # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table self.assertIs(sql.references_table(table), True) self.assertIs(sql.references_column(table, 'name'), True) # Remove constraint. 
with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint(Upper('name').desc(), name='func_upper_uq') # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table constraints = self.get_constraints(table) if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, constraint.name, ['DESC']) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) # SQL contains a database function. self.assertIs(sql.references_column(table, 'name'), True) self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql)) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_composite_func_unique_constraint(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithSlug) constraint = UniqueConstraint( Upper('title'), Lower('slug'), name='func_upper_lower_unq', ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(BookWithSlug, constraint) sql = constraint.create_sql(BookWithSlug, editor) table = BookWithSlug._meta.db_table constraints = self.get_constraints(table) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) # SQL contains database functions. self.assertIs(sql.references_column(table, 'title'), True) self.assertIs(sql.references_column(table, 'slug'), True) sql = str(sql) self.assertIn('UPPER(%s)' % editor.quote_name('title'), sql) self.assertIn('LOWER(%s)' % editor.quote_name('slug'), sql) self.assertLess(sql.index('UPPER'), sql.index('LOWER')) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(BookWithSlug, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_unique_constraint_field_and_expression(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint( F('height').desc(), 'uuid', Lower('name').asc(), name='func_f_lower_field_unq', ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, constraint.name, ['DESC', 'ASC', 'ASC']) constraints = self.get_constraints(table) self.assertIs(constraints[constraint.name]['unique'], True) self.assertEqual(len(constraints[constraint.name]['columns']), 3) self.assertEqual(constraints[constraint.name]['columns'][1], 'uuid') # SQL contains database functions and columns. self.assertIs(sql.references_column(table, 'height'), True) self.assertIs(sql.references_column(table, 'name'), True) self.assertIs(sql.references_column(table, 'uuid'), True) self.assertIn('LOWER(%s)' % editor.quote_name('name'), str(sql)) # Remove constraint. 
with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes', 'supports_partial_indexes') def test_func_unique_constraint_partial(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint( Upper('name'), name='func_upper_cond_weight_uq', condition=Q(weight__isnull=False), ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table constraints = self.get_constraints(table) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) self.assertIs(sql.references_column(table, 'name'), True) self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql)) self.assertIn( 'WHERE %s IS NOT NULL' % editor.quote_name('weight'), str(sql), ) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes', 'supports_covering_indexes') def test_func_unique_constraint_covering(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint( Upper('name'), name='func_upper_covering_uq', include=['weight', 'height'], ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table constraints = self.get_constraints(table) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) self.assertEqual( constraints[constraint.name]['columns'], [None, 'weight', 'height'], ) self.assertIs(sql.references_column(table, 'name'), True) self.assertIs(sql.references_column(table, 'weight'), True) self.assertIs(sql.references_column(table, 'height'), True) self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql)) self.assertIn( 'INCLUDE (%s, %s)' % ( editor.quote_name('weight'), editor.quote_name('height'), ), str(sql), ) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint_lookups(self): with connection.schema_editor() as editor: editor.create_model(Author) with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs): constraint = UniqueConstraint( F('name__lower'), F('weight__abs'), name='func_lower_abs_lookup_uq', ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table constraints = self.get_constraints(table) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) # SQL contains columns. self.assertIs(sql.references_column(table, 'name'), True) self.assertIs(sql.references_column(table, 'weight'), True) # Remove constraint. 
        with connection.schema_editor() as editor:
            editor.remove_constraint(Author, constraint)
        self.assertNotIn(constraint.name, self.get_constraints(table))

    @skipUnlessDBFeature('supports_expression_indexes')
    def test_func_unique_constraint_collate(self):
        collation = connection.features.test_collations.get('non_default')
        if not collation:
            self.skipTest(
                'This backend does not support case-insensitive collations.'
            )
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(BookWithSlug)
        constraint = UniqueConstraint(
            Collate(F('title'), collation=collation).desc(),
            Collate('slug', collation=collation),
            name='func_collate_uq',
        )
        # Add constraint.
        with connection.schema_editor() as editor:
            editor.add_constraint(BookWithSlug, constraint)
            sql = constraint.create_sql(BookWithSlug, editor)
        table = BookWithSlug._meta.db_table
        constraints = self.get_constraints(table)
        self.assertIn(constraint.name, constraints)
        self.assertIs(constraints[constraint.name]['unique'], True)
        if connection.features.supports_index_column_ordering:
            self.assertIndexOrder(table, constraint.name, ['DESC', 'ASC'])
        # SQL contains columns and a collation.
        self.assertIs(sql.references_column(table, 'title'), True)
        self.assertIs(sql.references_column(table, 'slug'), True)
        self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql))
        # Remove constraint.
        with connection.schema_editor() as editor:
            editor.remove_constraint(BookWithSlug, constraint)
        self.assertNotIn(constraint.name, self.get_constraints(table))

    @skipIfDBFeature('supports_expression_indexes')
    def test_func_unique_constraint_unsupported(self):
        # UniqueConstraint is ignored on databases that don't support indexes on
        # expressions.
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        constraint = UniqueConstraint(F('name'), name='func_name_uq')
        with connection.schema_editor() as editor, self.assertNumQueries(0):
            self.assertIsNone(editor.add_constraint(Author, constraint))
            self.assertIsNone(editor.remove_constraint(Author, constraint))

    @skipUnlessDBFeature('supports_expression_indexes')
    def test_func_unique_constraint_nonexistent_field(self):
        constraint = UniqueConstraint(Lower('nonexistent'), name='func_nonexistent_uq')
        msg = (
            "Cannot resolve keyword 'nonexistent' into field. Choices are: "
            "height, id, name, uuid, weight"
        )
        with self.assertRaisesMessage(FieldError, msg):
            with connection.schema_editor() as editor:
                editor.add_constraint(Author, constraint)

    @skipUnlessDBFeature('supports_expression_indexes')
    def test_func_unique_constraint_nondeterministic(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        constraint = UniqueConstraint(Random(), name='func_random_uq')
        with connection.schema_editor() as editor:
            with self.assertRaises(DatabaseError):
                editor.add_constraint(Author, constraint)

    def test_index_together(self):
        """
        Tests removing and adding index_together constraints on a model.
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure there's no index on the year/slug columns first self.assertIs( any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), False, ) # Alter the model to add an index with connection.schema_editor() as editor: editor.alter_index_together(Tag, [], [("slug", "title")]) # Ensure there is now an index self.assertIs( any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), True, ) # Alter it back new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_index_together(Tag, [("slug", "title")], []) # Ensure there's no index self.assertIs( any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), False, ) def test_index_together_with_fk(self): """ Tests removing and adding index_together constraints that include a foreign key. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the fields are unique to begin with self.assertEqual(Book._meta.index_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_index_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_index_together(Book, [['author', 'title']], []) def test_create_index_together(self): """ Tests creating models with index_together already defined """ # Create the table with connection.schema_editor() as editor: editor.create_model(TagIndexed) # Ensure there is an index self.assertIs( any( c["index"] for c in self.get_constraints("schema_tagindexed").values() if c['columns'] == ["slug", "title"] ), True, ) @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_index_together_does_not_remove_meta_indexes(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithIndexedNameAndBirthday) # Add the custom index index = Index(fields=['name', 'birthday'], name='author_name_birthday_idx') custom_index_name = index.name AuthorWithIndexedNameAndBirthday._meta.indexes = [index] with connection.schema_editor() as editor: editor.add_index(AuthorWithIndexedNameAndBirthday, index) # Ensure the indexes exist constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 1) # Remove index together index_together = AuthorWithIndexedNameAndBirthday._meta.index_together with connection.schema_editor() as editor: editor.alter_index_together(AuthorWithIndexedNameAndBirthday, index_together, []) constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 0) # Re-add index together with connection.schema_editor() as editor: editor.alter_index_together(AuthorWithIndexedNameAndBirthday, [], index_together) constraints = 
self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 1) # Drop the index with connection.schema_editor() as editor: AuthorWithIndexedNameAndBirthday._meta.indexes = [] editor.remove_index(AuthorWithIndexedNameAndBirthday, index) @isolate_apps('schema') def test_db_table(self): """ Tests renaming of the table """ class Author(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' # Create the table and one referring it. with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the table is there to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) # Alter the table with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: editor.alter_db_table(Author, "schema_author", "schema_otherauthor") # Ensure the table is there afterwards Author._meta.db_table = "schema_otherauthor" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) # Ensure the foreign key reference was updated self.assertForeignKeyExists(Book, "author_id", "schema_otherauthor") # Alter the table again with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: editor.alter_db_table(Author, "schema_otherauthor", "schema_author") # Ensure the table is still there Author._meta.db_table = "schema_author" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) def test_add_remove_index(self): """ Tests index addition and removal """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the table is there and has no index self.assertNotIn('title', self.get_indexes(Author._meta.db_table)) # Add the index index = Index(fields=['name'], name='author_title_idx') with connection.schema_editor() as editor: editor.add_index(Author, index) self.assertIn('name', self.get_indexes(Author._meta.db_table)) # Drop the index with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn('name', self.get_indexes(Author._meta.db_table)) def test_remove_db_index_doesnt_remove_custom_indexes(self): """ Changing db_index to False doesn't remove indexes from Meta.indexes. 
""" with connection.schema_editor() as editor: editor.create_model(AuthorWithIndexedName) # Ensure the table has its index self.assertIn('name', self.get_indexes(AuthorWithIndexedName._meta.db_table)) # Add the custom index index = Index(fields=['-name'], name='author_name_idx') author_index_name = index.name with connection.schema_editor() as editor: db_index_name = editor._create_index_name( table_name=AuthorWithIndexedName._meta.db_table, column_names=('name',), ) try: AuthorWithIndexedName._meta.indexes = [index] with connection.schema_editor() as editor: editor.add_index(AuthorWithIndexedName, index) old_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table) self.assertIn(author_index_name, old_constraints) self.assertIn(db_index_name, old_constraints) # Change name field to db_index=False old_field = AuthorWithIndexedName._meta.get_field('name') new_field = CharField(max_length=255) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(AuthorWithIndexedName, old_field, new_field, strict=True) new_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table) self.assertNotIn(db_index_name, new_constraints) # The index from Meta.indexes is still in the database. self.assertIn(author_index_name, new_constraints) # Drop the index with connection.schema_editor() as editor: editor.remove_index(AuthorWithIndexedName, index) finally: AuthorWithIndexedName._meta.indexes = [] def test_order_index(self): """ Indexes defined with ordering (ASC/DESC) defined on column """ with connection.schema_editor() as editor: editor.create_model(Author) # The table doesn't have an index self.assertNotIn('title', self.get_indexes(Author._meta.db_table)) index_name = 'author_name_idx' # Add the index index = Index(fields=['name', '-weight'], name=index_name) with connection.schema_editor() as editor: editor.add_index(Author, index) if connection.features.supports_index_column_ordering: self.assertIndexOrder(Author._meta.db_table, index_name, ['ASC', 'DESC']) # Drop the index with connection.schema_editor() as editor: editor.remove_index(Author, index) def test_indexes(self): """ Tests creation/altering of indexes """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the table is there and has the right index self.assertIn( "title", self.get_indexes(Book._meta.db_table), ) # Alter to remove the index old_field = Book._meta.get_field("title") new_field = CharField(max_length=100, db_index=False) new_field.set_attributes_from_name("title") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the table is there and has no index self.assertNotIn( "title", self.get_indexes(Book._meta.db_table), ) # Alter to re-add the index new_field2 = Book._meta.get_field("title") with connection.schema_editor() as editor: editor.alter_field(Book, new_field, new_field2, strict=True) # Ensure the table is there and has the index again self.assertIn( "title", self.get_indexes(Book._meta.db_table), ) # Add a unique column, verify that creates an implicit index new_field3 = BookWithSlug._meta.get_field("slug") with connection.schema_editor() as editor: editor.add_field(Book, new_field3) self.assertIn( "slug", self.get_uniques(Book._meta.db_table), ) # Remove the unique, check the index goes with it new_field4 = CharField(max_length=20, unique=False) new_field4.set_attributes_from_name("slug") with connection.schema_editor() 
as editor: editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True) self.assertNotIn( "slug", self.get_uniques(Book._meta.db_table), ) def test_text_field_with_db_index(self): with connection.schema_editor() as editor: editor.create_model(AuthorTextFieldWithIndex) # The text_field index is present if the database supports it. assertion = self.assertIn if connection.features.supports_index_on_text_field else self.assertNotIn assertion('text_field', self.get_indexes(AuthorTextFieldWithIndex._meta.db_table)) def _index_expressions_wrappers(self): index_expression = IndexExpression() index_expression.set_wrapper_classes(connection) return ', '.join([ wrapper_cls.__qualname__ for wrapper_cls in index_expression.wrapper_classes ]) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_multiple_wrapper_references(self): index = Index(OrderBy(F('name').desc(), descending=True), name='name') msg = ( "Multiple references to %s can't be used in an indexed expression." % self._index_expressions_wrappers() ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, msg): editor.add_index(Author, index) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_invalid_topmost_expressions(self): index = Index(Upper(F('name').desc()), name='name') msg = ( '%s must be topmost expressions in an indexed expression.' % self._index_expressions_wrappers() ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, msg): editor.add_index(Author, index) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(Lower('name').desc(), name='func_lower_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, index.name, ['DESC']) # SQL contains a database function. self.assertIs(sql.references_column(table, 'name'), True) self.assertIn('LOWER(%s)' % editor.quote_name('name'), str(sql)) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_f(self): with connection.schema_editor() as editor: editor.create_model(Tag) index = Index('slug', F('title').desc(), name='func_f_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Tag, index) sql = index.create_sql(Tag, editor) table = Tag._meta.db_table self.assertIn(index.name, self.get_constraints(table)) if connection.features.supports_index_column_ordering: self.assertIndexOrder(Tag._meta.db_table, index.name, ['ASC', 'DESC']) # SQL contains columns. self.assertIs(sql.references_column(table, 'slug'), True) self.assertIs(sql.references_column(table, 'title'), True) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Tag, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_lookups(self): with connection.schema_editor() as editor: editor.create_model(Author) with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs): index = Index( F('name__lower'), F('weight__abs'), name='func_lower_abs_lookup_idx', ) # Add index. 
with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) # SQL contains columns. self.assertIs(sql.references_column(table, 'name'), True) self.assertIs(sql.references_column(table, 'weight'), True) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_composite_func_index(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(Lower('name'), Upper('name'), name='func_lower_upper_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) # SQL contains database functions. self.assertIs(sql.references_column(table, 'name'), True) sql = str(sql) self.assertIn('LOWER(%s)' % editor.quote_name('name'), sql) self.assertIn('UPPER(%s)' % editor.quote_name('name'), sql) self.assertLess(sql.index('LOWER'), sql.index('UPPER')) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_composite_func_index_field_and_expression(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) index = Index( F('author').desc(), Lower('title').asc(), 'pub_date', name='func_f_lower_field_idx', ) # Add index. with connection.schema_editor() as editor: editor.add_index(Book, index) sql = index.create_sql(Book, editor) table = Book._meta.db_table constraints = self.get_constraints(table) if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, index.name, ['DESC', 'ASC', 'ASC']) self.assertEqual(len(constraints[index.name]['columns']), 3) self.assertEqual(constraints[index.name]['columns'][2], 'pub_date') # SQL contains database functions and columns. self.assertIs(sql.references_column(table, 'author_id'), True) self.assertIs(sql.references_column(table, 'title'), True) self.assertIs(sql.references_column(table, 'pub_date'), True) self.assertIn('LOWER(%s)' % editor.quote_name('title'), str(sql)) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Book, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') @isolate_apps('schema') def test_func_index_f_decimalfield(self): class Node(Model): value = DecimalField(max_digits=5, decimal_places=2) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Node) index = Index(F('value'), name='func_f_decimalfield_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Node, index) sql = index.create_sql(Node, editor) table = Node._meta.db_table self.assertIn(index.name, self.get_constraints(table)) self.assertIs(sql.references_column(table, 'value'), True) # SQL doesn't contain casting. self.assertNotIn('CAST', str(sql)) # Remove index. 
with connection.schema_editor() as editor: editor.remove_index(Node, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_cast(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(Cast('weight', FloatField()), name='func_cast_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) self.assertIs(sql.references_column(table, 'weight'), True) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_collate(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest( 'This backend does not support case-insensitive collations.' ) with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithSlug) index = Index( Collate(F('title'), collation=collation).desc(), Collate('slug', collation=collation), name='func_collate_idx', ) # Add index. with connection.schema_editor() as editor: editor.add_index(BookWithSlug, index) sql = index.create_sql(BookWithSlug, editor) table = Book._meta.db_table self.assertIn(index.name, self.get_constraints(table)) if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, index.name, ['DESC', 'ASC']) # SQL contains columns and a collation. self.assertIs(sql.references_column(table, 'title'), True) self.assertIs(sql.references_column(table, 'slug'), True) self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql)) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Book, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') @skipIfDBFeature('collate_as_index_expression') def test_func_index_collate_f_ordered(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest( 'This backend does not support case-insensitive collations.' ) with connection.schema_editor() as editor: editor.create_model(Author) index = Index( Collate(F('name').desc(), collation=collation), name='func_collate_f_desc_idx', ) # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, index.name, ['DESC']) # SQL contains columns and a collation. self.assertIs(sql.references_column(table, 'name'), True) self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql)) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_calc(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(F('height') / (F('weight') + Value(5)), name='func_calc_idx') # Add index. 
with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) # SQL contains columns and expressions. self.assertIs(sql.references_column(table, 'height'), True) self.assertIs(sql.references_column(table, 'weight'), True) sql = str(sql) self.assertIs( sql.index(editor.quote_name('height')) < sql.index('/') < sql.index(editor.quote_name('weight')) < sql.index('+') < sql.index('5'), True, ) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes', 'supports_json_field') @isolate_apps('schema') def test_func_index_json_key_transform(self): class JSONModel(Model): field = JSONField() class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(JSONModel) self.isolated_local_models = [JSONModel] index = Index('field__some_key', name='func_json_key_idx') with connection.schema_editor() as editor: editor.add_index(JSONModel, index) sql = index.create_sql(JSONModel, editor) table = JSONModel._meta.db_table self.assertIn(index.name, self.get_constraints(table)) self.assertIs(sql.references_column(table, 'field'), True) with connection.schema_editor() as editor: editor.remove_index(JSONModel, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes', 'supports_json_field') @isolate_apps('schema') def test_func_index_json_key_transform_cast(self): class JSONModel(Model): field = JSONField() class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(JSONModel) self.isolated_local_models = [JSONModel] index = Index( Cast(KeyTextTransform('some_key', 'field'), IntegerField()), name='func_json_key_cast_idx', ) with connection.schema_editor() as editor: editor.add_index(JSONModel, index) sql = index.create_sql(JSONModel, editor) table = JSONModel._meta.db_table self.assertIn(index.name, self.get_constraints(table)) self.assertIs(sql.references_column(table, 'field'), True) with connection.schema_editor() as editor: editor.remove_index(JSONModel, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipIfDBFeature('supports_expression_indexes') def test_func_index_unsupported(self): # Index is ignored on databases that don't support indexes on # expressions. with connection.schema_editor() as editor: editor.create_model(Author) index = Index(F('name'), name='random_idx') with connection.schema_editor() as editor, self.assertNumQueries(0): self.assertIsNone(editor.add_index(Author, index)) self.assertIsNone(editor.remove_index(Author, index)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_nonexistent_field(self): index = Index(Lower('nonexistent'), name='func_nonexistent_idx') msg = ( "Cannot resolve keyword 'nonexistent' into field. 
Choices are: " "height, id, name, uuid, weight" ) with self.assertRaisesMessage(FieldError, msg): with connection.schema_editor() as editor: editor.add_index(Author, index) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_nondeterministic(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(Random(), name='func_random_idx') with connection.schema_editor() as editor: with self.assertRaises(DatabaseError): editor.add_index(Author, index) def test_primary_key(self): """ Tests altering of the primary key """ # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure the table is there and has the right PK self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'id') # Alter to change the PK id_field = Tag._meta.get_field("id") old_field = Tag._meta.get_field("slug") new_field = SlugField(primary_key=True) new_field.set_attributes_from_name("slug") new_field.model = Tag with connection.schema_editor() as editor: editor.remove_field(Tag, id_field) editor.alter_field(Tag, old_field, new_field) # Ensure the PK changed self.assertNotIn( 'id', self.get_indexes(Tag._meta.db_table), ) self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'slug') def test_context_manager_exit(self): """ Ensures transaction is correctly closed when an error occurs inside a SchemaEditor context. """ class SomeError(Exception): pass try: with connection.schema_editor(): raise SomeError except SomeError: self.assertFalse(connection.in_atomic_block) @skipIfDBFeature('can_rollback_ddl') def test_unsupported_transactional_ddl_disallowed(self): message = ( "Executing DDL statements while in a transaction on databases " "that can't perform a rollback is prohibited." ) with atomic(), connection.schema_editor() as editor: with self.assertRaisesMessage(TransactionManagementError, message): editor.execute(editor.sql_create_table % {'table': 'foo', 'definition': ''}) @skipUnlessDBFeature('supports_foreign_keys', 'indexes_foreign_keys') def test_foreign_key_index_long_names_regression(self): """ Regression test for #21497. Only affects databases that supports foreign keys. """ # Create the table with connection.schema_editor() as editor: editor.create_model(AuthorWithEvenLongerName) editor.create_model(BookWithLongName) # Find the properly shortened column name column_name = connection.ops.quote_name("author_foreign_key_with_really_long_field_name_id") column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase # Ensure the table is there and has an index on the column self.assertIn( column_name, self.get_indexes(BookWithLongName._meta.db_table), ) @skipUnlessDBFeature('supports_foreign_keys') def test_add_foreign_key_long_names(self): """ Regression test for #23009. Only affects databases that supports foreign keys. 
""" # Create the initial tables with connection.schema_editor() as editor: editor.create_model(AuthorWithEvenLongerName) editor.create_model(BookWithLongName) # Add a second FK, this would fail due to long ref name before the fix new_field = ForeignKey(AuthorWithEvenLongerName, CASCADE, related_name="something") new_field.set_attributes_from_name("author_other_really_long_named_i_mean_so_long_fk") with connection.schema_editor() as editor: editor.add_field(BookWithLongName, new_field) @isolate_apps('schema') @skipUnlessDBFeature('supports_foreign_keys') def test_add_foreign_key_quoted_db_table(self): class Author(Model): class Meta: db_table = '"table_author_double_quoted"' app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) if connection.vendor == 'mysql': self.assertForeignKeyExists(Book, 'author_id', '"table_author_double_quoted"') else: self.assertForeignKeyExists(Book, 'author_id', 'table_author_double_quoted') def test_add_foreign_object(self): with connection.schema_editor() as editor: editor.create_model(BookForeignObj) new_field = ForeignObject(Author, on_delete=CASCADE, from_fields=['author_id'], to_fields=['id']) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.add_field(BookForeignObj, new_field) def test_creation_deletion_reserved_names(self): """ Tries creating a model's table, and then deleting it when it has a SQL reserved name. """ # Create the table with connection.schema_editor() as editor: try: editor.create_model(Thing) except OperationalError as e: self.fail("Errors when applying initial migration for a model " "with a table named after an SQL reserved word: %s" % e) # The table is there list(Thing.objects.all()) # Clean up that table with connection.schema_editor() as editor: editor.delete_model(Thing) # The table is gone with self.assertRaises(DatabaseError): list(Thing.objects.all()) def test_remove_constraints_capital_letters(self): """ #23065 - Constraint names must be quoted if they contain capital letters. 
""" def get_field(*args, field_class=IntegerField, **kwargs): kwargs['db_column'] = "CamelCase" field = field_class(*args, **kwargs) field.set_attributes_from_name("CamelCase") return field model = Author field = get_field() table = model._meta.db_table column = field.column identifier_converter = connection.introspection.identifier_converter with connection.schema_editor() as editor: editor.create_model(model) editor.add_field(model, field) constraint_name = 'CamelCaseIndex' expected_constraint_name = identifier_converter(constraint_name) editor.execute( editor.sql_create_index % { "table": editor.quote_name(table), "name": editor.quote_name(constraint_name), "using": "", "columns": editor.quote_name(column), "extra": "", "condition": "", "include": "", } ) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(db_index=True), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) constraint_name = 'CamelCaseUniqConstraint' expected_constraint_name = identifier_converter(constraint_name) editor.execute(editor._create_unique_sql(model, [field.column], constraint_name)) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(unique=True), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) if editor.sql_create_fk: constraint_name = 'CamelCaseFKConstraint' expected_constraint_name = identifier_converter(constraint_name) editor.execute( editor.sql_create_fk % { "table": editor.quote_name(table), "name": editor.quote_name(constraint_name), "column": editor.quote_name(column), "to_table": editor.quote_name(table), "to_column": editor.quote_name(model._meta.auto_field.column), "deferrable": connection.ops.deferrable_sql(), } ) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(Author, CASCADE, field_class=ForeignKey), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) def test_add_field_use_effective_default(self): """ #23987 - effective_default() should be used as the field default when adding a new field. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no surname field columns = self.column_classes(Author) self.assertNotIn("surname", columns) # Create a row Author.objects.create(name='Anonymous1') # Add new CharField to ensure default will be used from effective_default new_field = CharField(max_length=15, blank=True) new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure field was added with the right default with connection.cursor() as cursor: cursor.execute("SELECT surname FROM schema_author;") item = cursor.fetchall()[0] self.assertEqual(item[0], None if connection.features.interprets_empty_strings_as_nulls else '') def test_add_field_default_dropped(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no surname field columns = self.column_classes(Author) self.assertNotIn("surname", columns) # Create a row Author.objects.create(name='Anonymous1') # Add new CharField with a default new_field = CharField(max_length=15, blank=True, default='surname default') new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure field was added with the right default with connection.cursor() as cursor: cursor.execute("SELECT surname FROM schema_author;") item = cursor.fetchall()[0] self.assertEqual(item[0], 'surname default') # And that the default is no longer set in the database. field = next( f for f in connection.introspection.get_table_description(cursor, "schema_author") if f.name == "surname" ) if connection.features.can_introspect_default: self.assertIsNone(field.default) def test_add_field_default_nullable(self): with connection.schema_editor() as editor: editor.create_model(Author) # Add new nullable CharField with a default. new_field = CharField(max_length=15, blank=True, null=True, default='surname') new_field.set_attributes_from_name('surname') with connection.schema_editor() as editor: editor.add_field(Author, new_field) Author.objects.create(name='Anonymous1') with connection.cursor() as cursor: cursor.execute('SELECT surname FROM schema_author;') item = cursor.fetchall()[0] self.assertIsNone(item[0]) field = next( f for f in connection.introspection.get_table_description( cursor, 'schema_author', ) if f.name == 'surname' ) # Field is still nullable. self.assertTrue(field.null_ok) # The database default is no longer set. if connection.features.can_introspect_default: self.assertIn(field.default, ['NULL', None]) def test_alter_field_default_dropped(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Create a row Author.objects.create(name='Anonymous1') self.assertIsNone(Author.objects.get().height) old_field = Author._meta.get_field('height') # The default from the new field is used in updating existing rows. new_field = IntegerField(blank=True, default=42) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual(Author.objects.get().height, 42) # The database default should be removed. 
with connection.cursor() as cursor: field = next( f for f in connection.introspection.get_table_description(cursor, "schema_author") if f.name == "height" ) if connection.features.can_introspect_default: self.assertIsNone(field.default) def test_alter_field_default_doesnt_perform_queries(self): """ No queries are performed if a field default changes and the field's not changing from null to non-null. """ with connection.schema_editor() as editor: editor.create_model(AuthorWithDefaultHeight) old_field = AuthorWithDefaultHeight._meta.get_field('height') new_default = old_field.default * 2 new_field = PositiveIntegerField(null=True, blank=True, default=new_default) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor, self.assertNumQueries(0): editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_fk_attributes_noop(self): """ No queries are performed when changing field attributes that don't affect the schema. """ with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) old_field = Book._meta.get_field('author') new_field = ForeignKey( Author, blank=True, editable=False, error_messages={'invalid': 'error message'}, help_text='help text', limit_choices_to={'limit': 'choice'}, on_delete=PROTECT, related_name='related_name', related_query_name='related_query_name', validators=[lambda x: x], verbose_name='verbose name', ) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor, self.assertNumQueries(0): editor.alter_field(Book, old_field, new_field, strict=True) with connection.schema_editor() as editor, self.assertNumQueries(0): editor.alter_field(Book, new_field, old_field, strict=True) def test_add_textfield_unhashable_default(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Create a row Author.objects.create(name='Anonymous1') # Create a field that has an unhashable default new_field = TextField(default={}) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.add_field(Author, new_field) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_add_indexed_charfield(self): field = CharField(max_length=255, db_index=True) field.set_attributes_from_name('nom_de_plume') with connection.schema_editor() as editor: editor.create_model(Author) editor.add_field(Author, field) # Should create two indexes; one for like operator. self.assertEqual( self.get_constraints_for_column(Author, 'nom_de_plume'), ['schema_author_nom_de_plume_7570a851', 'schema_author_nom_de_plume_7570a851_like'], ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_add_unique_charfield(self): field = CharField(max_length=255, unique=True) field.set_attributes_from_name('nom_de_plume') with connection.schema_editor() as editor: editor.create_model(Author) editor.add_field(Author, field) # Should create two indexes; one for like operator. self.assertEqual( self.get_constraints_for_column(Author, 'nom_de_plume'), ['schema_author_nom_de_plume_7570a851_like', 'schema_author_nom_de_plume_key'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_index_to_charfield(self): # Create the table and verify no initial indexes. 
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
        # Alter to add db_index=True and create 2 indexes.
        old_field = Author._meta.get_field('name')
        new_field = CharField(max_length=255, db_index=True)
        new_field.set_attributes_from_name('name')
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        self.assertEqual(
            self.get_constraints_for_column(Author, 'name'),
            ['schema_author_name_1fbc5617', 'schema_author_name_1fbc5617_like']
        )
        # Remove db_index=True to drop both indexes.
        with connection.schema_editor() as editor:
            editor.alter_field(Author, new_field, old_field, strict=True)
        self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])

    @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
    def test_alter_field_add_unique_to_charfield(self):
        # Create the table and verify no initial indexes.
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
        # Alter to add unique=True and create 2 indexes.
        old_field = Author._meta.get_field('name')
        new_field = CharField(max_length=255, unique=True)
        new_field.set_attributes_from_name('name')
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        self.assertEqual(
            self.get_constraints_for_column(Author, 'name'),
            ['schema_author_name_1fbc5617_like', 'schema_author_name_1fbc5617_uniq']
        )
        # Remove unique=True to drop both indexes.
        with connection.schema_editor() as editor:
            editor.alter_field(Author, new_field, old_field, strict=True)
        self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])

    @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
    def test_alter_field_add_index_to_textfield(self):
        # Create the table and verify no initial indexes.
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        self.assertEqual(self.get_constraints_for_column(Note, 'info'), [])
        # Alter to add db_index=True and create 2 indexes.
        old_field = Note._meta.get_field('info')
        new_field = TextField(db_index=True)
        new_field.set_attributes_from_name('info')
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)
        self.assertEqual(
            self.get_constraints_for_column(Note, 'info'),
            ['schema_note_info_4b0ea695', 'schema_note_info_4b0ea695_like']
        )
        # Remove db_index=True to drop both indexes.
        with connection.schema_editor() as editor:
            editor.alter_field(Note, new_field, old_field, strict=True)
        self.assertEqual(self.get_constraints_for_column(Note, 'info'), [])

    @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
    def test_alter_field_add_unique_to_charfield_with_db_index(self):
        # Create the table and verify initial indexes.
with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to add unique=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, db_index=True, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to remove unique=True (should drop unique index) new_field2 = CharField(max_length=100, db_index=True) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_remove_unique_and_db_index_from_charfield(self): # Create the table and verify initial indexes. with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to add unique=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, db_index=True, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to remove both unique=True and db_index=True (should drop all indexes) new_field2 = CharField(max_length=100) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual(self.get_constraints_for_column(BookWithoutAuthor, 'title'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_swap_unique_and_db_index_with_charfield(self): # Create the table and verify initial indexes. 
with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to set unique=True and remove db_index=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to set db_index=True and remove unique=True (should restore index) new_field2 = CharField(max_length=100, db_index=True) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_db_index_to_charfield_with_unique(self): # Create the table and verify initial indexes. with connection.schema_editor() as editor: editor.create_model(Tag) self.assertEqual( self.get_constraints_for_column(Tag, 'slug'), ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key'] ) # Alter to add db_index=True old_field = Tag._meta.get_field('slug') new_field = SlugField(db_index=True, unique=True) new_field.set_attributes_from_name('slug') with connection.schema_editor() as editor: editor.alter_field(Tag, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Tag, 'slug'), ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key'] ) # Alter to remove db_index=True new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name('slug') with connection.schema_editor() as editor: editor.alter_field(Tag, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(Tag, 'slug'), ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key'] ) def test_alter_field_add_index_to_integerfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Author) self.assertEqual(self.get_constraints_for_column(Author, 'weight'), []) # Alter to add db_index=True and create index. old_field = Author._meta.get_field('weight') new_field = IntegerField(null=True, db_index=True) new_field.set_attributes_from_name('weight') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'weight'), ['schema_author_weight_587740f9']) # Remove db_index=True to drop index. with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'weight'), []) def test_alter_pk_with_self_referential_field(self): """ Changing the primary key field name of a model with a self-referential foreign key (#26384). 
""" with connection.schema_editor() as editor: editor.create_model(Node) old_field = Node._meta.get_field('node_id') new_field = AutoField(primary_key=True) new_field.set_attributes_from_name('id') with connection.schema_editor() as editor: editor.alter_field(Node, old_field, new_field, strict=True) self.assertForeignKeyExists(Node, 'parent_id', Node._meta.db_table) @mock.patch('django.db.backends.base.schema.datetime') @mock.patch('django.db.backends.base.schema.timezone') def test_add_datefield_and_datetimefield_use_effective_default(self, mocked_datetime, mocked_tz): """ effective_default() should be used for DateField, DateTimeField, and TimeField if auto_now or auto_now_add is set (#25005). """ now = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1) now_tz = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1, tzinfo=timezone.utc) mocked_datetime.now = mock.MagicMock(return_value=now) mocked_tz.now = mock.MagicMock(return_value=now_tz) # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Check auto_now/auto_now_add attributes are not defined columns = self.column_classes(Author) self.assertNotIn("dob_auto_now", columns) self.assertNotIn("dob_auto_now_add", columns) self.assertNotIn("dtob_auto_now", columns) self.assertNotIn("dtob_auto_now_add", columns) self.assertNotIn("tob_auto_now", columns) self.assertNotIn("tob_auto_now_add", columns) # Create a row Author.objects.create(name='Anonymous1') # Ensure fields were added with the correct defaults dob_auto_now = DateField(auto_now=True) dob_auto_now.set_attributes_from_name('dob_auto_now') self.check_added_field_default( editor, Author, dob_auto_now, 'dob_auto_now', now.date(), cast_function=lambda x: x.date(), ) dob_auto_now_add = DateField(auto_now_add=True) dob_auto_now_add.set_attributes_from_name('dob_auto_now_add') self.check_added_field_default( editor, Author, dob_auto_now_add, 'dob_auto_now_add', now.date(), cast_function=lambda x: x.date(), ) dtob_auto_now = DateTimeField(auto_now=True) dtob_auto_now.set_attributes_from_name('dtob_auto_now') self.check_added_field_default( editor, Author, dtob_auto_now, 'dtob_auto_now', now, ) dt_tm_of_birth_auto_now_add = DateTimeField(auto_now_add=True) dt_tm_of_birth_auto_now_add.set_attributes_from_name('dtob_auto_now_add') self.check_added_field_default( editor, Author, dt_tm_of_birth_auto_now_add, 'dtob_auto_now_add', now, ) tob_auto_now = TimeField(auto_now=True) tob_auto_now.set_attributes_from_name('tob_auto_now') self.check_added_field_default( editor, Author, tob_auto_now, 'tob_auto_now', now.time(), cast_function=lambda x: x.time(), ) tob_auto_now_add = TimeField(auto_now_add=True) tob_auto_now_add.set_attributes_from_name('tob_auto_now_add') self.check_added_field_default( editor, Author, tob_auto_now_add, 'tob_auto_now_add', now.time(), cast_function=lambda x: x.time(), ) def test_namespaced_db_table_create_index_name(self): """ Table names are stripped of their namespace/schema before being used to generate index names. 
""" with connection.schema_editor() as editor: max_name_length = connection.ops.max_name_length() or 200 namespace = 'n' * max_name_length table_name = 't' * max_name_length namespaced_table_name = '"%s"."%s"' % (namespace, table_name) self.assertEqual( editor._create_index_name(table_name, []), editor._create_index_name(namespaced_table_name, []), ) @unittest.skipUnless(connection.vendor == 'oracle', 'Oracle specific db_table syntax') def test_creation_with_db_table_double_quotes(self): oracle_user = connection.creation._test_database_user() class Student(Model): name = CharField(max_length=30) class Meta: app_label = 'schema' apps = new_apps db_table = '"%s"."DJANGO_STUDENT_TABLE"' % oracle_user class Document(Model): name = CharField(max_length=30) students = ManyToManyField(Student) class Meta: app_label = 'schema' apps = new_apps db_table = '"%s"."DJANGO_DOCUMENT_TABLE"' % oracle_user self.local_models = [Student, Document] with connection.schema_editor() as editor: editor.create_model(Student) editor.create_model(Document) doc = Document.objects.create(name='Test Name') student = Student.objects.create(name='Some man') doc.students.add(student) @isolate_apps('schema') @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific db_table syntax.') def test_namespaced_db_table_foreign_key_reference(self): with connection.cursor() as cursor: cursor.execute('CREATE SCHEMA django_schema_tests') def delete_schema(): with connection.cursor() as cursor: cursor.execute('DROP SCHEMA django_schema_tests CASCADE') self.addCleanup(delete_schema) class Author(Model): class Meta: app_label = 'schema' class Book(Model): class Meta: app_label = 'schema' db_table = '"django_schema_tests"."schema_book"' author = ForeignKey(Author, CASCADE) author.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) editor.add_field(Book, author) def test_rename_table_renames_deferred_sql_references(self): atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: editor.create_model(Author) editor.create_model(Book) editor.alter_db_table(Author, 'schema_author', 'schema_renamed_author') editor.alter_db_table(Author, 'schema_book', 'schema_renamed_book') try: self.assertGreater(len(editor.deferred_sql), 0) for statement in editor.deferred_sql: self.assertIs(statement.references_table('schema_author'), False) self.assertIs(statement.references_table('schema_book'), False) finally: editor.alter_db_table(Author, 'schema_renamed_author', 'schema_author') editor.alter_db_table(Author, 'schema_renamed_book', 'schema_book') def test_rename_column_renames_deferred_sql_references(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) old_title = Book._meta.get_field('title') new_title = CharField(max_length=100, db_index=True) new_title.set_attributes_from_name('renamed_title') editor.alter_field(Book, old_title, new_title) old_author = Book._meta.get_field('author') new_author = ForeignKey(Author, CASCADE) new_author.set_attributes_from_name('renamed_author') editor.alter_field(Book, old_author, new_author) self.assertGreater(len(editor.deferred_sql), 0) for statement in editor.deferred_sql: self.assertIs(statement.references_column('book', 'title'), False) self.assertIs(statement.references_column('book', 'author_id'), False) @isolate_apps('schema') def 
test_referenced_field_without_constraint_rename_inside_atomic_block(self): """ Foreign keys without database level constraint don't prevent the field they reference from being renamed in an atomic block. """ class Foo(Model): field = CharField(max_length=255, unique=True) class Meta: app_label = 'schema' class Bar(Model): foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False) class Meta: app_label = 'schema' self.isolated_local_models = [Foo, Bar] with connection.schema_editor() as editor: editor.create_model(Foo) editor.create_model(Bar) new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('renamed') with connection.schema_editor(atomic=True) as editor: editor.alter_field(Foo, Foo._meta.get_field('field'), new_field) @isolate_apps('schema') def test_referenced_table_without_constraint_rename_inside_atomic_block(self): """ Foreign keys without database level constraint don't prevent the table they reference from being renamed in an atomic block. """ class Foo(Model): field = CharField(max_length=255, unique=True) class Meta: app_label = 'schema' class Bar(Model): foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False) class Meta: app_label = 'schema' self.isolated_local_models = [Foo, Bar] with connection.schema_editor() as editor: editor.create_model(Foo) editor.create_model(Bar) new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('renamed') with connection.schema_editor(atomic=True) as editor: editor.alter_db_table(Foo, Foo._meta.db_table, 'renamed_table') Foo._meta.db_table = 'renamed_table' @isolate_apps('schema') @skipUnlessDBFeature('supports_collation_on_charfield') def test_db_collation_charfield(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest('Language collations are not supported.') class Foo(Model): field = CharField(max_length=255, db_collation=collation) class Meta: app_label = 'schema' self.isolated_local_models = [Foo] with connection.schema_editor() as editor: editor.create_model(Foo) self.assertEqual( self.get_column_collation(Foo._meta.db_table, 'field'), collation, ) @isolate_apps('schema') @skipUnlessDBFeature('supports_collation_on_textfield') def test_db_collation_textfield(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest('Language collations are not supported.') class Foo(Model): field = TextField(db_collation=collation) class Meta: app_label = 'schema' self.isolated_local_models = [Foo] with connection.schema_editor() as editor: editor.create_model(Foo) self.assertEqual( self.get_column_collation(Foo._meta.db_table, 'field'), collation, ) @skipUnlessDBFeature('supports_collation_on_charfield') def test_add_field_db_collation(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest('Language collations are not supported.') with connection.schema_editor() as editor: editor.create_model(Author) new_field = CharField(max_length=255, db_collation=collation) new_field.set_attributes_from_name('alias') with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) self.assertEqual( columns['alias'][0], connection.features.introspected_field_types['CharField'], ) self.assertEqual(columns['alias'][1][8], collation) @skipUnlessDBFeature('supports_collation_on_charfield') def test_alter_field_db_collation(self): collation = 
connection.features.test_collations.get('non_default') if not collation: self.skipTest('Language collations are not supported.') with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field('name') new_field = CharField(max_length=255, db_collation=collation) new_field.set_attributes_from_name('name') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual( self.get_column_collation(Author._meta.db_table, 'name'), collation, ) with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertIsNone(self.get_column_collation(Author._meta.db_table, 'name')) @skipUnlessDBFeature('supports_collation_on_charfield') def test_alter_field_type_and_db_collation(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest('Language collations are not supported.') with connection.schema_editor() as editor: editor.create_model(Note) old_field = Note._meta.get_field('info') new_field = CharField(max_length=255, db_collation=collation) new_field.set_attributes_from_name('info') new_field.model = Note with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) columns = self.column_classes(Note) self.assertEqual( columns['info'][0], connection.features.introspected_field_types['CharField'], ) self.assertEqual(columns['info'][1][8], collation) with connection.schema_editor() as editor: editor.alter_field(Note, new_field, old_field, strict=True) columns = self.column_classes(Note) self.assertEqual(columns['info'][0], 'TextField') self.assertIsNone(columns['info'][1][8]) @skipUnlessDBFeature( 'supports_collation_on_charfield', 'supports_non_deterministic_collations', ) def test_ci_cs_db_collation(self): cs_collation = connection.features.test_collations.get('cs') ci_collation = connection.features.test_collations.get('ci') try: if connection.vendor == 'mysql': cs_collation = 'latin1_general_cs' elif connection.vendor == 'postgresql': cs_collation = 'en-x-icu' with connection.cursor() as cursor: cursor.execute( "CREATE COLLATION IF NOT EXISTS case_insensitive " "(provider = icu, locale = 'und-u-ks-level2', " "deterministic = false)" ) ci_collation = 'case_insensitive' # Create the table. with connection.schema_editor() as editor: editor.create_model(Author) # Case-insensitive collation. old_field = Author._meta.get_field('name') new_field_ci = CharField(max_length=255, db_collation=ci_collation) new_field_ci.set_attributes_from_name('name') new_field_ci.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field_ci, strict=True) Author.objects.create(name='ANDREW') self.assertIs(Author.objects.filter(name='Andrew').exists(), True) # Case-sensitive collation. new_field_cs = CharField(max_length=255, db_collation=cs_collation) new_field_cs.set_attributes_from_name('name') new_field_cs.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, new_field_ci, new_field_cs, strict=True) self.assertIs(Author.objects.filter(name='Andrew').exists(), False) finally: if connection.vendor == 'postgresql': with connection.cursor() as cursor: cursor.execute('DROP COLLATION IF EXISTS case_insensitive')
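# A minimal sketch (not one of the tests above) of the alter_field() pattern this
# module exercises repeatedly: clone the field definition, call
# set_attributes_from_name(), then hand the old/new pair to a schema editor.
# `Author` is the test model already imported by this module; the helper name below
# is illustrative only.
def _alter_field_sketch():
    from django.db import connection
    from django.db.models import CharField

    old_field = Author._meta.get_field('name')
    new_field = CharField(max_length=255, db_index=True)
    new_field.set_attributes_from_name('name')
    with connection.schema_editor() as editor:
        # strict=True asks the editor to raise instead of silently skipping when the
        # database state does not look like what the old field describes.
        editor.alter_field(Author, old_field, new_field, strict=True)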
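# A new module starts here: system-check tests for model metadata collisions
# (duplicate db_table names, index names, and constraint names), apparently from
# Django's check_framework test suite.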
from unittest import mock from django.core import checks from django.core.checks import Error, Warning from django.db import models from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from django.test.utils import ( isolate_apps, modify_settings, override_settings, override_system_checks, ) class EmptyRouter: pass @isolate_apps('check_framework', attr_name='apps') @override_system_checks([checks.model_checks.check_all_models]) class DuplicateDBTableTests(SimpleTestCase): def test_collision_in_same_app(self): class Model1(models.Model): class Meta: db_table = 'test_table' class Model2(models.Model): class Meta: db_table = 'test_table' self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Error( "db_table 'test_table' is used by multiple models: " "check_framework.Model1, check_framework.Model2.", obj='test_table', id='models.E028', ) ]) @override_settings(DATABASE_ROUTERS=['check_framework.test_model_checks.EmptyRouter']) def test_collision_in_same_app_database_routers_installed(self): class Model1(models.Model): class Meta: db_table = 'test_table' class Model2(models.Model): class Meta: db_table = 'test_table' self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Warning( "db_table 'test_table' is used by multiple models: " "check_framework.Model1, check_framework.Model2.", hint=( 'You have configured settings.DATABASE_ROUTERS. Verify ' 'that check_framework.Model1, check_framework.Model2 are ' 'correctly routed to separate databases.' ), obj='test_table', id='models.W035', ) ]) @modify_settings(INSTALLED_APPS={'append': 'basic'}) @isolate_apps('basic', 'check_framework', kwarg_name='apps') def test_collision_across_apps(self, apps): class Model1(models.Model): class Meta: app_label = 'basic' db_table = 'test_table' class Model2(models.Model): class Meta: app_label = 'check_framework' db_table = 'test_table' self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [ Error( "db_table 'test_table' is used by multiple models: " "basic.Model1, check_framework.Model2.", obj='test_table', id='models.E028', ) ]) @modify_settings(INSTALLED_APPS={'append': 'basic'}) @override_settings(DATABASE_ROUTERS=['check_framework.test_model_checks.EmptyRouter']) @isolate_apps('basic', 'check_framework', kwarg_name='apps') def test_collision_across_apps_database_routers_installed(self, apps): class Model1(models.Model): class Meta: app_label = 'basic' db_table = 'test_table' class Model2(models.Model): class Meta: app_label = 'check_framework' db_table = 'test_table' self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [ Warning( "db_table 'test_table' is used by multiple models: " "basic.Model1, check_framework.Model2.", hint=( 'You have configured settings.DATABASE_ROUTERS. Verify ' 'that basic.Model1, check_framework.Model2 are correctly ' 'routed to separate databases.' 
), obj='test_table', id='models.W035', ) ]) def test_no_collision_for_unmanaged_models(self): class Unmanaged(models.Model): class Meta: db_table = 'test_table' managed = False class Managed(models.Model): class Meta: db_table = 'test_table' self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), []) def test_no_collision_for_proxy_models(self): class Model(models.Model): class Meta: db_table = 'test_table' class ProxyModel(Model): class Meta: proxy = True self.assertEqual(Model._meta.db_table, ProxyModel._meta.db_table) self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), []) @isolate_apps('check_framework', attr_name='apps') @override_system_checks([checks.model_checks.check_all_models]) class IndexNameTests(SimpleTestCase): def test_collision_in_same_model(self): index = models.Index(fields=['id'], name='foo') class Model(models.Model): class Meta: indexes = [index, index] self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Error( "index name 'foo' is not unique for model check_framework.Model.", id='models.E029', ), ]) def test_collision_in_different_models(self): index = models.Index(fields=['id'], name='foo') class Model1(models.Model): class Meta: indexes = [index] class Model2(models.Model): class Meta: indexes = [index] self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Error( "index name 'foo' is not unique among models: " "check_framework.Model1, check_framework.Model2.", id='models.E030', ), ]) def test_collision_abstract_model(self): class AbstractModel(models.Model): class Meta: indexes = [models.Index(fields=['id'], name='foo')] abstract = True class Model1(AbstractModel): pass class Model2(AbstractModel): pass self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Error( "index name 'foo' is not unique among models: " "check_framework.Model1, check_framework.Model2.", id='models.E030', ), ]) def test_no_collision_abstract_model_interpolation(self): class AbstractModel(models.Model): name = models.CharField(max_length=20) class Meta: indexes = [models.Index(fields=['name'], name='%(app_label)s_%(class)s_foo')] abstract = True class Model1(AbstractModel): pass class Model2(AbstractModel): pass self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), []) @modify_settings(INSTALLED_APPS={'append': 'basic'}) @isolate_apps('basic', 'check_framework', kwarg_name='apps') def test_collision_across_apps(self, apps): index = models.Index(fields=['id'], name='foo') class Model1(models.Model): class Meta: app_label = 'basic' indexes = [index] class Model2(models.Model): class Meta: app_label = 'check_framework' indexes = [index] self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [ Error( "index name 'foo' is not unique among models: basic.Model1, " "check_framework.Model2.", id='models.E030', ), ]) @modify_settings(INSTALLED_APPS={'append': 'basic'}) @isolate_apps('basic', 'check_framework', kwarg_name='apps') def test_no_collision_across_apps_interpolation(self, apps): index = models.Index(fields=['id'], name='%(app_label)s_%(class)s_foo') class Model1(models.Model): class Meta: app_label = 'basic' indexes = [index] class Model2(models.Model): class Meta: app_label = 'check_framework' indexes = [index] self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), []) @isolate_apps('check_framework', attr_name='apps') @override_system_checks([checks.model_checks.check_all_models]) 
@skipUnlessDBFeature('supports_table_check_constraints') class ConstraintNameTests(TestCase): def test_collision_in_same_model(self): class Model(models.Model): class Meta: constraints = [ models.CheckConstraint(check=models.Q(id__gt=0), name='foo'), models.CheckConstraint(check=models.Q(id__lt=100), name='foo'), ] self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Error( "constraint name 'foo' is not unique for model " "check_framework.Model.", id='models.E031', ), ]) def test_collision_in_different_models(self): constraint = models.CheckConstraint(check=models.Q(id__gt=0), name='foo') class Model1(models.Model): class Meta: constraints = [constraint] class Model2(models.Model): class Meta: constraints = [constraint] self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Error( "constraint name 'foo' is not unique among models: " "check_framework.Model1, check_framework.Model2.", id='models.E032', ), ]) def test_collision_abstract_model(self): class AbstractModel(models.Model): class Meta: constraints = [models.CheckConstraint(check=models.Q(id__gt=0), name='foo')] abstract = True class Model1(AbstractModel): pass class Model2(AbstractModel): pass self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Error( "constraint name 'foo' is not unique among models: " "check_framework.Model1, check_framework.Model2.", id='models.E032', ), ]) def test_no_collision_abstract_model_interpolation(self): class AbstractModel(models.Model): class Meta: constraints = [ models.CheckConstraint(check=models.Q(id__gt=0), name='%(app_label)s_%(class)s_foo'), ] abstract = True class Model1(AbstractModel): pass class Model2(AbstractModel): pass self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), []) @modify_settings(INSTALLED_APPS={'append': 'basic'}) @isolate_apps('basic', 'check_framework', kwarg_name='apps') def test_collision_across_apps(self, apps): constraint = models.CheckConstraint(check=models.Q(id__gt=0), name='foo') class Model1(models.Model): class Meta: app_label = 'basic' constraints = [constraint] class Model2(models.Model): class Meta: app_label = 'check_framework' constraints = [constraint] self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [ Error( "constraint name 'foo' is not unique among models: " "basic.Model1, check_framework.Model2.", id='models.E032', ), ]) @modify_settings(INSTALLED_APPS={'append': 'basic'}) @isolate_apps('basic', 'check_framework', kwarg_name='apps') def test_no_collision_across_apps_interpolation(self, apps): constraint = models.CheckConstraint(check=models.Q(id__gt=0), name='%(app_label)s_%(class)s_foo') class Model1(models.Model): class Meta: app_label = 'basic' constraints = [constraint] class Model2(models.Model): class Meta: app_label = 'check_framework' constraints = [constraint] self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), []) def mocked_is_overridden(self, setting): # Force treating DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' as a not # overridden setting. 
return ( setting != 'DEFAULT_AUTO_FIELD' or self.DEFAULT_AUTO_FIELD != 'django.db.models.AutoField' ) @mock.patch('django.conf.UserSettingsHolder.is_overridden', mocked_is_overridden) @override_settings(DEFAULT_AUTO_FIELD='django.db.models.AutoField') @isolate_apps('check_framework.apps.CheckDefaultPKConfig', attr_name='apps') @override_system_checks([checks.model_checks.check_all_models]) class ModelDefaultAutoFieldTests(SimpleTestCase): msg = ( "Auto-created primary key used when not defining a primary key type, " "by default 'django.db.models.AutoField'." ) hint = ( "Configure the DEFAULT_AUTO_FIELD setting or the " "CheckDefaultPKConfig.default_auto_field attribute to point to a " "subclass of AutoField, e.g. 'django.db.models.BigAutoField'." ) def test_auto_created_pk(self): class Model(models.Model): pass self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Warning(self.msg, hint=self.hint, obj=Model, id='models.W042'), ]) def test_explicit_inherited_pk(self): class Parent(models.Model): id = models.AutoField(primary_key=True) class Child(Parent): pass self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), []) def test_skipped_on_abstract_model(self): class Abstract(models.Model): class Meta: abstract = True # Call .check() because abstract models are not registered. self.assertEqual(Abstract.check(), []) def test_explicit_inherited_parent_link(self): class Parent(models.Model): id = models.AutoField(primary_key=True) class Child(Parent): parent_ptr = models.OneToOneField(Parent, models.CASCADE, parent_link=True) self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), []) def test_auto_created_inherited_pk(self): class Parent(models.Model): pass class Child(Parent): pass self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Warning(self.msg, hint=self.hint, obj=Parent, id='models.W042'), ]) def test_auto_created_inherited_parent_link(self): class Parent(models.Model): pass class Child(Parent): parent_ptr = models.OneToOneField(Parent, models.CASCADE, parent_link=True) self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Warning(self.msg, hint=self.hint, obj=Parent, id='models.W042'), ]) def test_auto_created_pk_inherited_abstract_parent(self): class Parent(models.Model): class Meta: abstract = True class Child(Parent): pass self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Warning(self.msg, hint=self.hint, obj=Child, id='models.W042'), ]) @override_settings(DEFAULT_AUTO_FIELD='django.db.models.BigAutoField') def test_default_auto_field_setting(self): class Model(models.Model): pass self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), []) def test_explicit_pk(self): class Model(models.Model): id = models.BigAutoField(primary_key=True) self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), []) @isolate_apps('check_framework.apps.CheckPKConfig', kwarg_name='apps') def test_app_default_auto_field(self, apps): class ModelWithPkViaAppConfig(models.Model): class Meta: app_label = 'check_framework.apps.CheckPKConfig' self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [])
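# A minimal sketch (not one of the tests above) of driving the same machinery
# directly: checks.run_checks() returns the Error/Warning instances that the
# assertions in these test classes compare against. The helper name is illustrative
# only.
def _run_model_checks_sketch():
    messages = checks.run_checks()
    for message in messages:
        # Each message carries an identifier such as 'models.E028' or 'models.W042'.
        print(message.id, message.msg)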
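# Another module starts here: file-upload tests covering multipart parsing,
# filename sanitization, and custom upload handlers, apparently from Django's
# file_uploads test suite.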
import base64 import hashlib import os import shutil import sys import tempfile as sys_tempfile import unittest from io import BytesIO, StringIO from unittest import mock from urllib.parse import quote from django.core.exceptions import SuspiciousFileOperation from django.core.files import temp as tempfile from django.core.files.uploadedfile import SimpleUploadedFile, UploadedFile from django.http.multipartparser import ( FILE, MultiPartParser, MultiPartParserError, Parser, parse_header, ) from django.test import SimpleTestCase, TestCase, client, override_settings from . import uploadhandler from .models import FileModel UNICODE_FILENAME = 'test-0123456789_中文_Orléans.jpg' MEDIA_ROOT = sys_tempfile.mkdtemp() UPLOAD_TO = os.path.join(MEDIA_ROOT, 'test_upload') CANDIDATE_TRAVERSAL_FILE_NAMES = [ '/tmp/hax0rd.txt', # Absolute path, *nix-style. 'C:\\Windows\\hax0rd.txt', # Absolute path, win-style. 'C:/Windows/hax0rd.txt', # Absolute path, broken-style. '\\tmp\\hax0rd.txt', # Absolute path, broken in a different way. '/tmp\\hax0rd.txt', # Absolute path, broken by mixing. 'subdir/hax0rd.txt', # Descendant path, *nix-style. 'subdir\\hax0rd.txt', # Descendant path, win-style. 'sub/dir\\hax0rd.txt', # Descendant path, mixed. '../../hax0rd.txt', # Relative path, *nix-style. '..\\..\\hax0rd.txt', # Relative path, win-style. '../..\\hax0rd.txt', # Relative path, mixed. '..&#x2F;hax0rd.txt', # HTML entities. '..&sol;hax0rd.txt', # HTML entities. ] CANDIDATE_INVALID_FILE_NAMES = [ '/tmp/', # Directory, *nix-style. 'c:\\tmp\\', # Directory, win-style. '/tmp/.', # Directory dot, *nix-style. 'c:\\tmp\\.', # Directory dot, *nix-style. '/tmp/..', # Parent directory, *nix-style. 'c:\\tmp\\..', # Parent directory, win-style. '', # Empty filename. ] @override_settings(MEDIA_ROOT=MEDIA_ROOT, ROOT_URLCONF='file_uploads.urls', MIDDLEWARE=[]) class FileUploadTests(TestCase): @classmethod def setUpClass(cls): super().setUpClass() os.makedirs(MEDIA_ROOT, exist_ok=True) @classmethod def tearDownClass(cls): shutil.rmtree(MEDIA_ROOT) super().tearDownClass() def test_upload_name_is_validated(self): candidates = [ '/tmp/', '/tmp/..', '/tmp/.', ] if sys.platform == 'win32': candidates.extend([ 'c:\\tmp\\', 'c:\\tmp\\..', 'c:\\tmp\\.', ]) for file_name in candidates: with self.subTest(file_name=file_name): self.assertRaises(SuspiciousFileOperation, UploadedFile, name=file_name) def test_simple_upload(self): with open(__file__, 'rb') as fp: post_data = { 'name': 'Ringo', 'file_field': fp, } response = self.client.post('/upload/', post_data) self.assertEqual(response.status_code, 200) def test_large_upload(self): file = tempfile.NamedTemporaryFile with file(suffix=".file1") as file1, file(suffix=".file2") as file2: file1.write(b'a' * (2 ** 21)) file1.seek(0) file2.write(b'a' * (10 * 2 ** 20)) file2.seek(0) post_data = { 'name': 'Ringo', 'file_field1': file1, 'file_field2': file2, } for key in list(post_data): try: post_data[key + '_hash'] = hashlib.sha1(post_data[key].read()).hexdigest() post_data[key].seek(0) except AttributeError: post_data[key + '_hash'] = hashlib.sha1(post_data[key].encode()).hexdigest() response = self.client.post('/verify/', post_data) self.assertEqual(response.status_code, 200) def _test_base64_upload(self, content, encode=base64.b64encode): payload = client.FakePayload("\r\n".join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="file"; filename="test.txt"', 'Content-Type: application/octet-stream', 'Content-Transfer-Encoding: base64', ''])) payload.write(b'\r\n' + 
encode(content.encode()) + b'\r\n') payload.write('--' + client.BOUNDARY + '--\r\n') r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': "/echo_content/", 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) self.assertEqual(response.json()['file'], content) def test_base64_upload(self): self._test_base64_upload("This data will be transmitted base64-encoded.") def test_big_base64_upload(self): self._test_base64_upload("Big data" * 68000) # > 512Kb def test_big_base64_newlines_upload(self): self._test_base64_upload("Big data" * 68000, encode=base64.encodebytes) def test_unicode_file_name(self): with sys_tempfile.TemporaryDirectory() as temp_dir: # This file contains Chinese symbols and an accented char in the name. with open(os.path.join(temp_dir, UNICODE_FILENAME), 'w+b') as file1: file1.write(b'b' * (2 ** 10)) file1.seek(0) response = self.client.post('/unicode_name/', {'file_unicode': file1}) self.assertEqual(response.status_code, 200) def test_unicode_file_name_rfc2231(self): """ Test receiving file upload when filename is encoded with RFC2231 (#22971). """ payload = client.FakePayload() payload.write('\r\n'.join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="file_unicode"; filename*=UTF-8\'\'%s' % quote(UNICODE_FILENAME), 'Content-Type: application/octet-stream', '', 'You got pwnd.\r\n', '\r\n--' + client.BOUNDARY + '--\r\n' ])) r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': "/unicode_name/", 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) def test_unicode_name_rfc2231(self): """ Test receiving file upload when filename is encoded with RFC2231 (#22971). 
""" payload = client.FakePayload() payload.write( '\r\n'.join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name*=UTF-8\'\'file_unicode; filename*=UTF-8\'\'%s' % quote( UNICODE_FILENAME ), 'Content-Type: application/octet-stream', '', 'You got pwnd.\r\n', '\r\n--' + client.BOUNDARY + '--\r\n' ]) ) r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': "/unicode_name/", 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) def test_unicode_file_name_rfc2231_with_double_quotes(self): payload = client.FakePayload() payload.write('\r\n'.join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="file_unicode"; ' 'filename*="UTF-8\'\'%s"' % quote(UNICODE_FILENAME), 'Content-Type: application/octet-stream', '', 'You got pwnd.\r\n', '\r\n--' + client.BOUNDARY + '--\r\n', ])) r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': '/unicode_name/', 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) def test_unicode_name_rfc2231_with_double_quotes(self): payload = client.FakePayload() payload.write('\r\n'.join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name*="UTF-8\'\'file_unicode"; ' 'filename*="UTF-8\'\'%s"' % quote(UNICODE_FILENAME), 'Content-Type: application/octet-stream', '', 'You got pwnd.\r\n', '\r\n--' + client.BOUNDARY + '--\r\n' ])) r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': '/unicode_name/', 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) def test_blank_filenames(self): """ Receiving file upload when filename is blank (before and after sanitization) should be okay. """ filenames = [ '', # Normalized by MultiPartParser.IE_sanitize(). 'C:\\Windows\\', # Normalized by os.path.basename(). '/', 'ends-with-slash/', ] payload = client.FakePayload() for i, name in enumerate(filenames): payload.write('\r\n'.join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="file%s"; filename="%s"' % (i, name), 'Content-Type: application/octet-stream', '', 'You got pwnd.\r\n' ])) payload.write('\r\n--' + client.BOUNDARY + '--\r\n') r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': '/echo/', 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) # Empty filenames should be ignored received = response.json() for i, name in enumerate(filenames): self.assertIsNone(received.get('file%s' % i)) def test_dangerous_file_names(self): """Uploaded file names should be sanitized before ever reaching the view.""" # This test simulates possible directory traversal attacks by a # malicious uploader We have to do some monkeybusiness here to construct # a malicious payload with an invalid file name (containing os.sep or # os.pardir). This similar to what an attacker would need to do when # trying such an attack. 
payload = client.FakePayload() for i, name in enumerate(CANDIDATE_TRAVERSAL_FILE_NAMES): payload.write('\r\n'.join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="file%s"; filename="%s"' % (i, name), 'Content-Type: application/octet-stream', '', 'You got pwnd.\r\n' ])) payload.write('\r\n--' + client.BOUNDARY + '--\r\n') r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': "/echo/", 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) # The filenames should have been sanitized by the time it got to the view. received = response.json() for i, name in enumerate(CANDIDATE_TRAVERSAL_FILE_NAMES): got = received["file%s" % i] self.assertEqual(got, "hax0rd.txt") def test_filename_overflow(self): """File names over 256 characters (dangerous on some platforms) get fixed up.""" long_str = 'f' * 300 cases = [ # field name, filename, expected ('long_filename', '%s.txt' % long_str, '%s.txt' % long_str[:251]), ('long_extension', 'foo.%s' % long_str, '.%s' % long_str[:254]), ('no_extension', long_str, long_str[:255]), ('no_filename', '.%s' % long_str, '.%s' % long_str[:254]), ('long_everything', '%s.%s' % (long_str, long_str), '.%s' % long_str[:254]), ] payload = client.FakePayload() for name, filename, _ in cases: payload.write("\r\n".join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="{}"; filename="{}"', 'Content-Type: application/octet-stream', '', 'Oops.', '' ]).format(name, filename)) payload.write('\r\n--' + client.BOUNDARY + '--\r\n') r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': "/echo/", 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) result = response.json() for name, _, expected in cases: got = result[name] self.assertEqual(expected, got, 'Mismatch for {}'.format(name)) self.assertLess(len(got), 256, "Got a long file name (%s characters)." 
% len(got)) def test_file_content(self): file = tempfile.NamedTemporaryFile with file(suffix=".ctype_extra") as no_content_type, file(suffix=".ctype_extra") as simple_file: no_content_type.write(b'no content') no_content_type.seek(0) simple_file.write(b'text content') simple_file.seek(0) simple_file.content_type = 'text/plain' string_io = StringIO('string content') bytes_io = BytesIO(b'binary content') response = self.client.post('/echo_content/', { 'no_content_type': no_content_type, 'simple_file': simple_file, 'string': string_io, 'binary': bytes_io, }) received = response.json() self.assertEqual(received['no_content_type'], 'no content') self.assertEqual(received['simple_file'], 'text content') self.assertEqual(received['string'], 'string content') self.assertEqual(received['binary'], 'binary content') def test_content_type_extra(self): """Uploaded files may have content type parameters available.""" file = tempfile.NamedTemporaryFile with file(suffix=".ctype_extra") as no_content_type, file(suffix=".ctype_extra") as simple_file: no_content_type.write(b'something') no_content_type.seek(0) simple_file.write(b'something') simple_file.seek(0) simple_file.content_type = 'text/plain; test-key=test_value' response = self.client.post('/echo_content_type_extra/', { 'no_content_type': no_content_type, 'simple_file': simple_file, }) received = response.json() self.assertEqual(received['no_content_type'], {}) self.assertEqual(received['simple_file'], {'test-key': 'test_value'}) def test_truncated_multipart_handled_gracefully(self): """ If passed an incomplete multipart message, MultiPartParser does not attempt to read beyond the end of the stream, and simply will handle the part that can be parsed gracefully. """ payload_str = "\r\n".join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="file"; filename="foo.txt"', 'Content-Type: application/octet-stream', '', 'file contents' '--' + client.BOUNDARY + '--', '', ]) payload = client.FakePayload(payload_str[:-10]) r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': '/echo/', 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } self.assertEqual(self.client.request(**r).json(), {}) def test_empty_multipart_handled_gracefully(self): """ If passed an empty multipart message, MultiPartParser will return an empty QueryDict. """ r = { 'CONTENT_LENGTH': 0, 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': '/echo/', 'REQUEST_METHOD': 'POST', 'wsgi.input': client.FakePayload(b''), } self.assertEqual(self.client.request(**r).json(), {}) def test_custom_upload_handler(self): file = tempfile.NamedTemporaryFile with file() as smallfile, file() as bigfile: # A small file (under the 5M quota) smallfile.write(b'a' * (2 ** 21)) smallfile.seek(0) # A big file (over the quota) bigfile.write(b'a' * (10 * 2 ** 20)) bigfile.seek(0) # Small file posting should work. self.assertIn('f', self.client.post('/quota/', {'f': smallfile}).json()) # Large files don't go through. self.assertNotIn('f', self.client.post("/quota/", {'f': bigfile}).json()) def test_broken_custom_upload_handler(self): with tempfile.NamedTemporaryFile() as file: file.write(b'a' * (2 ** 21)) file.seek(0) msg = 'You cannot alter upload handlers after the upload has been processed.' 
with self.assertRaisesMessage(AttributeError, msg): self.client.post('/quota/broken/', {'f': file}) def test_stop_upload_temporary_file_handler(self): with tempfile.NamedTemporaryFile() as temp_file: temp_file.write(b'a') temp_file.seek(0) response = self.client.post('/temp_file/stop_upload/', {'file': temp_file}) temp_path = response.json()['temp_path'] self.assertIs(os.path.exists(temp_path), False) def test_upload_interrupted_temporary_file_handler(self): # Simulate an interrupted upload by omitting the closing boundary. class MockedParser(Parser): def __iter__(self): for item in super().__iter__(): item_type, meta_data, field_stream = item yield item_type, meta_data, field_stream if item_type == FILE: return with tempfile.NamedTemporaryFile() as temp_file: temp_file.write(b'a') temp_file.seek(0) with mock.patch( 'django.http.multipartparser.Parser', MockedParser, ): response = self.client.post( '/temp_file/upload_interrupted/', {'file': temp_file}, ) temp_path = response.json()['temp_path'] self.assertIs(os.path.exists(temp_path), False) def test_fileupload_getlist(self): file = tempfile.NamedTemporaryFile with file() as file1, file() as file2, file() as file2a: file1.write(b'a' * (2 ** 23)) file1.seek(0) file2.write(b'a' * (2 * 2 ** 18)) file2.seek(0) file2a.write(b'a' * (5 * 2 ** 20)) file2a.seek(0) response = self.client.post('/getlist_count/', { 'file1': file1, 'field1': 'test', 'field2': 'test3', 'field3': 'test5', 'field4': 'test6', 'field5': 'test7', 'file2': (file2, file2a) }) got = response.json() self.assertEqual(got.get('file1'), 1) self.assertEqual(got.get('file2'), 2) def test_fileuploads_closed_at_request_end(self): file = tempfile.NamedTemporaryFile with file() as f1, file() as f2a, file() as f2b: response = self.client.post('/fd_closing/t/', { 'file': f1, 'file2': (f2a, f2b), }) request = response.wsgi_request # The files were parsed. self.assertTrue(hasattr(request, '_files')) file = request._files['file'] self.assertTrue(file.closed) files = request._files.getlist('file2') self.assertTrue(files[0].closed) self.assertTrue(files[1].closed) def test_no_parsing_triggered_by_fd_closing(self): file = tempfile.NamedTemporaryFile with file() as f1, file() as f2a, file() as f2b: response = self.client.post('/fd_closing/f/', { 'file': f1, 'file2': (f2a, f2b), }) request = response.wsgi_request # The fd closing logic doesn't trigger parsing of the stream self.assertFalse(hasattr(request, '_files')) def test_file_error_blocking(self): """ The server should not block when there are upload errors (bug #8622). This can happen if something -- i.e. an exception handler -- tries to access POST while handling an error in parsing POST. This shouldn't cause an infinite loop! """ class POSTAccessingHandler(client.ClientHandler): """A handler that'll access POST during an exception.""" def handle_uncaught_exception(self, request, resolver, exc_info): ret = super().handle_uncaught_exception(request, resolver, exc_info) request.POST # evaluate return ret # Maybe this is a little more complicated that it needs to be; but if # the django.test.client.FakePayload.read() implementation changes then # this test would fail. 
So we need to know exactly what kind of error # it raises when there is an attempt to read more than the available bytes: try: client.FakePayload(b'a').read(2) except Exception as err: reference_error = err # install the custom handler that tries to access request.POST self.client.handler = POSTAccessingHandler() with open(__file__, 'rb') as fp: post_data = { 'name': 'Ringo', 'file_field': fp, } try: self.client.post('/upload_errors/', post_data) except reference_error.__class__ as err: self.assertNotEqual( str(err), str(reference_error), "Caught a repeated exception that'll cause an infinite loop in file uploads." ) except Exception as err: # CustomUploadError is the error that should have been raised self.assertEqual(err.__class__, uploadhandler.CustomUploadError) def test_filename_case_preservation(self): """ The storage backend shouldn't mess with the case of the filenames uploaded. """ # Synthesize the contents of a file upload with a mixed case filename # so we don't have to carry such a file in the Django tests source code # tree. vars = {'boundary': 'oUrBoUnDaRyStRiNg'} post_data = [ '--%(boundary)s', 'Content-Disposition: form-data; name="file_field"; filename="MiXeD_cAsE.txt"', 'Content-Type: application/octet-stream', '', 'file contents\n' '', '--%(boundary)s--\r\n', ] response = self.client.post( '/filename_case/', '\r\n'.join(post_data) % vars, 'multipart/form-data; boundary=%(boundary)s' % vars ) self.assertEqual(response.status_code, 200) id = int(response.content) obj = FileModel.objects.get(pk=id) # The name of the file uploaded and the file stored in the server-side # shouldn't differ. self.assertEqual(os.path.basename(obj.testfile.path), 'MiXeD_cAsE.txt') def test_filename_traversal_upload(self): os.makedirs(UPLOAD_TO, exist_ok=True) self.addCleanup(shutil.rmtree, MEDIA_ROOT) tests = [ '..&#x2F;test.txt', '..&sol;test.txt', ] for file_name in tests: with self.subTest(file_name=file_name): payload = client.FakePayload() payload.write( '\r\n'.join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="my_file"; ' 'filename="%s";' % file_name, 'Content-Type: text/plain', '', 'file contents.\r\n', '\r\n--' + client.BOUNDARY + '--\r\n', ]), ) r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': '/upload_traversal/', 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) result = response.json() self.assertEqual(response.status_code, 200) self.assertEqual(result['file_name'], 'test.txt') self.assertIs( os.path.exists(os.path.join(MEDIA_ROOT, 'test.txt')), False, ) self.assertIs( os.path.exists(os.path.join(UPLOAD_TO, 'test.txt')), True, ) @override_settings(MEDIA_ROOT=MEDIA_ROOT) class DirectoryCreationTests(SimpleTestCase): """ Tests for error handling during directory creation via _save_FIELD_file (ticket #6450) """ @classmethod def setUpClass(cls): super().setUpClass() os.makedirs(MEDIA_ROOT, exist_ok=True) @classmethod def tearDownClass(cls): shutil.rmtree(MEDIA_ROOT) super().tearDownClass() def setUp(self): self.obj = FileModel() @unittest.skipIf(sys.platform == 'win32', "Python on Windows doesn't have working os.chmod().") def test_readonly_root(self): """Permission errors are not swallowed""" os.chmod(MEDIA_ROOT, 0o500) self.addCleanup(os.chmod, MEDIA_ROOT, 0o700) with self.assertRaises(PermissionError): self.obj.testfile.save('foo.txt', SimpleUploadedFile('foo.txt', b'x'), save=False) def test_not_a_directory(self): # Create a file with the upload directory name open(UPLOAD_TO, 
'wb').close() self.addCleanup(os.remove, UPLOAD_TO) msg = '%s exists and is not a directory.' % UPLOAD_TO with self.assertRaisesMessage(FileExistsError, msg): with SimpleUploadedFile('foo.txt', b'x') as file: self.obj.testfile.save('foo.txt', file, save=False) class MultiParserTests(SimpleTestCase): def test_empty_upload_handlers(self): # We're not actually parsing here; just checking if the parser properly # instantiates with empty upload handlers. MultiPartParser({ 'CONTENT_TYPE': 'multipart/form-data; boundary=_foo', 'CONTENT_LENGTH': '1' }, StringIO('x'), [], 'utf-8') def test_invalid_content_type(self): with self.assertRaisesMessage(MultiPartParserError, 'Invalid Content-Type: text/plain'): MultiPartParser({ 'CONTENT_TYPE': 'text/plain', 'CONTENT_LENGTH': '1', }, StringIO('x'), [], 'utf-8') def test_negative_content_length(self): with self.assertRaisesMessage(MultiPartParserError, 'Invalid content length: -1'): MultiPartParser({ 'CONTENT_TYPE': 'multipart/form-data; boundary=_foo', 'CONTENT_LENGTH': -1, }, StringIO('x'), [], 'utf-8') def test_bad_type_content_length(self): multipart_parser = MultiPartParser({ 'CONTENT_TYPE': 'multipart/form-data; boundary=_foo', 'CONTENT_LENGTH': 'a', }, StringIO('x'), [], 'utf-8') self.assertEqual(multipart_parser._content_length, 0) def test_sanitize_file_name(self): parser = MultiPartParser({ 'CONTENT_TYPE': 'multipart/form-data; boundary=_foo', 'CONTENT_LENGTH': '1' }, StringIO('x'), [], 'utf-8') for file_name in CANDIDATE_TRAVERSAL_FILE_NAMES: with self.subTest(file_name=file_name): self.assertEqual(parser.sanitize_file_name(file_name), 'hax0rd.txt') def test_sanitize_invalid_file_name(self): parser = MultiPartParser({ 'CONTENT_TYPE': 'multipart/form-data; boundary=_foo', 'CONTENT_LENGTH': '1', }, StringIO('x'), [], 'utf-8') for file_name in CANDIDATE_INVALID_FILE_NAMES: with self.subTest(file_name=file_name): self.assertIsNone(parser.sanitize_file_name(file_name)) def test_rfc2231_parsing(self): test_data = ( (b"Content-Type: application/x-stuff; title*=us-ascii'en-us'This%20is%20%2A%2A%2Afun%2A%2A%2A", "This is ***fun***"), (b"Content-Type: application/x-stuff; title*=UTF-8''foo-%c3%a4.html", "foo-ä.html"), (b"Content-Type: application/x-stuff; title*=iso-8859-1''foo-%E4.html", "foo-ä.html"), ) for raw_line, expected_title in test_data: parsed = parse_header(raw_line) self.assertEqual(parsed[1]['title'], expected_title) def test_rfc2231_wrong_title(self): """ Test wrongly formatted RFC 2231 headers (missing double single quotes). Parsing should not crash (#24209). """ test_data = ( (b"Content-Type: application/x-stuff; title*='This%20is%20%2A%2A%2Afun%2A%2A%2A", b"'This%20is%20%2A%2A%2Afun%2A%2A%2A"), (b"Content-Type: application/x-stuff; title*='foo.html", b"'foo.html"), (b"Content-Type: application/x-stuff; title*=bar.html", b"bar.html"), ) for raw_line, expected_title in test_data: parsed = parse_header(raw_line) self.assertEqual(parsed[1]['title'], expected_title)
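# A minimal sketch (not one of the tests above) of the two parser helpers exercised
# here. The helper name is illustrative only; the expected values are the ones the
# tests above assert.
def _parser_helpers_sketch():
    parser = MultiPartParser({
        'CONTENT_TYPE': 'multipart/form-data; boundary=_foo',
        'CONTENT_LENGTH': '1',
    }, StringIO('x'), [], 'utf-8')
    # Path components and traversal sequences are stripped; only the base name survives.
    sanitized = parser.sanitize_file_name('../../hax0rd.txt')  # 'hax0rd.txt'
    # parse_header() splits a raw header line into its main value and a parameter
    # dict, decoding RFC 2231 encoded parameters along the way.
    main_value, params = parse_header(
        b"Content-Type: application/x-stuff; title*=UTF-8''foo-%c3%a4.html"
    )
    return sanitized, main_value, params['title']  # params['title'] == 'foo-ä.html'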
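# A further module starts here: model validation tests for Meta options such as
# index_together, unique_together, indexes, and constraints, apparently from
# Django's invalid_models_tests suite.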
import unittest from django.core.checks import Error, Warning from django.core.checks.model_checks import _check_lazy_references from django.db import connection, connections, models from django.db.models.functions import Abs, Lower, Round from django.db.models.signals import post_init from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from django.test.utils import isolate_apps, override_settings, register_lookup class EmptyRouter: pass def get_max_column_name_length(): allowed_len = None db_alias = None for db in ('default', 'other'): connection = connections[db] max_name_length = connection.ops.max_name_length() if max_name_length is not None and not connection.features.truncates_names: if allowed_len is None or max_name_length < allowed_len: allowed_len = max_name_length db_alias = db return (allowed_len, db_alias) @isolate_apps('invalid_models_tests') class IndexTogetherTests(SimpleTestCase): def test_non_iterable(self): class Model(models.Model): class Meta: index_together = 42 self.assertEqual(Model.check(), [ Error( "'index_together' must be a list or tuple.", obj=Model, id='models.E008', ), ]) def test_non_list(self): class Model(models.Model): class Meta: index_together = 'not-a-list' self.assertEqual(Model.check(), [ Error( "'index_together' must be a list or tuple.", obj=Model, id='models.E008', ), ]) def test_list_containing_non_iterable(self): class Model(models.Model): class Meta: index_together = [('a', 'b'), 42] self.assertEqual(Model.check(), [ Error( "All 'index_together' elements must be lists or tuples.", obj=Model, id='models.E009', ), ]) def test_pointing_to_missing_field(self): class Model(models.Model): class Meta: index_together = [['missing_field']] self.assertEqual(Model.check(), [ Error( "'index_together' refers to the nonexistent field 'missing_field'.", obj=Model, id='models.E012', ), ]) def test_pointing_to_non_local_field(self): class Foo(models.Model): field1 = models.IntegerField() class Bar(Foo): field2 = models.IntegerField() class Meta: index_together = [['field2', 'field1']] self.assertEqual(Bar.check(), [ Error( "'index_together' refers to field 'field1' which is not " "local to model 'Bar'.", hint='This issue may be caused by multi-table inheritance.', obj=Bar, id='models.E016', ), ]) def test_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: index_together = [['m2m']] self.assertEqual(Model.check(), [ Error( "'index_together' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'index_together'.", obj=Model, id='models.E013', ), ]) def test_pointing_to_fk(self): class Foo(models.Model): pass class Bar(models.Model): foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1') foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2') class Meta: index_together = [['foo_1_id', 'foo_2']] self.assertEqual(Bar.check(), []) # unique_together tests are very similar to index_together tests. 
@isolate_apps('invalid_models_tests') class UniqueTogetherTests(SimpleTestCase): def test_non_iterable(self): class Model(models.Model): class Meta: unique_together = 42 self.assertEqual(Model.check(), [ Error( "'unique_together' must be a list or tuple.", obj=Model, id='models.E010', ), ]) def test_list_containing_non_iterable(self): class Model(models.Model): one = models.IntegerField() two = models.IntegerField() class Meta: unique_together = [('a', 'b'), 42] self.assertEqual(Model.check(), [ Error( "All 'unique_together' elements must be lists or tuples.", obj=Model, id='models.E011', ), ]) def test_non_list(self): class Model(models.Model): class Meta: unique_together = 'not-a-list' self.assertEqual(Model.check(), [ Error( "'unique_together' must be a list or tuple.", obj=Model, id='models.E010', ), ]) def test_valid_model(self): class Model(models.Model): one = models.IntegerField() two = models.IntegerField() class Meta: # unique_together can be a simple tuple unique_together = ('one', 'two') self.assertEqual(Model.check(), []) def test_pointing_to_missing_field(self): class Model(models.Model): class Meta: unique_together = [['missing_field']] self.assertEqual(Model.check(), [ Error( "'unique_together' refers to the nonexistent field 'missing_field'.", obj=Model, id='models.E012', ), ]) def test_pointing_to_m2m(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: unique_together = [['m2m']] self.assertEqual(Model.check(), [ Error( "'unique_together' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'unique_together'.", obj=Model, id='models.E013', ), ]) def test_pointing_to_fk(self): class Foo(models.Model): pass class Bar(models.Model): foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1') foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2') class Meta: unique_together = [['foo_1_id', 'foo_2']] self.assertEqual(Bar.check(), []) @isolate_apps('invalid_models_tests') class IndexesTests(TestCase): def test_pointing_to_missing_field(self): class Model(models.Model): class Meta: indexes = [models.Index(fields=['missing_field'], name='name')] self.assertEqual(Model.check(), [ Error( "'indexes' refers to the nonexistent field 'missing_field'.", obj=Model, id='models.E012', ), ]) def test_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: indexes = [models.Index(fields=['m2m'], name='name')] self.assertEqual(Model.check(), [ Error( "'indexes' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'indexes'.", obj=Model, id='models.E013', ), ]) def test_pointing_to_non_local_field(self): class Foo(models.Model): field1 = models.IntegerField() class Bar(Foo): field2 = models.IntegerField() class Meta: indexes = [models.Index(fields=['field2', 'field1'], name='name')] self.assertEqual(Bar.check(), [ Error( "'indexes' refers to field 'field1' which is not local to " "model 'Bar'.", hint='This issue may be caused by multi-table inheritance.', obj=Bar, id='models.E016', ), ]) def test_pointing_to_fk(self): class Foo(models.Model): pass class Bar(models.Model): foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1') foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2') class Meta: indexes = [models.Index(fields=['foo_1_id', 'foo_2'], name='index_name')] self.assertEqual(Bar.check(), []) def test_name_constraints(self): class Model(models.Model): class Meta: 
indexes = [ models.Index(fields=['id'], name='_index_name'), models.Index(fields=['id'], name='5index_name'), ] self.assertEqual(Model.check(), [ Error( "The index name '%sindex_name' cannot start with an " "underscore or a number." % prefix, obj=Model, id='models.E033', ) for prefix in ('_', '5') ]) def test_max_name_length(self): index_name = 'x' * 31 class Model(models.Model): class Meta: indexes = [models.Index(fields=['id'], name=index_name)] self.assertEqual(Model.check(), [ Error( "The index name '%s' cannot be longer than 30 characters." % index_name, obj=Model, id='models.E034', ), ]) def test_index_with_condition(self): class Model(models.Model): age = models.IntegerField() class Meta: indexes = [ models.Index( fields=['age'], name='index_age_gte_10', condition=models.Q(age__gte=10), ), ] errors = Model.check(databases=self.databases) expected = [] if connection.features.supports_partial_indexes else [ Warning( '%s does not support indexes with conditions.' % connection.display_name, hint=( "Conditions will be ignored. Silence this warning if you " "don't care about it." ), obj=Model, id='models.W037', ) ] self.assertEqual(errors, expected) def test_index_with_condition_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_partial_indexes'} indexes = [ models.Index( fields=['age'], name='index_age_gte_10', condition=models.Q(age__gte=10), ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_index_with_include(self): class Model(models.Model): age = models.IntegerField() class Meta: indexes = [ models.Index( fields=['age'], name='index_age_include_id', include=['id'], ), ] errors = Model.check(databases=self.databases) expected = [] if connection.features.supports_covering_indexes else [ Warning( '%s does not support indexes with non-key columns.' % connection.display_name, hint=( "Non-key columns will be ignored. Silence this warning if " "you don't care about it." 
), obj=Model, id='models.W040', ) ] self.assertEqual(errors, expected) def test_index_with_include_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_covering_indexes'} indexes = [ models.Index( fields=['age'], name='index_age_include_id', include=['id'], ), ] self.assertEqual(Model.check(databases=self.databases), []) @skipUnlessDBFeature('supports_covering_indexes') def test_index_include_pointing_to_missing_field(self): class Model(models.Model): class Meta: indexes = [ models.Index(fields=['id'], include=['missing_field'], name='name'), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'indexes' refers to the nonexistent field 'missing_field'.", obj=Model, id='models.E012', ), ]) @skipUnlessDBFeature('supports_covering_indexes') def test_index_include_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: indexes = [models.Index(fields=['id'], include=['m2m'], name='name')] self.assertEqual(Model.check(databases=self.databases), [ Error( "'indexes' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'indexes'.", obj=Model, id='models.E013', ), ]) @skipUnlessDBFeature('supports_covering_indexes') def test_index_include_pointing_to_non_local_field(self): class Parent(models.Model): field1 = models.IntegerField() class Child(Parent): field2 = models.IntegerField() class Meta: indexes = [ models.Index(fields=['field2'], include=['field1'], name='name'), ] self.assertEqual(Child.check(databases=self.databases), [ Error( "'indexes' refers to field 'field1' which is not local to " "model 'Child'.", hint='This issue may be caused by multi-table inheritance.', obj=Child, id='models.E016', ), ]) @skipUnlessDBFeature('supports_covering_indexes') def test_index_include_pointing_to_fk(self): class Target(models.Model): pass class Model(models.Model): fk_1 = models.ForeignKey(Target, models.CASCADE, related_name='target_1') fk_2 = models.ForeignKey(Target, models.CASCADE, related_name='target_2') class Meta: constraints = [ models.Index( fields=['id'], include=['fk_1_id', 'fk_2'], name='name', ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_func_index(self): class Model(models.Model): name = models.CharField(max_length=10) class Meta: indexes = [models.Index(Lower('name'), name='index_lower_name')] warn = Warning( '%s does not support indexes on expressions.' % connection.display_name, hint=( "An index won't be created. Silence this warning if you don't " "care about it." 
), obj=Model, id='models.W043', ) expected = [] if connection.features.supports_expression_indexes else [warn] self.assertEqual(Model.check(databases=self.databases), expected) def test_func_index_required_db_features(self): class Model(models.Model): name = models.CharField(max_length=10) class Meta: indexes = [models.Index(Lower('name'), name='index_lower_name')] required_db_features = {'supports_expression_indexes'} self.assertEqual(Model.check(databases=self.databases), []) def test_func_index_complex_expression_custom_lookup(self): class Model(models.Model): height = models.IntegerField() weight = models.IntegerField() class Meta: indexes = [ models.Index( models.F('height') / (models.F('weight__abs') + models.Value(5)), name='name', ), ] with register_lookup(models.IntegerField, Abs): self.assertEqual(Model.check(), []) def test_func_index_pointing_to_missing_field(self): class Model(models.Model): class Meta: indexes = [models.Index(Lower('missing_field').desc(), name='name')] self.assertEqual(Model.check(), [ Error( "'indexes' refers to the nonexistent field 'missing_field'.", obj=Model, id='models.E012', ), ]) def test_func_index_pointing_to_missing_field_nested(self): class Model(models.Model): class Meta: indexes = [ models.Index(Abs(Round('missing_field')), name='name'), ] self.assertEqual(Model.check(), [ Error( "'indexes' refers to the nonexistent field 'missing_field'.", obj=Model, id='models.E012', ), ]) def test_func_index_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: indexes = [models.Index(Lower('m2m'), name='name')] self.assertEqual(Model.check(), [ Error( "'indexes' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'indexes'.", obj=Model, id='models.E013', ), ]) def test_func_index_pointing_to_non_local_field(self): class Foo(models.Model): field1 = models.CharField(max_length=15) class Bar(Foo): class Meta: indexes = [models.Index(Lower('field1'), name='name')] self.assertEqual(Bar.check(), [ Error( "'indexes' refers to field 'field1' which is not local to " "model 'Bar'.", hint='This issue may be caused by multi-table inheritance.', obj=Bar, id='models.E016', ), ]) def test_func_index_pointing_to_fk(self): class Foo(models.Model): pass class Bar(models.Model): foo_1 = models.ForeignKey(Foo, models.CASCADE, related_name='bar_1') foo_2 = models.ForeignKey(Foo, models.CASCADE, related_name='bar_2') class Meta: indexes = [ models.Index(Lower('foo_1_id'), Lower('foo_2'), name='index_name'), ] self.assertEqual(Bar.check(), []) @isolate_apps('invalid_models_tests') class FieldNamesTests(TestCase): databases = {'default', 'other'} def test_ending_with_underscore(self): class Model(models.Model): field_ = models.CharField(max_length=10) m2m_ = models.ManyToManyField('self') self.assertEqual(Model.check(), [ Error( 'Field names must not end with an underscore.', obj=Model._meta.get_field('field_'), id='fields.E001', ), Error( 'Field names must not end with an underscore.', obj=Model._meta.get_field('m2m_'), id='fields.E001', ), ]) max_column_name_length, column_limit_db_alias = get_max_column_name_length() @unittest.skipIf(max_column_name_length is None, "The database doesn't have a column name length limit.") def test_M2M_long_column_name(self): """ #13711 -- Model check for long M2M column names when database has column name length limits. """ # A model with very long name which will be used to set relations to. 
class VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz(models.Model): title = models.CharField(max_length=11) # Main model for which checks will be performed. class ModelWithLongField(models.Model): m2m_field = models.ManyToManyField( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, related_name='rn1', ) m2m_field2 = models.ManyToManyField( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, related_name='rn2', through='m2msimple', ) m2m_field3 = models.ManyToManyField( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, related_name='rn3', through='m2mcomplex', ) fk = models.ForeignKey( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, models.CASCADE, related_name='rn4', ) # Models used for setting `through` in M2M field. class m2msimple(models.Model): id2 = models.ForeignKey(ModelWithLongField, models.CASCADE) class m2mcomplex(models.Model): id2 = models.ForeignKey(ModelWithLongField, models.CASCADE) long_field_name = 'a' * (self.max_column_name_length + 1) models.ForeignKey( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, models.CASCADE, ).contribute_to_class(m2msimple, long_field_name) models.ForeignKey( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, models.CASCADE, db_column=long_field_name ).contribute_to_class(m2mcomplex, long_field_name) errors = ModelWithLongField.check(databases=('default', 'other')) # First error because of M2M field set on the model with long name. m2m_long_name = "verylongmodelnamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz_id" if self.max_column_name_length > len(m2m_long_name): # Some databases support names longer than the test name. expected = [] else: expected = [ Error( 'Autogenerated column name too long for M2M field "%s". ' 'Maximum length is "%s" for database "%s".' % (m2m_long_name, self.max_column_name_length, self.column_limit_db_alias), hint="Use 'through' to create a separate model for " "M2M and then set column_name using 'db_column'.", obj=ModelWithLongField, id='models.E019', ) ] # Second error because the FK specified in the `through` model # `m2msimple` has auto-generated name longer than allowed. # There will be no check errors in the other M2M because it # specifies db_column for the FK in `through` model even if the actual # name is longer than the limits of the database. expected.append( Error( 'Autogenerated column name too long for M2M field "%s_id". ' 'Maximum length is "%s" for database "%s".' % (long_field_name, self.max_column_name_length, self.column_limit_db_alias), hint="Use 'through' to create a separate model for " "M2M and then set column_name using 'db_column'.", obj=ModelWithLongField, id='models.E019', ) ) self.assertEqual(errors, expected) # Check for long column names is called only for specified database # aliases. self.assertEqual(ModelWithLongField.check(databases=None), []) @unittest.skipIf(max_column_name_length is None, "The database doesn't have a column name length limit.") def test_local_field_long_column_name(self): """ #13711 -- Model check for long column names when database does not support long names. 
""" class ModelWithLongField(models.Model): title = models.CharField(max_length=11) long_field_name = 'a' * (self.max_column_name_length + 1) long_field_name2 = 'b' * (self.max_column_name_length + 1) models.CharField(max_length=11).contribute_to_class(ModelWithLongField, long_field_name) models.CharField(max_length=11, db_column='vlmn').contribute_to_class(ModelWithLongField, long_field_name2) self.assertEqual(ModelWithLongField.check(databases=('default', 'other')), [ Error( 'Autogenerated column name too long for field "%s". ' 'Maximum length is "%s" for database "%s".' % (long_field_name, self.max_column_name_length, self.column_limit_db_alias), hint="Set the column name manually using 'db_column'.", obj=ModelWithLongField, id='models.E018', ) ]) # Check for long column names is called only for specified database # aliases. self.assertEqual(ModelWithLongField.check(databases=None), []) def test_including_separator(self): class Model(models.Model): some__field = models.IntegerField() self.assertEqual(Model.check(), [ Error( 'Field names must not contain "__".', obj=Model._meta.get_field('some__field'), id='fields.E002', ) ]) def test_pk(self): class Model(models.Model): pk = models.IntegerField() self.assertEqual(Model.check(), [ Error( "'pk' is a reserved word that cannot be used as a field name.", obj=Model._meta.get_field('pk'), id='fields.E003', ) ]) def test_db_column_clash(self): class Model(models.Model): foo = models.IntegerField() bar = models.IntegerField(db_column='foo') self.assertEqual(Model.check(), [ Error( "Field 'bar' has column name 'foo' that is used by " "another field.", hint="Specify a 'db_column' for the field.", obj=Model, id='models.E007', ) ]) @isolate_apps('invalid_models_tests') class ShadowingFieldsTests(SimpleTestCase): def test_field_name_clash_with_child_accessor(self): class Parent(models.Model): pass class Child(Parent): child = models.CharField(max_length=100) self.assertEqual(Child.check(), [ Error( "The field 'child' clashes with the field " "'child' from model 'invalid_models_tests.parent'.", obj=Child._meta.get_field('child'), id='models.E006', ) ]) def test_multiinheritance_clash(self): class Mother(models.Model): clash = models.IntegerField() class Father(models.Model): clash = models.IntegerField() class Child(Mother, Father): # Here we have two clashed: id (automatic field) and clash, because # both parents define these fields. pass self.assertEqual(Child.check(), [ Error( "The field 'id' from parent model " "'invalid_models_tests.mother' clashes with the field 'id' " "from parent model 'invalid_models_tests.father'.", obj=Child, id='models.E005', ), Error( "The field 'clash' from parent model " "'invalid_models_tests.mother' clashes with the field 'clash' " "from parent model 'invalid_models_tests.father'.", obj=Child, id='models.E005', ) ]) def test_inheritance_clash(self): class Parent(models.Model): f_id = models.IntegerField() class Target(models.Model): # This field doesn't result in a clash. f_id = models.IntegerField() class Child(Parent): # This field clashes with parent "f_id" field. 
f = models.ForeignKey(Target, models.CASCADE) self.assertEqual(Child.check(), [ Error( "The field 'f' clashes with the field 'f_id' " "from model 'invalid_models_tests.parent'.", obj=Child._meta.get_field('f'), id='models.E006', ) ]) def test_multigeneration_inheritance(self): class GrandParent(models.Model): clash = models.IntegerField() class Parent(GrandParent): pass class Child(Parent): pass class GrandChild(Child): clash = models.IntegerField() self.assertEqual(GrandChild.check(), [ Error( "The field 'clash' clashes with the field 'clash' " "from model 'invalid_models_tests.grandparent'.", obj=GrandChild._meta.get_field('clash'), id='models.E006', ) ]) def test_id_clash(self): class Target(models.Model): pass class Model(models.Model): fk = models.ForeignKey(Target, models.CASCADE) fk_id = models.IntegerField() self.assertEqual(Model.check(), [ Error( "The field 'fk_id' clashes with the field 'fk' from model " "'invalid_models_tests.model'.", obj=Model._meta.get_field('fk_id'), id='models.E006', ) ]) @isolate_apps('invalid_models_tests') class OtherModelTests(SimpleTestCase): def test_unique_primary_key(self): invalid_id = models.IntegerField(primary_key=False) class Model(models.Model): id = invalid_id self.assertEqual(Model.check(), [ Error( "'id' can only be used as a field name if the field also sets " "'primary_key=True'.", obj=Model, id='models.E004', ), ]) def test_ordering_non_iterable(self): class Model(models.Model): class Meta: ordering = 'missing_field' self.assertEqual(Model.check(), [ Error( "'ordering' must be a tuple or list " "(even if you want to order by only one field).", obj=Model, id='models.E014', ), ]) def test_just_ordering_no_errors(self): class Model(models.Model): order = models.PositiveIntegerField() class Meta: ordering = ['order'] self.assertEqual(Model.check(), []) def test_just_order_with_respect_to_no_errors(self): class Question(models.Model): pass class Answer(models.Model): question = models.ForeignKey(Question, models.CASCADE) class Meta: order_with_respect_to = 'question' self.assertEqual(Answer.check(), []) def test_ordering_with_order_with_respect_to(self): class Question(models.Model): pass class Answer(models.Model): question = models.ForeignKey(Question, models.CASCADE) order = models.IntegerField() class Meta: order_with_respect_to = 'question' ordering = ['order'] self.assertEqual(Answer.check(), [ Error( "'ordering' and 'order_with_respect_to' cannot be used together.", obj=Answer, id='models.E021', ), ]) def test_non_valid(self): class RelationModel(models.Model): pass class Model(models.Model): relation = models.ManyToManyField(RelationModel) class Meta: ordering = ['relation'] self.assertEqual(Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'relation'.", obj=Model, id='models.E015', ), ]) def test_ordering_pointing_to_missing_field(self): class Model(models.Model): class Meta: ordering = ('missing_field',) self.assertEqual(Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'missing_field'.", obj=Model, id='models.E015', ) ]) def test_ordering_pointing_to_missing_foreignkey_field(self): class Model(models.Model): missing_fk_field = models.IntegerField() class Meta: ordering = ('missing_fk_field_id',) self.assertEqual(Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'missing_fk_field_id'.", obj=Model, id='models.E015', ) ]) def test_ordering_pointing_to_missing_related_field(self): class 
Model(models.Model): test = models.IntegerField() class Meta: ordering = ('missing_related__id',) self.assertEqual(Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'missing_related__id'.", obj=Model, id='models.E015', ) ]) def test_ordering_pointing_to_missing_related_model_field(self): class Parent(models.Model): pass class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ('parent__missing_field',) self.assertEqual(Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent__missing_field'.", obj=Child, id='models.E015', ) ]) def test_ordering_pointing_to_non_related_field(self): class Child(models.Model): parent = models.IntegerField() class Meta: ordering = ('parent__missing_field',) self.assertEqual(Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent__missing_field'.", obj=Child, id='models.E015', ) ]) def test_ordering_pointing_to_two_related_model_field(self): class Parent2(models.Model): pass class Parent1(models.Model): parent2 = models.ForeignKey(Parent2, models.CASCADE) class Child(models.Model): parent1 = models.ForeignKey(Parent1, models.CASCADE) class Meta: ordering = ('parent1__parent2__missing_field',) self.assertEqual(Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent1__parent2__missing_field'.", obj=Child, id='models.E015', ) ]) def test_ordering_pointing_multiple_times_to_model_fields(self): class Parent(models.Model): field1 = models.CharField(max_length=100) field2 = models.CharField(max_length=100) class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ('parent__field1__field2',) self.assertEqual(Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent__field1__field2'.", obj=Child, id='models.E015', ) ]) def test_ordering_allows_registered_lookups(self): class Model(models.Model): test = models.CharField(max_length=100) class Meta: ordering = ('test__lower',) with register_lookup(models.CharField, Lower): self.assertEqual(Model.check(), []) def test_ordering_pointing_to_lookup_not_transform(self): class Model(models.Model): test = models.CharField(max_length=100) class Meta: ordering = ('test__isnull',) self.assertEqual(Model.check(), []) def test_ordering_pointing_to_related_model_pk(self): class Parent(models.Model): pass class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ('parent__pk',) self.assertEqual(Child.check(), []) def test_ordering_pointing_to_foreignkey_field(self): class Parent(models.Model): pass class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ('parent_id',) self.assertFalse(Child.check()) def test_name_beginning_with_underscore(self): class _Model(models.Model): pass self.assertEqual(_Model.check(), [ Error( "The model name '_Model' cannot start or end with an underscore " "as it collides with the query lookup syntax.", obj=_Model, id='models.E023', ) ]) def test_name_ending_with_underscore(self): class Model_(models.Model): pass self.assertEqual(Model_.check(), [ Error( "The model name 'Model_' cannot start or end with an underscore " "as it collides with the query lookup syntax.", obj=Model_, id='models.E023', ) ]) def test_name_contains_double_underscores(self): class Test__Model(models.Model): pass 
self.assertEqual(Test__Model.check(), [ Error( "The model name 'Test__Model' cannot contain double underscores " "as it collides with the query lookup syntax.", obj=Test__Model, id='models.E024', ) ]) def test_property_and_related_field_accessor_clash(self): class Model(models.Model): fk = models.ForeignKey('self', models.CASCADE) @property def fk_id(self): pass self.assertEqual(Model.check(), [ Error( "The property 'fk_id' clashes with a related field accessor.", obj=Model, id='models.E025', ) ]) def test_single_primary_key(self): class Model(models.Model): foo = models.IntegerField(primary_key=True) bar = models.IntegerField(primary_key=True) self.assertEqual(Model.check(), [ Error( "The model cannot have more than one field with 'primary_key=True'.", obj=Model, id='models.E026', ) ]) @override_settings(TEST_SWAPPED_MODEL_BAD_VALUE='not-a-model') def test_swappable_missing_app_name(self): class Model(models.Model): class Meta: swappable = 'TEST_SWAPPED_MODEL_BAD_VALUE' self.assertEqual(Model.check(), [ Error( "'TEST_SWAPPED_MODEL_BAD_VALUE' is not of the form 'app_label.app_name'.", id='models.E001', ), ]) @override_settings(TEST_SWAPPED_MODEL_BAD_MODEL='not_an_app.Target') def test_swappable_missing_app(self): class Model(models.Model): class Meta: swappable = 'TEST_SWAPPED_MODEL_BAD_MODEL' self.assertEqual(Model.check(), [ Error( "'TEST_SWAPPED_MODEL_BAD_MODEL' references 'not_an_app.Target', " 'which has not been installed, or is abstract.', id='models.E002', ), ]) def test_two_m2m_through_same_relationship(self): class Person(models.Model): pass class Group(models.Model): primary = models.ManyToManyField(Person, through='Membership', related_name='primary') secondary = models.ManyToManyField(Person, through='Membership', related_name='secondary') class Membership(models.Model): person = models.ForeignKey(Person, models.CASCADE) group = models.ForeignKey(Group, models.CASCADE) self.assertEqual(Group.check(), [ Error( "The model has two identical many-to-many relations through " "the intermediate model 'invalid_models_tests.Membership'.", obj=Group, id='models.E003', ) ]) def test_two_m2m_through_same_model_with_different_through_fields(self): class Country(models.Model): pass class ShippingMethod(models.Model): to_countries = models.ManyToManyField( Country, through='ShippingMethodPrice', through_fields=('method', 'to_country'), ) from_countries = models.ManyToManyField( Country, through='ShippingMethodPrice', through_fields=('method', 'from_country'), related_name='+', ) class ShippingMethodPrice(models.Model): method = models.ForeignKey(ShippingMethod, models.CASCADE) to_country = models.ForeignKey(Country, models.CASCADE) from_country = models.ForeignKey(Country, models.CASCADE) self.assertEqual(ShippingMethod.check(), []) def test_onetoone_with_parent_model(self): class Place(models.Model): pass class ParkingLot(Place): other_place = models.OneToOneField(Place, models.CASCADE, related_name='other_parking') self.assertEqual(ParkingLot.check(), []) def test_onetoone_with_explicit_parent_link_parent_model(self): class Place(models.Model): pass class ParkingLot(Place): place = models.OneToOneField(Place, models.CASCADE, parent_link=True, primary_key=True) other_place = models.OneToOneField(Place, models.CASCADE, related_name='other_parking') self.assertEqual(ParkingLot.check(), []) def test_m2m_table_name_clash(self): class Foo(models.Model): bar = models.ManyToManyField('Bar', db_table='myapp_bar') class Meta: db_table = 'myapp_foo' class Bar(models.Model): class Meta: db_table = 
'myapp_bar' self.assertEqual(Foo.check(), [ Error( "The field's intermediary table 'myapp_bar' clashes with the " "table name of 'invalid_models_tests.Bar'.", obj=Foo._meta.get_field('bar'), id='fields.E340', ) ]) @override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter']) def test_m2m_table_name_clash_database_routers_installed(self): class Foo(models.Model): bar = models.ManyToManyField('Bar', db_table='myapp_bar') class Meta: db_table = 'myapp_foo' class Bar(models.Model): class Meta: db_table = 'myapp_bar' self.assertEqual(Foo.check(), [ Warning( "The field's intermediary table 'myapp_bar' clashes with the " "table name of 'invalid_models_tests.Bar'.", obj=Foo._meta.get_field('bar'), hint=( "You have configured settings.DATABASE_ROUTERS. Verify " "that the table of 'invalid_models_tests.Bar' is " "correctly routed to a separate database." ), id='fields.W344', ), ]) def test_m2m_field_table_name_clash(self): class Foo(models.Model): pass class Bar(models.Model): foos = models.ManyToManyField(Foo, db_table='clash') class Baz(models.Model): foos = models.ManyToManyField(Foo, db_table='clash') self.assertEqual(Bar.check() + Baz.check(), [ Error( "The field's intermediary table 'clash' clashes with the " "table name of 'invalid_models_tests.Baz.foos'.", obj=Bar._meta.get_field('foos'), id='fields.E340', ), Error( "The field's intermediary table 'clash' clashes with the " "table name of 'invalid_models_tests.Bar.foos'.", obj=Baz._meta.get_field('foos'), id='fields.E340', ) ]) @override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter']) def test_m2m_field_table_name_clash_database_routers_installed(self): class Foo(models.Model): pass class Bar(models.Model): foos = models.ManyToManyField(Foo, db_table='clash') class Baz(models.Model): foos = models.ManyToManyField(Foo, db_table='clash') self.assertEqual(Bar.check() + Baz.check(), [ Warning( "The field's intermediary table 'clash' clashes with the " "table name of 'invalid_models_tests.%s.foos'." % clashing_model, obj=model_cls._meta.get_field('foos'), hint=( "You have configured settings.DATABASE_ROUTERS. Verify " "that the table of 'invalid_models_tests.%s.foos' is " "correctly routed to a separate database." % clashing_model ), id='fields.W344', ) for model_cls, clashing_model in [(Bar, 'Baz'), (Baz, 'Bar')] ]) def test_m2m_autogenerated_table_name_clash(self): class Foo(models.Model): class Meta: db_table = 'bar_foos' class Bar(models.Model): # The autogenerated `db_table` will be bar_foos. foos = models.ManyToManyField(Foo) class Meta: db_table = 'bar' self.assertEqual(Bar.check(), [ Error( "The field's intermediary table 'bar_foos' clashes with the " "table name of 'invalid_models_tests.Foo'.", obj=Bar._meta.get_field('foos'), id='fields.E340', ) ]) @override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter']) def test_m2m_autogenerated_table_name_clash_database_routers_installed(self): class Foo(models.Model): class Meta: db_table = 'bar_foos' class Bar(models.Model): # The autogenerated db_table is bar_foos. foos = models.ManyToManyField(Foo) class Meta: db_table = 'bar' self.assertEqual(Bar.check(), [ Warning( "The field's intermediary table 'bar_foos' clashes with the " "table name of 'invalid_models_tests.Foo'.", obj=Bar._meta.get_field('foos'), hint=( "You have configured settings.DATABASE_ROUTERS. Verify " "that the table of 'invalid_models_tests.Foo' is " "correctly routed to a separate database." 
), id='fields.W344', ), ]) def test_m2m_unmanaged_shadow_models_not_checked(self): class A1(models.Model): pass class C1(models.Model): mm_a = models.ManyToManyField(A1, db_table='d1') # Unmanaged models that shadow the above models. Reused table names # shouldn't be flagged by any checks. class A2(models.Model): class Meta: managed = False class C2(models.Model): mm_a = models.ManyToManyField(A2, through='Intermediate') class Meta: managed = False class Intermediate(models.Model): a2 = models.ForeignKey(A2, models.CASCADE, db_column='a1_id') c2 = models.ForeignKey(C2, models.CASCADE, db_column='c1_id') class Meta: db_table = 'd1' managed = False self.assertEqual(C1.check(), []) self.assertEqual(C2.check(), []) def test_m2m_to_concrete_and_proxy_allowed(self): class A(models.Model): pass class Through(models.Model): a = models.ForeignKey('A', models.CASCADE) c = models.ForeignKey('C', models.CASCADE) class ThroughProxy(Through): class Meta: proxy = True class C(models.Model): mm_a = models.ManyToManyField(A, through=Through) mm_aproxy = models.ManyToManyField(A, through=ThroughProxy, related_name='proxied_m2m') self.assertEqual(C.check(), []) @isolate_apps('django.contrib.auth', kwarg_name='apps') def test_lazy_reference_checks(self, apps): class DummyModel(models.Model): author = models.ForeignKey('Author', models.CASCADE) class Meta: app_label = 'invalid_models_tests' class DummyClass: def __call__(self, **kwargs): pass def dummy_method(self): pass def dummy_function(*args, **kwargs): pass apps.lazy_model_operation(dummy_function, ('auth', 'imaginarymodel')) apps.lazy_model_operation(dummy_function, ('fanciful_app', 'imaginarymodel')) post_init.connect(dummy_function, sender='missing-app.Model', apps=apps) post_init.connect(DummyClass(), sender='missing-app.Model', apps=apps) post_init.connect(DummyClass().dummy_method, sender='missing-app.Model', apps=apps) self.assertEqual(_check_lazy_references(apps), [ Error( "%r contains a lazy reference to auth.imaginarymodel, " "but app 'auth' doesn't provide model 'imaginarymodel'." % dummy_function, obj=dummy_function, id='models.E022', ), Error( "%r contains a lazy reference to fanciful_app.imaginarymodel, " "but app 'fanciful_app' isn't installed." 
% dummy_function, obj=dummy_function, id='models.E022', ), Error( "An instance of class 'DummyClass' was connected to " "the 'post_init' signal with a lazy reference to the sender " "'missing-app.model', but app 'missing-app' isn't installed.", hint=None, obj='invalid_models_tests.test_models', id='signals.E001', ), Error( "Bound method 'DummyClass.dummy_method' was connected to the " "'post_init' signal with a lazy reference to the sender " "'missing-app.model', but app 'missing-app' isn't installed.", hint=None, obj='invalid_models_tests.test_models', id='signals.E001', ), Error( "The field invalid_models_tests.DummyModel.author was declared " "with a lazy reference to 'invalid_models_tests.author', but app " "'invalid_models_tests' isn't installed.", hint=None, obj=DummyModel.author.field, id='fields.E307', ), Error( "The function 'dummy_function' was connected to the 'post_init' " "signal with a lazy reference to the sender " "'missing-app.model', but app 'missing-app' isn't installed.", hint=None, obj='invalid_models_tests.test_models', id='signals.E001', ), ]) @isolate_apps('invalid_models_tests') class JSONFieldTests(TestCase): @skipUnlessDBFeature('supports_json_field') def test_ordering_pointing_to_json_field_value(self): class Model(models.Model): field = models.JSONField() class Meta: ordering = ['field__value'] self.assertEqual(Model.check(databases=self.databases), []) def test_check_jsonfield(self): class Model(models.Model): field = models.JSONField() error = Error( '%s does not support JSONFields.' % connection.display_name, obj=Model, id='fields.E180', ) expected = [] if connection.features.supports_json_field else [error] self.assertEqual(Model.check(databases=self.databases), expected) def test_check_jsonfield_required_db_features(self): class Model(models.Model): field = models.JSONField() class Meta: required_db_features = {'supports_json_field'} self.assertEqual(Model.check(databases=self.databases), []) @isolate_apps('invalid_models_tests') class ConstraintsTests(TestCase): def test_check_constraints(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [models.CheckConstraint(check=models.Q(age__gte=18), name='is_adult')] errors = Model.check(databases=self.databases) warn = Warning( '%s does not support check constraints.' % connection.display_name, hint=( "A constraint won't be created. Silence this warning if you " "don't care about it." 
), obj=Model, id='models.W027', ) expected = [] if connection.features.supports_table_check_constraints else [warn] self.assertCountEqual(errors, expected) def test_check_constraints_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_table_check_constraints'} constraints = [models.CheckConstraint(check=models.Q(age__gte=18), name='is_adult')] self.assertEqual(Model.check(databases=self.databases), []) def test_check_constraint_pointing_to_missing_field(self): class Model(models.Model): class Meta: required_db_features = {'supports_table_check_constraints'} constraints = [ models.CheckConstraint( name='name', check=models.Q(missing_field=2), ), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field " "'missing_field'.", obj=Model, id='models.E012', ), ] if connection.features.supports_table_check_constraints else []) @skipUnlessDBFeature('supports_table_check_constraints') def test_check_constraint_pointing_to_reverse_fk(self): class Model(models.Model): parent = models.ForeignKey('self', models.CASCADE, related_name='parents') class Meta: constraints = [ models.CheckConstraint(name='name', check=models.Q(parents=3)), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field 'parents'.", obj=Model, id='models.E012', ), ]) @skipUnlessDBFeature('supports_table_check_constraints') def test_check_constraint_pointing_to_reverse_o2o(self): class Model(models.Model): parent = models.OneToOneField('self', models.CASCADE) class Meta: constraints = [ models.CheckConstraint( name='name', check=models.Q(model__isnull=True), ), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field 'model'.", obj=Model, id='models.E012', ), ]) @skipUnlessDBFeature('supports_table_check_constraints') def test_check_constraint_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: constraints = [ models.CheckConstraint(name='name', check=models.Q(m2m=2)), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'constraints'.", obj=Model, id='models.E013', ), ]) @skipUnlessDBFeature('supports_table_check_constraints') def test_check_constraint_pointing_to_fk(self): class Target(models.Model): pass class Model(models.Model): fk_1 = models.ForeignKey(Target, models.CASCADE, related_name='target_1') fk_2 = models.ForeignKey(Target, models.CASCADE, related_name='target_2') class Meta: constraints = [ models.CheckConstraint( name='name', check=models.Q(fk_1_id=2) | models.Q(fk_2=2), ), ] self.assertEqual(Model.check(databases=self.databases), []) @skipUnlessDBFeature('supports_table_check_constraints') def test_check_constraint_pointing_to_pk(self): class Model(models.Model): age = models.SmallIntegerField() class Meta: constraints = [ models.CheckConstraint( name='name', check=models.Q(pk__gt=5) & models.Q(age__gt=models.F('pk')), ), ] self.assertEqual(Model.check(databases=self.databases), []) @skipUnlessDBFeature('supports_table_check_constraints') def test_check_constraint_pointing_to_non_local_field(self): class Parent(models.Model): field1 = models.IntegerField() class Child(Parent): pass class Meta: constraints = [ models.CheckConstraint(name='name', check=models.Q(field1=1)), ] 
self.assertEqual(Child.check(databases=self.databases), [ Error( "'constraints' refers to field 'field1' which is not local to " "model 'Child'.", hint='This issue may be caused by multi-table inheritance.', obj=Child, id='models.E016', ), ]) @skipUnlessDBFeature('supports_table_check_constraints') def test_check_constraint_pointing_to_joined_fields(self): class Model(models.Model): name = models.CharField(max_length=10) field1 = models.PositiveSmallIntegerField() field2 = models.PositiveSmallIntegerField() field3 = models.PositiveSmallIntegerField() parent = models.ForeignKey('self', models.CASCADE) previous = models.OneToOneField('self', models.CASCADE, related_name='next') class Meta: constraints = [ models.CheckConstraint( name='name1', check=models.Q( field1__lt=models.F('parent__field1') + models.F('parent__field2') ) ), models.CheckConstraint( name='name2', check=models.Q(name=Lower('parent__name')) ), models.CheckConstraint( name='name3', check=models.Q(parent__field3=models.F('field1')) ), models.CheckConstraint( name='name4', check=models.Q(name=Lower('previous__name')), ), ] joined_fields = [ 'parent__field1', 'parent__field2', 'parent__field3', 'parent__name', 'previous__name', ] errors = Model.check(databases=self.databases) expected_errors = [ Error( "'constraints' refers to the joined field '%s'." % field_name, obj=Model, id='models.E041', ) for field_name in joined_fields ] self.assertCountEqual(errors, expected_errors) @skipUnlessDBFeature('supports_table_check_constraints') def test_check_constraint_pointing_to_joined_fields_complex_check(self): class Model(models.Model): name = models.PositiveSmallIntegerField() field1 = models.PositiveSmallIntegerField() field2 = models.PositiveSmallIntegerField() parent = models.ForeignKey('self', models.CASCADE) class Meta: constraints = [ models.CheckConstraint( name='name', check=models.Q( ( models.Q(name='test') & models.Q(field1__lt=models.F('parent__field1')) ) | ( models.Q(name__startswith=Lower('parent__name')) & models.Q(field1__gte=( models.F('parent__field1') + models.F('parent__field2') )) ) ) | (models.Q(name='test1')) ), ] joined_fields = ['parent__field1', 'parent__field2', 'parent__name'] errors = Model.check(databases=self.databases) expected_errors = [ Error( "'constraints' refers to the joined field '%s'." % field_name, obj=Model, id='models.E041', ) for field_name in joined_fields ] self.assertCountEqual(errors, expected_errors) def test_unique_constraint_with_condition(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_gte_100', condition=models.Q(age__gte=100), ), ] errors = Model.check(databases=self.databases) expected = [] if connection.features.supports_partial_indexes else [ Warning( '%s does not support unique constraints with conditions.' % connection.display_name, hint=( "A constraint won't be created. Silence this warning if " "you don't care about it." 
), obj=Model, id='models.W036', ), ] self.assertEqual(errors, expected) def test_unique_constraint_with_condition_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_partial_indexes'} constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_gte_100', condition=models.Q(age__gte=100), ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_unique_constraint_condition_pointing_to_missing_field(self): class Model(models.Model): age = models.SmallIntegerField() class Meta: required_db_features = {'supports_partial_indexes'} constraints = [ models.UniqueConstraint( name='name', fields=['age'], condition=models.Q(missing_field=2), ), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field " "'missing_field'.", obj=Model, id='models.E012', ), ] if connection.features.supports_partial_indexes else []) def test_unique_constraint_condition_pointing_to_joined_fields(self): class Model(models.Model): age = models.SmallIntegerField() parent = models.ForeignKey('self', models.CASCADE) class Meta: required_db_features = {'supports_partial_indexes'} constraints = [ models.UniqueConstraint( name='name', fields=['age'], condition=models.Q(parent__age__lt=2), ), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to the joined field 'parent__age__lt'.", obj=Model, id='models.E041', ) ] if connection.features.supports_partial_indexes else []) def test_unique_constraint_pointing_to_reverse_o2o(self): class Model(models.Model): parent = models.OneToOneField('self', models.CASCADE) class Meta: required_db_features = {'supports_partial_indexes'} constraints = [ models.UniqueConstraint( fields=['parent'], name='name', condition=models.Q(model__isnull=True), ), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field 'model'.", obj=Model, id='models.E012', ), ] if connection.features.supports_partial_indexes else []) def test_deferrable_unique_constraint(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_deferrable', deferrable=models.Deferrable.DEFERRED, ), ] errors = Model.check(databases=self.databases) expected = [] if connection.features.supports_deferrable_unique_constraints else [ Warning( '%s does not support deferrable unique constraints.' % connection.display_name, hint=( "A constraint won't be created. Silence this warning if " "you don't care about it." 
), obj=Model, id='models.W038', ), ] self.assertEqual(errors, expected) def test_deferrable_unique_constraint_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_deferrable_unique_constraints'} constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_deferrable', deferrable=models.Deferrable.IMMEDIATE, ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_unique_constraint_pointing_to_missing_field(self): class Model(models.Model): class Meta: constraints = [models.UniqueConstraint(fields=['missing_field'], name='name')] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field " "'missing_field'.", obj=Model, id='models.E012', ), ]) def test_unique_constraint_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: constraints = [models.UniqueConstraint(fields=['m2m'], name='name')] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'constraints'.", obj=Model, id='models.E013', ), ]) def test_unique_constraint_pointing_to_non_local_field(self): class Parent(models.Model): field1 = models.IntegerField() class Child(Parent): field2 = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint(fields=['field2', 'field1'], name='name'), ] self.assertEqual(Child.check(databases=self.databases), [ Error( "'constraints' refers to field 'field1' which is not local to " "model 'Child'.", hint='This issue may be caused by multi-table inheritance.', obj=Child, id='models.E016', ), ]) def test_unique_constraint_pointing_to_fk(self): class Target(models.Model): pass class Model(models.Model): fk_1 = models.ForeignKey(Target, models.CASCADE, related_name='target_1') fk_2 = models.ForeignKey(Target, models.CASCADE, related_name='target_2') class Meta: constraints = [ models.UniqueConstraint(fields=['fk_1_id', 'fk_2'], name='name'), ] self.assertEqual(Model.check(databases=self.databases), []) def test_unique_constraint_with_include(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_include_id', include=['id'], ), ] errors = Model.check(databases=self.databases) expected = [] if connection.features.supports_covering_indexes else [ Warning( '%s does not support unique constraints with non-key columns.' % connection.display_name, hint=( "A constraint won't be created. Silence this warning if " "you don't care about it." 
), obj=Model, id='models.W039', ), ] self.assertEqual(errors, expected) def test_unique_constraint_with_include_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_covering_indexes'} constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_include_id', include=['id'], ), ] self.assertEqual(Model.check(databases=self.databases), []) @skipUnlessDBFeature('supports_covering_indexes') def test_unique_constraint_include_pointing_to_missing_field(self): class Model(models.Model): class Meta: constraints = [ models.UniqueConstraint( fields=['id'], include=['missing_field'], name='name', ), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field " "'missing_field'.", obj=Model, id='models.E012', ), ]) @skipUnlessDBFeature('supports_covering_indexes') def test_unique_constraint_include_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: constraints = [ models.UniqueConstraint( fields=['id'], include=['m2m'], name='name', ), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'constraints'.", obj=Model, id='models.E013', ), ]) @skipUnlessDBFeature('supports_covering_indexes') def test_unique_constraint_include_pointing_to_non_local_field(self): class Parent(models.Model): field1 = models.IntegerField() class Child(Parent): field2 = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=['field2'], include=['field1'], name='name', ), ] self.assertEqual(Child.check(databases=self.databases), [ Error( "'constraints' refers to field 'field1' which is not local to " "model 'Child'.", hint='This issue may be caused by multi-table inheritance.', obj=Child, id='models.E016', ), ]) @skipUnlessDBFeature('supports_covering_indexes') def test_unique_constraint_include_pointing_to_fk(self): class Target(models.Model): pass class Model(models.Model): fk_1 = models.ForeignKey(Target, models.CASCADE, related_name='target_1') fk_2 = models.ForeignKey(Target, models.CASCADE, related_name='target_2') class Meta: constraints = [ models.UniqueConstraint( fields=['id'], include=['fk_1_id', 'fk_2'], name='name', ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_func_unique_constraint(self): class Model(models.Model): name = models.CharField(max_length=10) class Meta: constraints = [ models.UniqueConstraint(Lower('name'), name='lower_name_uq'), ] warn = Warning( '%s does not support unique constraints on expressions.' % connection.display_name, hint=( "A constraint won't be created. Silence this warning if you " "don't care about it." 
), obj=Model, id='models.W044', ) expected = [] if connection.features.supports_expression_indexes else [warn] self.assertEqual(Model.check(databases=self.databases), expected) def test_func_unique_constraint_required_db_features(self): class Model(models.Model): name = models.CharField(max_length=10) class Meta: constraints = [ models.UniqueConstraint(Lower('name'), name='lower_name_unq'), ] required_db_features = {'supports_expression_indexes'} self.assertEqual(Model.check(databases=self.databases), []) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint_expression_custom_lookup(self): class Model(models.Model): height = models.IntegerField() weight = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( models.F('height') / (models.F('weight__abs') + models.Value(5)), name='name', ), ] with register_lookup(models.IntegerField, Abs): self.assertEqual(Model.check(databases=self.databases), []) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint_pointing_to_missing_field(self): class Model(models.Model): class Meta: constraints = [ models.UniqueConstraint(Lower('missing_field').desc(), name='name'), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field " "'missing_field'.", obj=Model, id='models.E012', ), ]) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint_pointing_to_missing_field_nested(self): class Model(models.Model): class Meta: constraints = [ models.UniqueConstraint(Abs(Round('missing_field')), name='name'), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field " "'missing_field'.", obj=Model, id='models.E012', ), ]) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: constraints = [models.UniqueConstraint(Lower('m2m'), name='name')] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'constraints'.", obj=Model, id='models.E013', ), ]) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint_pointing_to_non_local_field(self): class Foo(models.Model): field1 = models.CharField(max_length=15) class Bar(Foo): class Meta: constraints = [models.UniqueConstraint(Lower('field1'), name='name')] self.assertEqual(Bar.check(databases=self.databases), [ Error( "'constraints' refers to field 'field1' which is not local to " "model 'Bar'.", hint='This issue may be caused by multi-table inheritance.', obj=Bar, id='models.E016', ), ]) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint_pointing_to_fk(self): class Foo(models.Model): id = models.CharField(primary_key=True, max_length=255) class Bar(models.Model): foo_1 = models.ForeignKey(Foo, models.CASCADE, related_name='bar_1') foo_2 = models.ForeignKey(Foo, models.CASCADE, related_name='bar_2') class Meta: constraints = [ models.UniqueConstraint( Lower('foo_1_id'), Lower('foo_2'), name='name', ), ] self.assertEqual(Bar.check(databases=self.databases), [])
import datetime
import pickle
import unittest
import uuid
from collections import namedtuple
from copy import deepcopy
from decimal import Decimal
from unittest import mock

from django.core.exceptions import FieldError
from django.db import DatabaseError, NotSupportedError, connection
from django.db.models import (
    AutoField, Avg, BinaryField, BooleanField, Case, CharField, Count,
    DateField, DateTimeField, DecimalField, DurationField, Exists, Expression,
    ExpressionList, ExpressionWrapper, F, FloatField, Func, IntegerField, Max,
    Min, Model, OrderBy, OuterRef, Q, StdDev, Subquery, Sum, TimeField,
    UUIDField, Value, Variance, When,
)
from django.db.models.expressions import (
    Col, Combinable, CombinedExpression, RawSQL, Ref,
)
from django.db.models.functions import (
    Coalesce, Concat, Left, Length, Lower, Substr, Upper,
)
from django.db.models.sql import constants
from django.db.models.sql.datastructures import Join
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import (
    Approximate, CaptureQueriesContext, isolate_apps, register_lookup,
)
from django.utils.functional import SimpleLazyObject

from .models import (
    UUID, UUIDPK, Company, Employee, Experiment, Manager, Number,
    RemoteEmployee, Result, SimulationRun, Time,
)


class BasicExpressionsTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        cls.example_inc = Company.objects.create(
            name="Example Inc.", num_employees=2300, num_chairs=5,
            ceo=Employee.objects.create(firstname="Joe", lastname="Smith", salary=10)
        )
        cls.foobar_ltd = Company.objects.create(
            name="Foobar Ltd.", num_employees=3, num_chairs=4, based_in_eu=True,
            ceo=Employee.objects.create(firstname="Frank", lastname="Meyer", salary=20)
        )
        cls.max = Employee.objects.create(firstname='Max', lastname='Mustermann', salary=30)
        cls.gmbh = Company.objects.create(name='Test GmbH', num_employees=32, num_chairs=1, ceo=cls.max)

    def setUp(self):
        self.company_query = Company.objects.values(
            "name", "num_employees", "num_chairs"
        ).order_by(
            "name", "num_employees", "num_chairs"
        )

    def test_annotate_values_aggregate(self):
        companies = Company.objects.annotate(
            salaries=F('ceo__salary'),
        ).values('num_employees', 'salaries').aggregate(
            result=Sum(
                F('salaries') + F('num_employees'),
                output_field=IntegerField()
            ),
        )
        self.assertEqual(companies['result'], 2395)

    def test_annotate_values_filter(self):
        companies = Company.objects.annotate(
            foo=RawSQL('%s', ['value']),
        ).filter(foo='value').order_by('name')
        self.assertSequenceEqual(
            companies,
            [self.example_inc, self.foobar_ltd, self.gmbh],
        )

    def test_annotate_values_count(self):
        companies = Company.objects.annotate(foo=RawSQL('%s', ['value']))
        self.assertEqual(companies.count(), 3)

    @skipUnlessDBFeature('supports_boolean_expr_in_select_clause')
    def test_filtering_on_annotate_that_uses_q(self):
        self.assertEqual(
            Company.objects.annotate(
                num_employees_check=ExpressionWrapper(Q(num_employees__gt=3), output_field=BooleanField())
            ).filter(num_employees_check=True).count(),
            2,
        )

    def test_filtering_on_q_that_is_boolean(self):
        self.assertEqual(
            Company.objects.filter(
                ExpressionWrapper(Q(num_employees__gt=3), output_field=BooleanField())
            ).count(),
            2,
        )

    def test_filtering_on_rawsql_that_is_boolean(self):
        self.assertEqual(
            Company.objects.filter(
                RawSQL('num_employees > %s', (3,), output_field=BooleanField()),
            ).count(),
            2,
        )

    def test_filter_inter_attribute(self):
        # We can filter on attribute relationships on same model obj, e.g.
        # find companies where the number of employees is greater
        # than the number of chairs.
self.assertSequenceEqual( self.company_query.filter(num_employees__gt=F("num_chairs")), [ { "num_chairs": 5, "name": "Example Inc.", "num_employees": 2300, }, { "num_chairs": 1, "name": "Test GmbH", "num_employees": 32 }, ], ) def test_update(self): # We can set one field to have the value of another field # Make sure we have enough chairs self.company_query.update(num_chairs=F("num_employees")) self.assertSequenceEqual( self.company_query, [ { "num_chairs": 2300, "name": "Example Inc.", "num_employees": 2300 }, { "num_chairs": 3, "name": "Foobar Ltd.", "num_employees": 3 }, { "num_chairs": 32, "name": "Test GmbH", "num_employees": 32 } ], ) def test_arithmetic(self): # We can perform arithmetic operations in expressions # Make sure we have 2 spare chairs self.company_query.update(num_chairs=F("num_employees") + 2) self.assertSequenceEqual( self.company_query, [ { 'num_chairs': 2302, 'name': 'Example Inc.', 'num_employees': 2300 }, { 'num_chairs': 5, 'name': 'Foobar Ltd.', 'num_employees': 3 }, { 'num_chairs': 34, 'name': 'Test GmbH', 'num_employees': 32 } ], ) def test_order_of_operations(self): # Law of order of operations is followed self.company_query.update(num_chairs=F('num_employees') + 2 * F('num_employees')) self.assertSequenceEqual( self.company_query, [ { 'num_chairs': 6900, 'name': 'Example Inc.', 'num_employees': 2300 }, { 'num_chairs': 9, 'name': 'Foobar Ltd.', 'num_employees': 3 }, { 'num_chairs': 96, 'name': 'Test GmbH', 'num_employees': 32 } ], ) def test_parenthesis_priority(self): # Law of order of operations can be overridden by parentheses self.company_query.update(num_chairs=(F('num_employees') + 2) * F('num_employees')) self.assertSequenceEqual( self.company_query, [ { 'num_chairs': 5294600, 'name': 'Example Inc.', 'num_employees': 2300 }, { 'num_chairs': 15, 'name': 'Foobar Ltd.', 'num_employees': 3 }, { 'num_chairs': 1088, 'name': 'Test GmbH', 'num_employees': 32 } ], ) def test_update_with_fk(self): # ForeignKey can become updated with the value of another ForeignKey. 
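# Hedged sketch of the intent (the test only checks the row count and the
# resulting point_of_contact values): update(point_of_contact=F('ceo'))
# should copy the raw foreign key column in a single statement, roughly
#   UPDATE expressions_company SET point_of_contact_id = ceo_id
# assuming Django's default *_id column naming.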
self.assertEqual(Company.objects.update(point_of_contact=F('ceo')), 3) self.assertQuerysetEqual( Company.objects.all(), ['Joe Smith', 'Frank Meyer', 'Max Mustermann'], lambda c: str(c.point_of_contact), ordered=False ) def test_update_with_none(self): Number.objects.create(integer=1, float=1.0) Number.objects.create(integer=2) Number.objects.filter(float__isnull=False).update(float=Value(None)) self.assertQuerysetEqual( Number.objects.all(), [None, None], lambda n: n.float, ordered=False ) def test_filter_with_join(self): # F Expressions can also span joins Company.objects.update(point_of_contact=F('ceo')) c = Company.objects.first() c.point_of_contact = Employee.objects.create(firstname="Guido", lastname="van Rossum") c.save() self.assertQuerysetEqual( Company.objects.filter(ceo__firstname=F('point_of_contact__firstname')), ['Foobar Ltd.', 'Test GmbH'], lambda c: c.name, ordered=False ) Company.objects.exclude( ceo__firstname=F("point_of_contact__firstname") ).update(name="foo") self.assertEqual( Company.objects.exclude( ceo__firstname=F('point_of_contact__firstname') ).get().name, "foo", ) msg = "Joined field references are not permitted in this query" with self.assertRaisesMessage(FieldError, msg): Company.objects.exclude( ceo__firstname=F('point_of_contact__firstname') ).update(name=F('point_of_contact__lastname')) def test_object_update(self): # F expressions can be used to update attributes on single objects self.gmbh.num_employees = F('num_employees') + 4 self.gmbh.save() self.gmbh.refresh_from_db() self.assertEqual(self.gmbh.num_employees, 36) def test_new_object_save(self): # We should be able to use Funcs when inserting new data test_co = Company(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max) test_co.save() test_co.refresh_from_db() self.assertEqual(test_co.name, "upper") def test_new_object_create(self): test_co = Company.objects.create(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max) test_co.refresh_from_db() self.assertEqual(test_co.name, "upper") def test_object_create_with_aggregate(self): # Aggregates are not allowed when inserting new data msg = 'Aggregate functions are not allowed in this query (num_employees=Max(Value(1))).' with self.assertRaisesMessage(FieldError, msg): Company.objects.create( name='Company', num_employees=Max(Value(1)), num_chairs=1, ceo=Employee.objects.create(firstname="Just", lastname="Doit", salary=30), ) def test_object_update_fk(self): # F expressions cannot be used to update attributes which are foreign # keys, or attributes which involve joins. test_gmbh = Company.objects.get(pk=self.gmbh.pk) msg = 'F(ceo)": "Company.point_of_contact" must be a "Employee" instance.' 
with self.assertRaisesMessage(ValueError, msg): test_gmbh.point_of_contact = F('ceo') test_gmbh.point_of_contact = self.gmbh.ceo test_gmbh.save() test_gmbh.name = F('ceo__lastname') msg = 'Joined field references are not permitted in this query' with self.assertRaisesMessage(FieldError, msg): test_gmbh.save() def test_update_inherited_field_value(self): msg = 'Joined field references are not permitted in this query' with self.assertRaisesMessage(FieldError, msg): RemoteEmployee.objects.update(adjusted_salary=F('salary') * 5) def test_object_update_unsaved_objects(self): # F expressions cannot be used to update attributes on objects which do # not yet exist in the database acme = Company(name='The Acme Widget Co.', num_employees=12, num_chairs=5, ceo=self.max) acme.num_employees = F("num_employees") + 16 msg = ( 'Failed to insert expression "Col(expressions_company, ' 'expressions.Company.num_employees) + Value(16)" on ' 'expressions.Company.num_employees. F() expressions can only be ' 'used to update, not to insert.' ) with self.assertRaisesMessage(ValueError, msg): acme.save() acme.num_employees = 12 acme.name = Lower(F('name')) msg = ( 'Failed to insert expression "Lower(Col(expressions_company, ' 'expressions.Company.name))" on expressions.Company.name. F() ' 'expressions can only be used to update, not to insert.' ) with self.assertRaisesMessage(ValueError, msg): acme.save() def test_ticket_11722_iexact_lookup(self): Employee.objects.create(firstname="John", lastname="Doe") test = Employee.objects.create(firstname="Test", lastname="test") queryset = Employee.objects.filter(firstname__iexact=F('lastname')) self.assertSequenceEqual(queryset, [test]) def test_ticket_16731_startswith_lookup(self): Employee.objects.create(firstname="John", lastname="Doe") e2 = Employee.objects.create(firstname="Jack", lastname="Jackson") e3 = Employee.objects.create(firstname="Jack", lastname="jackson") self.assertSequenceEqual( Employee.objects.filter(lastname__startswith=F('firstname')), [e2, e3] if connection.features.has_case_insensitive_like else [e2] ) qs = Employee.objects.filter(lastname__istartswith=F('firstname')).order_by('pk') self.assertSequenceEqual(qs, [e2, e3]) def test_ticket_18375_join_reuse(self): # Reverse multijoin F() references and the lookup target the same join. # Pre #18375 the F() join was generated first and the lookup couldn't # reuse that join. qs = Employee.objects.filter(company_ceo_set__num_chairs=F('company_ceo_set__num_employees')) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_kwarg_ordering(self): # The next query was dict-randomization dependent - if the "gte=1" # was seen first, then the F() will reuse the join generated by the # gte lookup, if F() was seen first, then it generated a join the # other lookups could not reuse. qs = Employee.objects.filter( company_ceo_set__num_chairs=F('company_ceo_set__num_employees'), company_ceo_set__num_chairs__gte=1, ) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_kwarg_ordering_2(self): # Another similar case for F() than above. Now we have the same join # in two filter kwargs, one in the lhs lookup, one in F. Here pre # #18375 the amount of joins generated was random if dict # randomization was enabled, that is the generated query dependent # on which clause was seen first. 
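# Hedged sketch of the join reuse being verified here: with #18375 fixed, the
# lookup and the F() reference should share one reverse join, so the SQL is
# expected to have roughly the shape
#   SELECT ... FROM expressions_employee
#   INNER JOIN expressions_company ON (expressions_company.ceo_id = expressions_employee.id)
#   WHERE ...
# (aliases are assumed; the assertion below only counts 'JOIN' occurrences).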
qs = Employee.objects.filter( company_ceo_set__num_employees=F('pk'), pk=F('company_ceo_set__num_employees') ) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_chained_filters(self): # F() expressions do not reuse joins from previous filter. qs = Employee.objects.filter( company_ceo_set__num_employees=F('pk') ).filter( company_ceo_set__num_employees=F('company_ceo_set__num_employees') ) self.assertEqual(str(qs.query).count('JOIN'), 2) def test_order_by_exists(self): mary = Employee.objects.create(firstname='Mary', lastname='Mustermann', salary=20) mustermanns_by_seniority = Employee.objects.filter(lastname='Mustermann').order_by( # Order by whether the employee is the CEO of a company Exists(Company.objects.filter(ceo=OuterRef('pk'))).desc() ) self.assertSequenceEqual(mustermanns_by_seniority, [self.max, mary]) def test_order_by_multiline_sql(self): raw_order_by = ( RawSQL(''' CASE WHEN num_employees > 1000 THEN num_chairs ELSE 0 END ''', []).desc(), RawSQL(''' CASE WHEN num_chairs > 1 THEN 1 ELSE 0 END ''', []).asc() ) for qs in ( Company.objects.all(), Company.objects.distinct(), ): with self.subTest(qs=qs): self.assertSequenceEqual( qs.order_by(*raw_order_by), [self.example_inc, self.gmbh, self.foobar_ltd], ) def test_outerref(self): inner = Company.objects.filter(point_of_contact=OuterRef('pk')) msg = ( 'This queryset contains a reference to an outer query and may only ' 'be used in a subquery.' ) with self.assertRaisesMessage(ValueError, msg): inner.exists() outer = Employee.objects.annotate(is_point_of_contact=Exists(inner)) self.assertIs(outer.exists(), True) def test_exist_single_field_output_field(self): queryset = Company.objects.values('pk') self.assertIsInstance(Exists(queryset).output_field, BooleanField) def test_subquery(self): Company.objects.filter(name='Example Inc.').update( point_of_contact=Employee.objects.get(firstname='Joe', lastname='Smith'), ceo=self.max, ) Employee.objects.create(firstname='Bob', lastname='Brown', salary=40) qs = Employee.objects.annotate( is_point_of_contact=Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))), is_not_point_of_contact=~Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))), is_ceo_of_small_company=Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))), is_ceo_small_2=~~Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))), largest_company=Subquery(Company.objects.order_by('-num_employees').filter( Q(ceo=OuterRef('pk')) | Q(point_of_contact=OuterRef('pk')) ).values('name')[:1], output_field=CharField()) ).values( 'firstname', 'is_point_of_contact', 'is_not_point_of_contact', 'is_ceo_of_small_company', 'is_ceo_small_2', 'largest_company', ).order_by('firstname') results = list(qs) # Could use Coalesce(subq, Value('')) instead except for the bug in # cx_Oracle mentioned in #23843. 
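# For reference, the Coalesce form mentioned in the comment above would look
# roughly like
#   largest_company=Coalesce(Subquery(...), Value(''))
# which would make the manual empty-string fix-up below unnecessary; it is
# avoided here only because of the cx_Oracle issue referenced in #23843.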
bob = results[0] if bob['largest_company'] == '' and connection.features.interprets_empty_strings_as_nulls: bob['largest_company'] = None self.assertEqual(results, [ { 'firstname': 'Bob', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': False, 'is_ceo_small_2': False, 'largest_company': None, }, { 'firstname': 'Frank', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': True, 'is_ceo_small_2': True, 'largest_company': 'Foobar Ltd.', }, { 'firstname': 'Joe', 'is_point_of_contact': True, 'is_not_point_of_contact': False, 'is_ceo_of_small_company': False, 'is_ceo_small_2': False, 'largest_company': 'Example Inc.', }, { 'firstname': 'Max', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': True, 'is_ceo_small_2': True, 'largest_company': 'Example Inc.' } ]) # A less elegant way to write the same query: this uses a LEFT OUTER # JOIN and an IS NULL, inside a WHERE NOT IN which is probably less # efficient than EXISTS. self.assertCountEqual( qs.filter(is_point_of_contact=True).values('pk'), Employee.objects.exclude(company_point_of_contact_set=None).values('pk') ) def test_subquery_eq(self): qs = Employee.objects.annotate( is_ceo=Exists(Company.objects.filter(ceo=OuterRef('pk'))), is_point_of_contact=Exists( Company.objects.filter(point_of_contact=OuterRef('pk')), ), small_company=Exists( queryset=Company.objects.filter(num_employees__lt=200), ), ).filter(is_ceo=True, is_point_of_contact=False, small_company=True) self.assertNotEqual( qs.query.annotations['is_ceo'], qs.query.annotations['is_point_of_contact'], ) self.assertNotEqual( qs.query.annotations['is_ceo'], qs.query.annotations['small_company'], ) def test_in_subquery(self): # This is a contrived test (and you really wouldn't write this query), # but it is a succinct way to test the __in=Subquery() construct. 
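# Hedged note: outside of this test the same query is normally written
# without the explicit wrapper, e.g.
#   Company.objects.filter(pk__in=small_companies)
# using the queryset defined just below; Django treats a queryset passed to
# __in as an implicit subquery. The explicit Subquery() form is kept here to
# exercise the construct directly.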
small_companies = Company.objects.filter(num_employees__lt=200).values('pk') subquery_test = Company.objects.filter(pk__in=Subquery(small_companies)) self.assertCountEqual(subquery_test, [self.foobar_ltd, self.gmbh]) subquery_test2 = Company.objects.filter(pk=Subquery(small_companies.filter(num_employees=3))) self.assertCountEqual(subquery_test2, [self.foobar_ltd]) def test_uuid_pk_subquery(self): u = UUIDPK.objects.create() UUID.objects.create(uuid_fk=u) qs = UUIDPK.objects.filter(id__in=Subquery(UUID.objects.values('uuid_fk__id'))) self.assertCountEqual(qs, [u]) def test_nested_subquery(self): inner = Company.objects.filter(point_of_contact=OuterRef('pk')) outer = Employee.objects.annotate(is_point_of_contact=Exists(inner)) contrived = Employee.objects.annotate( is_point_of_contact=Subquery( outer.filter(pk=OuterRef('pk')).values('is_point_of_contact'), output_field=BooleanField(), ), ) self.assertCountEqual(contrived.values_list(), outer.values_list()) def test_nested_subquery_join_outer_ref(self): inner = Employee.objects.filter(pk=OuterRef('ceo__pk')).values('pk') qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( ceo__in=inner, ceo__pk=OuterRef('pk'), ).values('pk'), ), ) self.assertSequenceEqual( qs.values_list('ceo_company', flat=True), [self.example_inc.pk, self.foobar_ltd.pk, self.gmbh.pk], ) def test_nested_subquery_outer_ref_2(self): first = Time.objects.create(time='09:00') second = Time.objects.create(time='17:00') third = Time.objects.create(time='21:00') SimulationRun.objects.bulk_create([ SimulationRun(start=first, end=second, midpoint='12:00'), SimulationRun(start=first, end=third, midpoint='15:00'), SimulationRun(start=second, end=first, midpoint='00:00'), ]) inner = Time.objects.filter(time=OuterRef(OuterRef('time')), pk=OuterRef('start')).values('time') middle = SimulationRun.objects.annotate(other=Subquery(inner)).values('other')[:1] outer = Time.objects.annotate(other=Subquery(middle, output_field=TimeField())) # This is a contrived example. It exercises the double OuterRef form. self.assertCountEqual(outer, [first, second, third]) def test_nested_subquery_outer_ref_with_autofield(self): first = Time.objects.create(time='09:00') second = Time.objects.create(time='17:00') SimulationRun.objects.create(start=first, end=second, midpoint='12:00') inner = SimulationRun.objects.filter(start=OuterRef(OuterRef('pk'))).values('start') middle = Time.objects.annotate(other=Subquery(inner)).values('other')[:1] outer = Time.objects.annotate(other=Subquery(middle, output_field=IntegerField())) # This exercises the double OuterRef form with AutoField as pk. 
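# Reading aid (assumption about OuterRef semantics, not asserted directly):
# OuterRef(OuterRef('pk')) resolves two query levels up, so the innermost
# filter compares against the pk of the outermost Time queryset rather than
# against rows of the middle queryset.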
self.assertCountEqual(outer, [first, second]) def test_annotations_within_subquery(self): Company.objects.filter(num_employees__lt=50).update(ceo=Employee.objects.get(firstname='Frank')) inner = Company.objects.filter( ceo=OuterRef('pk') ).values('ceo').annotate(total_employees=Sum('num_employees')).values('total_employees') outer = Employee.objects.annotate(total_employees=Subquery(inner)).filter(salary__lte=Subquery(inner)) self.assertSequenceEqual( outer.order_by('-total_employees').values('salary', 'total_employees'), [{'salary': 10, 'total_employees': 2300}, {'salary': 20, 'total_employees': 35}], ) def test_subquery_references_joined_table_twice(self): inner = Company.objects.filter( num_chairs__gte=OuterRef('ceo__salary'), num_employees__gte=OuterRef('point_of_contact__salary'), ) # Another contrived example (there is no need to have a subquery here) outer = Company.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertFalse(outer.exists()) def test_subquery_filter_by_aggregate(self): Number.objects.create(integer=1000, float=1.2) Employee.objects.create(salary=1000) qs = Number.objects.annotate( min_valuable_count=Subquery( Employee.objects.filter( salary=OuterRef('integer'), ).annotate(cnt=Count('salary')).filter(cnt__gt=0).values('cnt')[:1] ), ) self.assertEqual(qs.get().float, 1.2) def test_subquery_filter_by_lazy(self): self.max.manager = Manager.objects.create(name='Manager') self.max.save() max_manager = SimpleLazyObject( lambda: Manager.objects.get(pk=self.max.manager.pk) ) qs = Company.objects.annotate( ceo_manager=Subquery( Employee.objects.filter( lastname=OuterRef('ceo__lastname'), ).values('manager'), ), ).filter(ceo_manager=max_manager) self.assertEqual(qs.get(), self.gmbh) def test_aggregate_subquery_annotation(self): with self.assertNumQueries(1) as ctx: aggregate = Company.objects.annotate( ceo_salary=Subquery( Employee.objects.filter( id=OuterRef('ceo_id'), ).values('salary') ), ).aggregate( ceo_salary_gt_20=Count('pk', filter=Q(ceo_salary__gt=20)), ) self.assertEqual(aggregate, {'ceo_salary_gt_20': 1}) # Aggregation over a subquery annotation doesn't annotate the subquery # twice in the inner query. sql = ctx.captured_queries[0]['sql'] self.assertLessEqual(sql.count('SELECT'), 3) # GROUP BY isn't required to aggregate over a query that doesn't # contain nested aggregates. 
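# Hedged reading aid: the filtered Count() above produces a grand total over
# expressions_company while the annotation stays a correlated subquery in the
# select list, so no GROUP BY clause should be needed (asserted just below).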
self.assertNotIn('GROUP BY', sql) @skipUnlessDBFeature('supports_over_clause') def test_aggregate_rawsql_annotation(self): with self.assertNumQueries(1) as ctx: aggregate = Company.objects.annotate( salary=RawSQL('SUM(num_chairs) OVER (ORDER BY num_employees)', []), ).aggregate( count=Count('pk'), ) self.assertEqual(aggregate, {'count': 3}) sql = ctx.captured_queries[0]['sql'] self.assertNotIn('GROUP BY', sql) def test_explicit_output_field(self): class FuncA(Func): output_field = CharField() class FuncB(Func): pass expr = FuncB(FuncA()) self.assertEqual(expr.output_field, FuncA.output_field) def test_outerref_mixed_case_table_name(self): inner = Result.objects.filter(result_time__gte=OuterRef('experiment__assigned')) outer = Result.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertFalse(outer.exists()) def test_outerref_with_operator(self): inner = Company.objects.filter(num_employees=OuterRef('ceo__salary') + 2) outer = Company.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertEqual(outer.get().name, 'Test GmbH') def test_nested_outerref_with_function(self): self.gmbh.point_of_contact = Employee.objects.get(lastname='Meyer') self.gmbh.save() inner = Employee.objects.filter( lastname__startswith=Left(OuterRef(OuterRef('lastname')), 1), ) qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( point_of_contact__in=inner, ceo__pk=OuterRef('pk'), ).values('name'), ), ).filter(ceo_company__isnull=False) self.assertEqual(qs.get().ceo_company, 'Test GmbH') def test_annotation_with_outerref(self): gmbh_salary = Company.objects.annotate( max_ceo_salary_raise=Subquery( Company.objects.annotate( salary_raise=OuterRef('num_employees') + F('num_employees'), ).order_by('-salary_raise').values('salary_raise')[:1], output_field=IntegerField(), ), ).get(pk=self.gmbh.pk) self.assertEqual(gmbh_salary.max_ceo_salary_raise, 2332) def test_annotation_with_nested_outerref(self): self.gmbh.point_of_contact = Employee.objects.get(lastname='Meyer') self.gmbh.save() inner = Employee.objects.annotate( outer_lastname=OuterRef(OuterRef('lastname')), ).filter(lastname__startswith=Left('outer_lastname', 1)) qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( point_of_contact__in=inner, ceo__pk=OuterRef('pk'), ).values('name'), ), ).filter(ceo_company__isnull=False) self.assertEqual(qs.get().ceo_company, 'Test GmbH') def test_pickle_expression(self): expr = Value(1) expr.convert_value # populate cached property self.assertEqual(pickle.loads(pickle.dumps(expr)), expr) def test_incorrect_field_in_F_expression(self): with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."): list(Employee.objects.filter(firstname=F('nope'))) def test_incorrect_joined_field_in_F_expression(self): with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."): list(Company.objects.filter(ceo__pk=F('point_of_contact__nope'))) def test_exists_in_filter(self): inner = Company.objects.filter(ceo=OuterRef('pk')).values('pk') qs1 = Employee.objects.filter(Exists(inner)) qs2 = Employee.objects.annotate(found=Exists(inner)).filter(found=True) self.assertCountEqual(qs1, qs2) self.assertFalse(Employee.objects.exclude(Exists(inner)).exists()) self.assertCountEqual(qs2, Employee.objects.exclude(~Exists(inner))) def test_subquery_in_filter(self): inner = Company.objects.filter(ceo=OuterRef('pk')).values('based_in_eu') self.assertSequenceEqual( Employee.objects.filter(Subquery(inner)), [self.foobar_ltd.ceo], ) def 
test_subquery_group_by_outerref_in_filter(self): inner = Company.objects.annotate( employee=OuterRef('pk'), ).values('employee').annotate( min_num_chairs=Min('num_chairs'), ).values('ceo') self.assertIs(Employee.objects.filter(pk__in=Subquery(inner)).exists(), True) def test_case_in_filter_if_boolean_output_field(self): is_ceo = Company.objects.filter(ceo=OuterRef('pk')) is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) qs = Employee.objects.filter( Case( When(Exists(is_ceo), then=True), When(Exists(is_poc), then=True), default=False, output_field=BooleanField(), ), ) self.assertCountEqual(qs, [self.example_inc.ceo, self.foobar_ltd.ceo, self.max]) def test_boolean_expression_combined(self): is_ceo = Company.objects.filter(ceo=OuterRef('pk')) is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) self.gmbh.point_of_contact = self.max self.gmbh.save() self.assertCountEqual( Employee.objects.filter(Exists(is_ceo) | Exists(is_poc)), [self.example_inc.ceo, self.foobar_ltd.ceo, self.max], ) self.assertCountEqual( Employee.objects.filter(Exists(is_ceo) & Exists(is_poc)), [self.max], ) self.assertCountEqual( Employee.objects.filter(Exists(is_ceo) & Q(salary__gte=30)), [self.max], ) self.assertCountEqual( Employee.objects.filter(Exists(is_poc) | Q(salary__lt=15)), [self.example_inc.ceo, self.max], ) self.assertCountEqual( Employee.objects.filter(Q(salary__gte=30) & Exists(is_ceo)), [self.max], ) self.assertCountEqual( Employee.objects.filter(Q(salary__lt=15) | Exists(is_poc)), [self.example_inc.ceo, self.max], ) def test_boolean_expression_combined_with_empty_Q(self): is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) self.gmbh.point_of_contact = self.max self.gmbh.save() tests = [ Exists(is_poc) & Q(), Q() & Exists(is_poc), Exists(is_poc) | Q(), Q() | Exists(is_poc), Q(Exists(is_poc)) & Q(), Q() & Q(Exists(is_poc)), Q(Exists(is_poc)) | Q(), Q() | Q(Exists(is_poc)), ] for conditions in tests: with self.subTest(conditions): self.assertCountEqual(Employee.objects.filter(conditions), [self.max]) def test_boolean_expression_in_Q(self): is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) self.gmbh.point_of_contact = self.max self.gmbh.save() self.assertCountEqual(Employee.objects.filter(Q(Exists(is_poc))), [self.max]) class IterableLookupInnerExpressionsTests(TestCase): @classmethod def setUpTestData(cls): ceo = Employee.objects.create(firstname='Just', lastname='Doit', salary=30) # MySQL requires that the values calculated for expressions don't pass # outside of the field's range, so it's inconvenient to use the values # in the more general tests. cls.c5020 = Company.objects.create(name='5020 Ltd', num_employees=50, num_chairs=20, ceo=ceo) cls.c5040 = Company.objects.create(name='5040 Ltd', num_employees=50, num_chairs=40, ceo=ceo) cls.c5050 = Company.objects.create(name='5050 Ltd', num_employees=50, num_chairs=50, ceo=ceo) cls.c5060 = Company.objects.create(name='5060 Ltd', num_employees=50, num_chairs=60, ceo=ceo) cls.c99300 = Company.objects.create(name='99300 Ltd', num_employees=99, num_chairs=300, ceo=ceo) def test_in_lookup_allows_F_expressions_and_expressions_for_integers(self): # __in lookups can use F() expressions for integers. 
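# Hedged illustration: when the members of an __in lookup are expressions,
# each member is compiled into SQL rather than passed as a literal, so
# num_employees__in=[F('num_chairs') - 10] is expected to render roughly as
#   WHERE num_employees IN ((num_chairs - 10))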
queryset = Company.objects.filter(num_employees__in=([F('num_chairs') - 10])) self.assertSequenceEqual(queryset, [self.c5060]) self.assertCountEqual( Company.objects.filter(num_employees__in=([F('num_chairs') - 10, F('num_chairs') + 10])), [self.c5040, self.c5060], ) self.assertCountEqual( Company.objects.filter( num_employees__in=([F('num_chairs') - 10, F('num_chairs'), F('num_chairs') + 10]) ), [self.c5040, self.c5050, self.c5060], ) def test_expressions_in_lookups_join_choice(self): midpoint = datetime.time(13, 0) t1 = Time.objects.create(time=datetime.time(12, 0)) t2 = Time.objects.create(time=datetime.time(14, 0)) s1 = SimulationRun.objects.create(start=t1, end=t2, midpoint=midpoint) SimulationRun.objects.create(start=t1, end=None, midpoint=midpoint) SimulationRun.objects.create(start=None, end=t2, midpoint=midpoint) SimulationRun.objects.create(start=None, end=None, midpoint=midpoint) queryset = SimulationRun.objects.filter(midpoint__range=[F('start__time'), F('end__time')]) self.assertSequenceEqual(queryset, [s1]) for alias in queryset.query.alias_map.values(): if isinstance(alias, Join): self.assertEqual(alias.join_type, constants.INNER) queryset = SimulationRun.objects.exclude(midpoint__range=[F('start__time'), F('end__time')]) self.assertQuerysetEqual(queryset, [], ordered=False) for alias in queryset.query.alias_map.values(): if isinstance(alias, Join): self.assertEqual(alias.join_type, constants.LOUTER) def test_range_lookup_allows_F_expressions_and_expressions_for_integers(self): # Range lookups can use F() expressions for integers. Company.objects.filter(num_employees__exact=F("num_chairs")) self.assertCountEqual( Company.objects.filter(num_employees__range=(F('num_chairs'), 100)), [self.c5020, self.c5040, self.c5050], ) self.assertCountEqual( Company.objects.filter(num_employees__range=(F('num_chairs') - 10, F('num_chairs') + 10)), [self.c5040, self.c5050, self.c5060], ) self.assertCountEqual( Company.objects.filter(num_employees__range=(F('num_chairs') - 10, 100)), [self.c5020, self.c5040, self.c5050, self.c5060], ) self.assertCountEqual( Company.objects.filter(num_employees__range=(1, 100)), [self.c5020, self.c5040, self.c5050, self.c5060, self.c99300], ) def test_range_lookup_namedtuple(self): EmployeeRange = namedtuple('EmployeeRange', ['minimum', 'maximum']) qs = Company.objects.filter( num_employees__range=EmployeeRange(minimum=51, maximum=100), ) self.assertSequenceEqual(qs, [self.c99300]) @unittest.skipUnless(connection.vendor == 'sqlite', "This defensive test only works on databases that don't validate parameter types") def test_complex_expressions_do_not_introduce_sql_injection_via_untrusted_string_inclusion(self): """ This tests that SQL injection isn't possible using compilation of expressions in iterable filters, as their compilation happens before the main query compilation. It's limited to SQLite, as PostgreSQL, Oracle and other vendors have defense in depth against this by type checking. Testing against SQLite (the most permissive of the built-in databases) demonstrates that the problem doesn't exist while keeping the test simple. 
""" queryset = Company.objects.filter(name__in=[F('num_chairs') + '1)) OR ((1==1']) self.assertQuerysetEqual(queryset, [], ordered=False) def test_in_lookup_allows_F_expressions_and_expressions_for_datetimes(self): start = datetime.datetime(2016, 2, 3, 15, 0, 0) end = datetime.datetime(2016, 2, 5, 15, 0, 0) experiment_1 = Experiment.objects.create( name='Integrity testing', assigned=start.date(), start=start, end=end, completed=end.date(), estimated_time=end - start, ) experiment_2 = Experiment.objects.create( name='Taste testing', assigned=start.date(), start=start, end=end, completed=end.date(), estimated_time=end - start, ) r1 = Result.objects.create( experiment=experiment_1, result_time=datetime.datetime(2016, 2, 4, 15, 0, 0), ) Result.objects.create( experiment=experiment_1, result_time=datetime.datetime(2016, 3, 10, 2, 0, 0), ) Result.objects.create( experiment=experiment_2, result_time=datetime.datetime(2016, 1, 8, 5, 0, 0), ) within_experiment_time = [F('experiment__start'), F('experiment__end')] queryset = Result.objects.filter(result_time__range=within_experiment_time) self.assertSequenceEqual(queryset, [r1]) within_experiment_time = [F('experiment__start'), F('experiment__end')] queryset = Result.objects.filter(result_time__range=within_experiment_time) self.assertSequenceEqual(queryset, [r1]) class FTests(SimpleTestCase): def test_deepcopy(self): f = F("foo") g = deepcopy(f) self.assertEqual(f.name, g.name) def test_deconstruct(self): f = F('name') path, args, kwargs = f.deconstruct() self.assertEqual(path, 'django.db.models.expressions.F') self.assertEqual(args, (f.name,)) self.assertEqual(kwargs, {}) def test_equal(self): f = F('name') same_f = F('name') other_f = F('username') self.assertEqual(f, same_f) self.assertNotEqual(f, other_f) def test_hash(self): d = {F('name'): 'Bob'} self.assertIn(F('name'), d) self.assertEqual(d[F('name')], 'Bob') def test_not_equal_Value(self): f = F('name') value = Value('name') self.assertNotEqual(f, value) self.assertNotEqual(value, f) class ExpressionsTests(TestCase): def test_F_reuse(self): f = F('id') n = Number.objects.create(integer=-1) c = Company.objects.create( name="Example Inc.", num_employees=2300, num_chairs=5, ceo=Employee.objects.create(firstname="Joe", lastname="Smith") ) c_qs = Company.objects.filter(id=f) self.assertEqual(c_qs.get(), c) # Reuse the same F-object for another queryset n_qs = Number.objects.filter(id=f) self.assertEqual(n_qs.get(), n) # The original query still works correctly self.assertEqual(c_qs.get(), c) def test_patterns_escape(self): r""" Special characters (e.g. 
%, _ and \) stored in database are properly escaped when using a pattern lookup with an expression refs #16731 """ Employee.objects.bulk_create([ Employee(firstname="Johnny", lastname="%John"), Employee(firstname="Jean-Claude", lastname="Claud_"), Employee(firstname="Jean-Claude", lastname="Claude%"), Employee(firstname="Johnny", lastname="Joh\\n"), Employee(firstname="Johnny", lastname="_ohn"), ]) claude = Employee.objects.create(firstname='Jean-Claude', lastname='Claude') john = Employee.objects.create(firstname='Johnny', lastname='John') john_sign = Employee.objects.create(firstname='%Joh\\nny', lastname='%Joh\\n') self.assertCountEqual( Employee.objects.filter(firstname__contains=F('lastname')), [john_sign, john, claude], ) self.assertCountEqual( Employee.objects.filter(firstname__startswith=F('lastname')), [john_sign, john], ) self.assertSequenceEqual( Employee.objects.filter(firstname__endswith=F('lastname')), [claude], ) def test_insensitive_patterns_escape(self): r""" Special characters (e.g. %, _ and \) stored in database are properly escaped when using a case insensitive pattern lookup with an expression -- refs #16731 """ Employee.objects.bulk_create([ Employee(firstname="Johnny", lastname="%john"), Employee(firstname="Jean-Claude", lastname="claud_"), Employee(firstname="Jean-Claude", lastname="claude%"), Employee(firstname="Johnny", lastname="joh\\n"), Employee(firstname="Johnny", lastname="_ohn"), ]) claude = Employee.objects.create(firstname='Jean-Claude', lastname='claude') john = Employee.objects.create(firstname='Johnny', lastname='john') john_sign = Employee.objects.create(firstname='%Joh\\nny', lastname='%joh\\n') self.assertCountEqual( Employee.objects.filter(firstname__icontains=F('lastname')), [john_sign, john, claude], ) self.assertCountEqual( Employee.objects.filter(firstname__istartswith=F('lastname')), [john_sign, john], ) self.assertSequenceEqual( Employee.objects.filter(firstname__iendswith=F('lastname')), [claude], ) @isolate_apps('expressions') class SimpleExpressionTests(SimpleTestCase): def test_equal(self): self.assertEqual(Expression(), Expression()) self.assertEqual( Expression(IntegerField()), Expression(output_field=IntegerField()) ) self.assertEqual(Expression(IntegerField()), mock.ANY) self.assertNotEqual( Expression(IntegerField()), Expression(CharField()) ) class TestModel(Model): field = IntegerField() other_field = IntegerField() self.assertNotEqual( Expression(TestModel._meta.get_field('field')), Expression(TestModel._meta.get_field('other_field')), ) def test_hash(self): self.assertEqual(hash(Expression()), hash(Expression())) self.assertEqual( hash(Expression(IntegerField())), hash(Expression(output_field=IntegerField())) ) self.assertNotEqual( hash(Expression(IntegerField())), hash(Expression(CharField())), ) class TestModel(Model): field = IntegerField() other_field = IntegerField() self.assertNotEqual( hash(Expression(TestModel._meta.get_field('field'))), hash(Expression(TestModel._meta.get_field('other_field'))), ) class ExpressionsNumericTests(TestCase): @classmethod def setUpTestData(cls): Number(integer=-1).save() Number(integer=42).save() Number(integer=1337).save() Number.objects.update(float=F('integer')) def test_fill_with_value_from_same_object(self): """ We can fill a value in all objects with an other value of the same object. 
""" self.assertQuerysetEqual( Number.objects.all(), [(-1, -1), (42, 42), (1337, 1337)], lambda n: (n.integer, round(n.float)), ordered=False ) def test_increment_value(self): """ We can increment a value of all objects in a query set. """ self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2) self.assertQuerysetEqual( Number.objects.all(), [(-1, -1), (43, 42), (1338, 1337)], lambda n: (n.integer, round(n.float)), ordered=False ) def test_filter_not_equals_other_field(self): """ We can filter for objects, where a value is not equals the value of an other field. """ self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2) self.assertQuerysetEqual( Number.objects.exclude(float=F('integer')), [(43, 42), (1338, 1337)], lambda n: (n.integer, round(n.float)), ordered=False ) def test_complex_expressions(self): """ Complex expressions of different connection types are possible. """ n = Number.objects.create(integer=10, float=123.45) self.assertEqual(Number.objects.filter(pk=n.pk).update( float=F('integer') + F('float') * 2), 1) self.assertEqual(Number.objects.get(pk=n.pk).integer, 10) self.assertEqual(Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3)) class ExpressionOperatorTests(TestCase): @classmethod def setUpTestData(cls): cls.n = Number.objects.create(integer=42, float=15.5) cls.n1 = Number.objects.create(integer=-42, float=-15.5) def test_lefthand_addition(self): # LH Addition of floats and integers Number.objects.filter(pk=self.n.pk).update( integer=F('integer') + 15, float=F('float') + 42.7 ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3)) def test_lefthand_subtraction(self): # LH Subtraction of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') - 15, float=F('float') - 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3)) def test_lefthand_multiplication(self): # Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') * 15, float=F('float') * 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3)) def test_lefthand_division(self): # LH Division of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') / 2, float=F('float') / 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3)) def test_lefthand_modulo(self): # LH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') % 20) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2) def test_lefthand_bitwise_and(self): # LH Bitwise ands on integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer').bitand(56)) Number.objects.filter(pk=self.n1.pk).update(integer=F('integer').bitand(-56)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -64) def test_lefthand_bitwise_left_shift_operator(self): Number.objects.update(integer=F('integer').bitleftshift(2)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 168) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -168) def 
test_lefthand_bitwise_right_shift_operator(self): Number.objects.update(integer=F('integer').bitrightshift(2)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 10) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -11) def test_lefthand_bitwise_or(self): # LH Bitwise or on integers Number.objects.update(integer=F('integer').bitor(48)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -10) def test_lefthand_transformed_field_bitwise_or(self): Employee.objects.create(firstname='Max', lastname='Mustermann') with register_lookup(CharField, Length): qs = Employee.objects.annotate(bitor=F('lastname__length').bitor(48)) self.assertEqual(qs.get().bitor, 58) def test_lefthand_power(self): # LH Power arithmetic operation on floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') ** 2, float=F('float') ** 1.5) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 1764) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(61.02, places=2)) def test_lefthand_bitwise_xor(self): Number.objects.update(integer=F('integer').bitxor(48)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 26) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -26) def test_lefthand_bitwise_xor_null(self): employee = Employee.objects.create(firstname='John', lastname='Doe') Employee.objects.update(salary=F('salary').bitxor(48)) employee.refresh_from_db() self.assertIsNone(employee.salary) @unittest.skipUnless(connection.vendor == 'oracle', "Oracle doesn't support bitwise XOR.") def test_lefthand_bitwise_xor_not_supported(self): msg = 'Bitwise XOR is not supported in Oracle.' with self.assertRaisesMessage(NotSupportedError, msg): Number.objects.update(integer=F('integer').bitxor(48)) def test_right_hand_addition(self): # Right hand operators Number.objects.filter(pk=self.n.pk).update(integer=15 + F('integer'), float=42.7 + F('float')) # RH Addition of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3)) def test_right_hand_subtraction(self): Number.objects.filter(pk=self.n.pk).update(integer=15 - F('integer'), float=42.7 - F('float')) # RH Subtraction of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3)) def test_right_hand_multiplication(self): # RH Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=15 * F('integer'), float=42.7 * F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3)) def test_right_hand_division(self): # RH Division of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=640 / F('integer'), float=42.7 / F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3)) def test_right_hand_modulo(self): # RH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=69 % F('integer')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) def test_righthand_power(self): # RH Power arithmetic operation on floats and integers Number.objects.filter(pk=self.n.pk).update(integer=2 ** F('integer'), float=1.5 ** F('float')) 
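# Arithmetic check for the expectations below (self.n starts with integer=42
# and float=15.5 in setUpTestData): 2 ** 42 == 4398046511104, and
# 1.5 ** 15.5 is approximately 536.308, matching Approximate(..., places=3).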
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 4398046511104) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(536.308, places=3)) class FTimeDeltaTests(TestCase): @classmethod def setUpTestData(cls): cls.sday = sday = datetime.date(2010, 6, 25) cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000) midnight = datetime.time(0) delta0 = datetime.timedelta(0) delta1 = datetime.timedelta(microseconds=253000) delta2 = datetime.timedelta(seconds=44) delta3 = datetime.timedelta(hours=21, minutes=8) delta4 = datetime.timedelta(days=10) delta5 = datetime.timedelta(days=90) # Test data is set so that deltas and delays will be # strictly increasing. cls.deltas = [] cls.delays = [] cls.days_long = [] # e0: started same day as assigned, zero duration end = stime + delta0 cls.e0 = Experiment.objects.create( name='e0', assigned=sday, start=stime, end=end, completed=end.date(), estimated_time=delta0, ) cls.deltas.append(delta0) cls.delays.append(cls.e0.start - datetime.datetime.combine(cls.e0.assigned, midnight)) cls.days_long.append(cls.e0.completed - cls.e0.assigned) # e1: started one day after assigned, tiny duration, data # set so that end time has no fractional seconds, which # tests an edge case on sqlite. delay = datetime.timedelta(1) end = stime + delay + delta1 e1 = Experiment.objects.create( name='e1', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta1, ) cls.deltas.append(delta1) cls.delays.append(e1.start - datetime.datetime.combine(e1.assigned, midnight)) cls.days_long.append(e1.completed - e1.assigned) # e2: started three days after assigned, small duration end = stime + delta2 e2 = Experiment.objects.create( name='e2', assigned=sday - datetime.timedelta(3), start=stime, end=end, completed=end.date(), estimated_time=datetime.timedelta(hours=1), ) cls.deltas.append(delta2) cls.delays.append(e2.start - datetime.datetime.combine(e2.assigned, midnight)) cls.days_long.append(e2.completed - e2.assigned) # e3: started four days after assigned, medium duration delay = datetime.timedelta(4) end = stime + delay + delta3 e3 = Experiment.objects.create( name='e3', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta3, ) cls.deltas.append(delta3) cls.delays.append(e3.start - datetime.datetime.combine(e3.assigned, midnight)) cls.days_long.append(e3.completed - e3.assigned) # e4: started 10 days after assignment, long duration end = stime + delta4 e4 = Experiment.objects.create( name='e4', assigned=sday - datetime.timedelta(10), start=stime, end=end, completed=end.date(), estimated_time=delta4 - datetime.timedelta(1), ) cls.deltas.append(delta4) cls.delays.append(e4.start - datetime.datetime.combine(e4.assigned, midnight)) cls.days_long.append(e4.completed - e4.assigned) # e5: started a month after assignment, very long duration delay = datetime.timedelta(30) end = stime + delay + delta5 e5 = Experiment.objects.create( name='e5', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta5, ) cls.deltas.append(delta5) cls.delays.append(e5.start - datetime.datetime.combine(e5.assigned, midnight)) cls.days_long.append(e5.completed - e5.assigned) cls.expnames = [e.name for e in Experiment.objects.all()] def test_multiple_query_compilation(self): # Ticket #21643 queryset = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1)) q1 = str(queryset.query) q2 = str(queryset.query) self.assertEqual(q1, q2) def test_query_clone(self): # Ticket 
#21643 - Crash when compiling query more than once qs = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1)) qs2 = qs.all() list(qs) list(qs2) # Intentionally no assert def test_delta_add(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.filter(end__lt=F('start') + delta)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(end__lt=delta + F('start'))] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(end__lte=F('start') + delta)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_delta_subtract(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.filter(start__gt=F('end') - delta)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(start__gte=F('end') - delta)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_exclude(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.exclude(end__lt=F('start') + delta)] self.assertEqual(test_set, self.expnames[i:]) test_set = [e.name for e in Experiment.objects.exclude(end__lte=F('start') + delta)] self.assertEqual(test_set, self.expnames[i + 1:]) def test_date_comparison(self): for i, days in enumerate(self.days_long): test_set = [e.name for e in Experiment.objects.filter(completed__lt=F('assigned') + days)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(completed__lte=F('assigned') + days)] self.assertEqual(test_set, self.expnames[:i + 1]) @skipUnlessDBFeature("supports_mixed_date_datetime_comparisons") def test_mixed_comparisons1(self): for i, delay in enumerate(self.delays): test_set = [e.name for e in Experiment.objects.filter(assigned__gt=F('start') - delay)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(assigned__gte=F('start') - delay)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_mixed_comparisons2(self): for i, delay in enumerate(self.delays): delay = datetime.timedelta(delay.days) test_set = [e.name for e in Experiment.objects.filter(start__lt=F('assigned') + delay)] self.assertEqual(test_set, self.expnames[:i]) test_set = [ e.name for e in Experiment.objects.filter(start__lte=F('assigned') + delay + datetime.timedelta(1)) ] self.assertEqual(test_set, self.expnames[:i + 1]) def test_delta_update(self): for delta in self.deltas: exps = Experiment.objects.all() expected_durations = [e.duration() for e in exps] expected_starts = [e.start + delta for e in exps] expected_ends = [e.end + delta for e in exps] Experiment.objects.update(start=F('start') + delta, end=F('end') + delta) exps = Experiment.objects.all() new_starts = [e.start for e in exps] new_ends = [e.end for e in exps] new_durations = [e.duration() for e in exps] self.assertEqual(expected_starts, new_starts) self.assertEqual(expected_ends, new_ends) self.assertEqual(expected_durations, new_durations) def test_invalid_operator(self): with self.assertRaises(DatabaseError): list(Experiment.objects.filter(start=F('start') * datetime.timedelta(0))) def test_durationfield_add(self): zeros = [e.name for e in Experiment.objects.filter(start=F('start') + F('estimated_time'))] self.assertEqual(zeros, ['e0']) end_less = [e.name for e in Experiment.objects.filter(end__lt=F('start') + F('estimated_time'))] self.assertEqual(end_less, ['e2']) delta_math = [ e.name for e in 
Experiment.objects.filter(end__gte=F('start') + F('estimated_time') + datetime.timedelta(hours=1)) ] self.assertEqual(delta_math, ['e4']) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('start') + Value(None, output_field=DurationField()), output_field=DateTimeField(), )) self.assertIsNone(queryset.first().shifted) def test_durationfield_multiply_divide(self): Experiment.objects.update(scalar=2) tests = [ (Decimal('2'), 2), (F('scalar'), 2), (2, 2), (3.2, 3.2), ] for expr, scalar in tests: with self.subTest(expr=expr): qs = Experiment.objects.annotate( multiplied=ExpressionWrapper( expr * F('estimated_time'), output_field=DurationField(), ), divided=ExpressionWrapper( F('estimated_time') / expr, output_field=DurationField(), ), ) for experiment in qs: self.assertEqual( experiment.multiplied, experiment.estimated_time * scalar, ) self.assertEqual( experiment.divided, experiment.estimated_time / scalar, ) def test_duration_expressions(self): for delta in self.deltas: qs = Experiment.objects.annotate(duration=F('estimated_time') + delta) for obj in qs: self.assertEqual(obj.duration, obj.estimated_time + delta) @skipUnlessDBFeature('supports_temporal_subtraction') def test_date_subtraction(self): queryset = Experiment.objects.annotate( completion_duration=F('completed') - F('assigned'), ) at_least_5_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=5))} self.assertEqual(at_least_5_days, {'e3', 'e4', 'e5'}) at_least_120_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=120))} self.assertEqual(at_least_120_days, {'e5'}) less_than_5_days = {e.name for e in queryset.filter(completion_duration__lt=datetime.timedelta(days=5))} self.assertEqual(less_than_5_days, {'e0', 'e1', 'e2'}) queryset = Experiment.objects.annotate( difference=F('completed') - Value(None, output_field=DateField()), ) self.assertIsNone(queryset.first().difference) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('completed') - Value(None, output_field=DurationField()), output_field=DateField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_date_subquery_subtraction(self): subquery = Experiment.objects.filter(pk=OuterRef('pk')).values('completed') queryset = Experiment.objects.annotate( difference=subquery - F('completed'), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature('supports_temporal_subtraction') def test_date_case_subtraction(self): queryset = Experiment.objects.annotate( date_case=Case( When(Q(name='e0'), then=F('completed')), output_field=DateField(), ), completed_value=Value( self.e0.completed, output_field=DateField(), ), difference=F('date_case') - F('completed_value'), ).filter(difference=datetime.timedelta()) self.assertEqual(queryset.get(), self.e0) @skipUnlessDBFeature('supports_temporal_subtraction') def test_time_subtraction(self): Time.objects.create(time=datetime.time(12, 30, 15, 2345)) queryset = Time.objects.annotate( difference=F('time') - Value(datetime.time(11, 15, 0)), ) self.assertEqual( queryset.get().difference, datetime.timedelta(hours=1, minutes=15, seconds=15, microseconds=2345) ) queryset = Time.objects.annotate( difference=F('time') - Value(None, output_field=TimeField()), ) self.assertIsNone(queryset.first().difference) queryset = Time.objects.annotate(shifted=ExpressionWrapper( F('time') - Value(None, output_field=DurationField()), output_field=TimeField(), )) 
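# Hedged note: subtracting a NULL duration is expected to propagate NULL at
# the database level, which is why the 'shifted' annotation above is asserted
# to be None just below.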
self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_time_subquery_subtraction(self): Time.objects.create(time=datetime.time(12, 30, 15, 2345)) subquery = Time.objects.filter(pk=OuterRef('pk')).values('time') queryset = Time.objects.annotate( difference=subquery - F('time'), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subtraction(self): under_estimate = [ e.name for e in Experiment.objects.filter(estimated_time__gt=F('end') - F('start')) ] self.assertEqual(under_estimate, ['e2']) over_estimate = [ e.name for e in Experiment.objects.filter(estimated_time__lt=F('end') - F('start')) ] self.assertEqual(over_estimate, ['e4']) queryset = Experiment.objects.annotate( difference=F('start') - Value(None, output_field=DateTimeField()), ) self.assertIsNone(queryset.first().difference) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('start') - Value(None, output_field=DurationField()), output_field=DateTimeField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subquery_subtraction(self): subquery = Experiment.objects.filter(pk=OuterRef('pk')).values('start') queryset = Experiment.objects.annotate( difference=subquery - F('start'), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subtraction_microseconds(self): delta = datetime.timedelta(microseconds=8999999999999999) Experiment.objects.update(end=F('start') + delta) qs = Experiment.objects.annotate(delta=F('end') - F('start')) for e in qs: self.assertEqual(e.delta, delta) def test_duration_with_datetime(self): # Exclude e1 which has very high precision so we can test this on all # backends regardless of whether or not it supports # microsecond_precision. 
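# Reading aid: self.stime is the shared base timestamp from setUpTestData, so
# completed__gt=self.stime + F('estimated_time') keeps only experiments whose
# completion date falls after that base time plus their own estimated time.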
over_estimate = Experiment.objects.exclude(name='e1').filter( completed__gt=self.stime + F('estimated_time'), ).order_by('name') self.assertQuerysetEqual(over_estimate, ['e3', 'e4', 'e5'], lambda e: e.name) def test_duration_with_datetime_microseconds(self): delta = datetime.timedelta(microseconds=8999999999999999) qs = Experiment.objects.annotate(dt=ExpressionWrapper( F('start') + delta, output_field=DateTimeField(), )) for e in qs: self.assertEqual(e.dt, e.start + delta) def test_date_minus_duration(self): more_than_4_days = Experiment.objects.filter( assigned__lt=F('completed') - Value(datetime.timedelta(days=4)) ) self.assertQuerysetEqual(more_than_4_days, ['e3', 'e4', 'e5'], lambda e: e.name) def test_negative_timedelta_update(self): # subtract 30 seconds, 30 minutes, 2 hours and 2 days experiments = Experiment.objects.filter(name='e0').annotate( start_sub_seconds=F('start') + datetime.timedelta(seconds=-30), ).annotate( start_sub_minutes=F('start_sub_seconds') + datetime.timedelta(minutes=-30), ).annotate( start_sub_hours=F('start_sub_minutes') + datetime.timedelta(hours=-2), ).annotate( new_start=F('start_sub_hours') + datetime.timedelta(days=-2), ) expected_start = datetime.datetime(2010, 6, 23, 9, 45, 0) # subtract 30 microseconds experiments = experiments.annotate(new_start=F('new_start') + datetime.timedelta(microseconds=-30)) expected_start += datetime.timedelta(microseconds=+746970) experiments.update(start=F('new_start')) e0 = Experiment.objects.get(name='e0') self.assertEqual(e0.start, expected_start) class ValueTests(TestCase): def test_update_TimeField_using_Value(self): Time.objects.create() Time.objects.update(time=Value(datetime.time(1), output_field=TimeField())) self.assertEqual(Time.objects.get().time, datetime.time(1)) def test_update_UUIDField_using_Value(self): UUID.objects.create() UUID.objects.update(uuid=Value(uuid.UUID('12345678901234567890123456789012'), output_field=UUIDField())) self.assertEqual(UUID.objects.get().uuid, uuid.UUID('12345678901234567890123456789012')) def test_deconstruct(self): value = Value('name') path, args, kwargs = value.deconstruct() self.assertEqual(path, 'django.db.models.expressions.Value') self.assertEqual(args, (value.value,)) self.assertEqual(kwargs, {}) def test_deconstruct_output_field(self): value = Value('name', output_field=CharField()) path, args, kwargs = value.deconstruct() self.assertEqual(path, 'django.db.models.expressions.Value') self.assertEqual(args, (value.value,)) self.assertEqual(len(kwargs), 1) self.assertEqual(kwargs['output_field'].deconstruct(), CharField().deconstruct()) def test_repr(self): tests = [ (None, 'Value(None)'), ('str', "Value('str')"), (True, 'Value(True)'), (42, 'Value(42)'), ( datetime.datetime(2019, 5, 15), 'Value(datetime.datetime(2019, 5, 15, 0, 0))', ), (Decimal('3.14'), "Value(Decimal('3.14'))"), ] for value, expected in tests: with self.subTest(value=value): self.assertEqual(repr(Value(value)), expected) def test_equal(self): value = Value('name') self.assertEqual(value, Value('name')) self.assertNotEqual(value, Value('username')) def test_hash(self): d = {Value('name'): 'Bob'} self.assertIn(Value('name'), d) self.assertEqual(d[Value('name')], 'Bob') def test_equal_output_field(self): value = Value('name', output_field=CharField()) same_value = Value('name', output_field=CharField()) other_value = Value('name', output_field=TimeField()) no_output_field = Value('name') self.assertEqual(value, same_value) self.assertNotEqual(value, other_value) self.assertNotEqual(value, no_output_field) 
def test_raise_empty_expressionlist(self): msg = 'ExpressionList requires at least one expression' with self.assertRaisesMessage(ValueError, msg): ExpressionList() def test_compile_unresolved(self): # This test might need to be revisited later on if #25425 is enforced. compiler = Time.objects.all().query.get_compiler(connection=connection) value = Value('foo') self.assertEqual(value.as_sql(compiler, connection), ('%s', ['foo'])) value = Value('foo', output_field=CharField()) self.assertEqual(value.as_sql(compiler, connection), ('%s', ['foo'])) def test_output_field_decimalfield(self): Time.objects.create() time = Time.objects.annotate(one=Value(1, output_field=DecimalField())).first() self.assertEqual(time.one, 1) def test_resolve_output_field(self): value_types = [ ('str', CharField), (True, BooleanField), (42, IntegerField), (3.14, FloatField), (datetime.date(2019, 5, 15), DateField), (datetime.datetime(2019, 5, 15), DateTimeField), (datetime.time(3, 16), TimeField), (datetime.timedelta(1), DurationField), (Decimal('3.14'), DecimalField), (b'', BinaryField), (uuid.uuid4(), UUIDField), ] for value, output_field_type in value_types: with self.subTest(type=type(value)): expr = Value(value) self.assertIsInstance(expr.output_field, output_field_type) def test_resolve_output_field_failure(self): msg = 'Cannot resolve expression type, unknown output_field' with self.assertRaisesMessage(FieldError, msg): Value(object()).output_field class ExistsTests(TestCase): def test_optimizations(self): with CaptureQueriesContext(connection) as context: list(Experiment.objects.values(exists=Exists( Experiment.objects.order_by('pk'), )).order_by()) captured_queries = context.captured_queries self.assertEqual(len(captured_queries), 1) captured_sql = captured_queries[0]['sql'] self.assertNotIn( connection.ops.quote_name(Experiment._meta.pk.column), captured_sql, ) self.assertIn( connection.ops.limit_offset_sql(None, 1), captured_sql, ) self.assertNotIn('ORDER BY', captured_sql) class FieldTransformTests(TestCase): @classmethod def setUpTestData(cls): cls.sday = sday = datetime.date(2010, 6, 25) cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000) cls.ex1 = Experiment.objects.create( name='Experiment 1', assigned=sday, completed=sday + datetime.timedelta(2), estimated_time=datetime.timedelta(2), start=stime, end=stime + datetime.timedelta(2), ) def test_month_aggregation(self): self.assertEqual( Experiment.objects.aggregate(month_count=Count('assigned__month')), {'month_count': 1} ) def test_transform_in_values(self): self.assertSequenceEqual( Experiment.objects.values('assigned__month'), [{'assigned__month': 6}], ) def test_multiple_transforms_in_values(self): self.assertSequenceEqual( Experiment.objects.values('end__date__month'), [{'end__date__month': 6}], ) class ReprTests(SimpleTestCase): def test_expressions(self): self.assertEqual( repr(Case(When(a=1))), "<Case: CASE WHEN <Q: (AND: ('a', 1))> THEN Value(None), ELSE Value(None)>" ) self.assertEqual( repr(When(Q(age__gte=18), then=Value('legal'))), "<When: WHEN <Q: (AND: ('age__gte', 18))> THEN Value('legal')>" ) self.assertEqual(repr(Col('alias', 'field')), "Col(alias, field)") self.assertEqual(repr(F('published')), "F(published)") self.assertEqual(repr(F('cost') + F('tax')), "<CombinedExpression: F(cost) + F(tax)>") self.assertEqual( repr(ExpressionWrapper(F('cost') + F('tax'), IntegerField())), "ExpressionWrapper(F(cost) + F(tax))" ) self.assertEqual(repr(Func('published', function='TO_CHAR')), "Func(F(published), function=TO_CHAR)") 
self.assertEqual(repr(OrderBy(Value(1))), 'OrderBy(Value(1), descending=False)') self.assertEqual(repr(RawSQL('table.col', [])), "RawSQL(table.col, [])") self.assertEqual(repr(Ref('sum_cost', Sum('cost'))), "Ref(sum_cost, Sum(F(cost)))") self.assertEqual(repr(Value(1)), "Value(1)") self.assertEqual( repr(ExpressionList(F('col'), F('anothercol'))), 'ExpressionList(F(col), F(anothercol))' ) self.assertEqual( repr(ExpressionList(OrderBy(F('col'), descending=False))), 'ExpressionList(OrderBy(F(col), descending=False))' ) def test_functions(self): self.assertEqual(repr(Coalesce('a', 'b')), "Coalesce(F(a), F(b))") self.assertEqual(repr(Concat('a', 'b')), "Concat(ConcatPair(F(a), F(b)))") self.assertEqual(repr(Length('a')), "Length(F(a))") self.assertEqual(repr(Lower('a')), "Lower(F(a))") self.assertEqual(repr(Substr('a', 1, 3)), "Substr(F(a), Value(1), Value(3))") self.assertEqual(repr(Upper('a')), "Upper(F(a))") def test_aggregates(self): self.assertEqual(repr(Avg('a')), "Avg(F(a))") self.assertEqual(repr(Count('a')), "Count(F(a))") self.assertEqual(repr(Count('*')), "Count('*')") self.assertEqual(repr(Max('a')), "Max(F(a))") self.assertEqual(repr(Min('a')), "Min(F(a))") self.assertEqual(repr(StdDev('a')), "StdDev(F(a), sample=False)") self.assertEqual(repr(Sum('a')), "Sum(F(a))") self.assertEqual(repr(Variance('a', sample=True)), "Variance(F(a), sample=True)") def test_distinct_aggregates(self): self.assertEqual(repr(Count('a', distinct=True)), "Count(F(a), distinct=True)") self.assertEqual(repr(Count('*', distinct=True)), "Count('*', distinct=True)") def test_filtered_aggregates(self): filter = Q(a=1) self.assertEqual(repr(Avg('a', filter=filter)), "Avg(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Count('a', filter=filter)), "Count(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Max('a', filter=filter)), "Max(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Min('a', filter=filter)), "Min(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(StdDev('a', filter=filter)), "StdDev(F(a), filter=(AND: ('a', 1)), sample=False)") self.assertEqual(repr(Sum('a', filter=filter)), "Sum(F(a), filter=(AND: ('a', 1)))") self.assertEqual( repr(Variance('a', sample=True, filter=filter)), "Variance(F(a), filter=(AND: ('a', 1)), sample=True)" ) self.assertEqual( repr(Count('a', filter=filter, distinct=True)), "Count(F(a), distinct=True, filter=(AND: ('a', 1)))" ) class CombinableTests(SimpleTestCase): bitwise_msg = 'Use .bitand() and .bitor() for bitwise logical operations.' 
def test_negation(self): c = Combinable() self.assertEqual(-c, c * -1) def test_and(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): Combinable() & Combinable() def test_or(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): Combinable() | Combinable() def test_reversed_and(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): object() & Combinable() def test_reversed_or(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): object() | Combinable() class CombinedExpressionTests(SimpleTestCase): def test_resolve_output_field(self): tests = [ (IntegerField, AutoField, IntegerField), (AutoField, IntegerField, IntegerField), (IntegerField, DecimalField, DecimalField), (DecimalField, IntegerField, DecimalField), (IntegerField, FloatField, FloatField), (FloatField, IntegerField, FloatField), ] connectors = [Combinable.ADD, Combinable.SUB, Combinable.MUL, Combinable.DIV] for lhs, rhs, combined in tests: for connector in connectors: with self.subTest(lhs=lhs, connector=connector, rhs=rhs, combined=combined): expr = CombinedExpression( Expression(lhs()), connector, Expression(rhs()), ) self.assertIsInstance(expr.output_field, combined) class ExpressionWrapperTests(SimpleTestCase): def test_empty_group_by(self): expr = ExpressionWrapper(Value(3), output_field=IntegerField()) self.assertEqual(expr.get_group_by_cols(alias=None), []) def test_non_empty_group_by(self): value = Value('f') value.output_field = None expr = ExpressionWrapper(Lower(value), output_field=IntegerField()) group_by_cols = expr.get_group_by_cols(alias=None) self.assertEqual(group_by_cols, [expr.expression]) self.assertEqual(group_by_cols[0].output_field, expr.output_field) class OrderByTests(SimpleTestCase): def test_equal(self): self.assertEqual( OrderBy(F('field'), nulls_last=True), OrderBy(F('field'), nulls_last=True), ) self.assertNotEqual( OrderBy(F('field'), nulls_last=True), OrderBy(F('field'), nulls_last=False), ) def test_hash(self): self.assertEqual( hash(OrderBy(F('field'), nulls_last=True)), hash(OrderBy(F('field'), nulls_last=True)), ) self.assertNotEqual( hash(OrderBy(F('field'), nulls_last=True)), hash(OrderBy(F('field'), nulls_last=False)), )
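

# A minimal sketch of the query pattern the temporal-subtraction tests above
# exercise, assuming the Experiment model from this app's models module and a
# configured database; it is illustrative only and is not invoked by the suite.
def _example_duration_annotation():
    from django.db.models import DurationField, ExpressionWrapper, F

    # end - start is computed in the database and surfaces as a
    # datetime.timedelta on each annotated row.
    return Experiment.objects.annotate(
        duration=ExpressionWrapper(F('end') - F('start'), output_field=DurationField()),
    )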
""" Tests for F() query expression syntax. """ import uuid from django.db import models class Manager(models.Model): name = models.CharField(max_length=50) class Employee(models.Model): firstname = models.CharField(max_length=50) lastname = models.CharField(max_length=50) salary = models.IntegerField(blank=True, null=True) manager = models.ForeignKey(Manager, models.CASCADE, null=True) def __str__(self): return '%s %s' % (self.firstname, self.lastname) class RemoteEmployee(Employee): adjusted_salary = models.IntegerField() class Company(models.Model): name = models.CharField(max_length=100) num_employees = models.PositiveIntegerField() num_chairs = models.PositiveIntegerField() ceo = models.ForeignKey( Employee, models.CASCADE, related_name='company_ceo_set', ) point_of_contact = models.ForeignKey( Employee, models.SET_NULL, related_name='company_point_of_contact_set', null=True, ) based_in_eu = models.BooleanField(default=False) def __str__(self): return self.name class Number(models.Model): integer = models.BigIntegerField(db_column='the_integer') float = models.FloatField(null=True, db_column='the_float') def __str__(self): return '%i, %.3f' % (self.integer, self.float) class Experiment(models.Model): name = models.CharField(max_length=24) assigned = models.DateField() completed = models.DateField() estimated_time = models.DurationField() start = models.DateTimeField() end = models.DateTimeField() scalar = models.IntegerField(null=True) class Meta: db_table = 'expressions_ExPeRiMeNt' ordering = ('name',) def duration(self): return self.end - self.start class Result(models.Model): experiment = models.ForeignKey(Experiment, models.CASCADE) result_time = models.DateTimeField() def __str__(self): return "Result at %s" % self.result_time class Time(models.Model): time = models.TimeField(null=True) def __str__(self): return str(self.time) class SimulationRun(models.Model): start = models.ForeignKey(Time, models.CASCADE, null=True, related_name='+') end = models.ForeignKey(Time, models.CASCADE, null=True, related_name='+') midpoint = models.TimeField() def __str__(self): return "%s (%s to %s)" % (self.midpoint, self.start, self.end) class UUIDPK(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4) class UUID(models.Model): uuid = models.UUIDField(null=True) uuid_fk = models.ForeignKey(UUIDPK, models.CASCADE, null=True)
import os import re import shutil import tempfile import time import warnings from io import StringIO from pathlib import Path from unittest import mock, skipIf, skipUnless from admin_scripts.tests import AdminScriptTestCase from django.core import management from django.core.management import execute_from_command_line from django.core.management.base import CommandError from django.core.management.commands.makemessages import ( Command as MakeMessagesCommand, write_pot_file, ) from django.core.management.utils import find_command from django.test import SimpleTestCase, override_settings from django.test.utils import captured_stderr, captured_stdout from django.utils._os import symlinks_supported from django.utils.translation import TranslatorCommentWarning from .utils import POFileAssertionMixin, RunInTmpDirMixin, copytree LOCALE = 'de' has_xgettext = find_command('xgettext') gettext_version = MakeMessagesCommand().gettext_version if has_xgettext else None requires_gettext_019 = skipIf(has_xgettext and gettext_version < (0, 19), 'gettext 0.19 required') @skipUnless(has_xgettext, 'xgettext is mandatory for extraction tests') class ExtractorTests(POFileAssertionMixin, RunInTmpDirMixin, SimpleTestCase): work_subdir = 'commands' PO_FILE = 'locale/%s/LC_MESSAGES/django.po' % LOCALE def _run_makemessages(self, **options): out = StringIO() management.call_command('makemessages', locale=[LOCALE], verbosity=2, stdout=out, **options) output = out.getvalue() self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() return output, po_contents def assertMsgIdPlural(self, msgid, haystack, use_quotes=True): return self._assertPoKeyword('msgid_plural', msgid, haystack, use_quotes=use_quotes) def assertMsgStr(self, msgstr, haystack, use_quotes=True): return self._assertPoKeyword('msgstr', msgstr, haystack, use_quotes=use_quotes) def assertNotMsgId(self, msgid, s, use_quotes=True): if use_quotes: msgid = '"%s"' % msgid msgid = re.escape(msgid) return self.assertTrue(not re.search('^msgid %s' % msgid, s, re.MULTILINE)) def _assertPoLocComment(self, assert_presence, po_filename, line_number, *comment_parts): with open(po_filename) as fp: po_contents = fp.read() if os.name == 'nt': # #: .\path\to\file.html:123 cwd_prefix = '%s%s' % (os.curdir, os.sep) else: # #: path/to/file.html:123 cwd_prefix = '' path = os.path.join(cwd_prefix, *comment_parts) parts = [path] if isinstance(line_number, str): line_number = self._get_token_line_number(path, line_number) if line_number is not None: parts.append(':%d' % line_number) needle = ''.join(parts) pattern = re.compile(r'^\#\:.*' + re.escape(needle), re.MULTILINE) if assert_presence: return self.assertRegex(po_contents, pattern, '"%s" not found in final .po file.' % needle) else: return self.assertNotRegex(po_contents, pattern, '"%s" shouldn\'t be in final .po file.' % needle) def _get_token_line_number(self, path, token): with open(path) as f: for line, content in enumerate(f, 1): if token in content: return line self.fail("The token '%s' could not be found in %s, please check the test config" % (token, path)) def assertLocationCommentPresent(self, po_filename, line_number, *comment_parts): r""" self.assertLocationCommentPresent('django.po', 42, 'dirA', 'dirB', 'foo.py') verifies that the django.po file has a gettext-style location comment of the form `#: dirA/dirB/foo.py:42` (or `#: .\dirA\dirB\foo.py:42` on Windows) None can be passed for the line_number argument to skip checking of the :42 suffix part. 
A string token can also be passed as line_number, in which case it will be searched in the template, and its line number will be used. A msgid is a suitable candidate. """ return self._assertPoLocComment(True, po_filename, line_number, *comment_parts) def assertLocationCommentNotPresent(self, po_filename, line_number, *comment_parts): """Check the opposite of assertLocationComment()""" return self._assertPoLocComment(False, po_filename, line_number, *comment_parts) def assertRecentlyModified(self, path): """ Assert that file was recently modified (modification time was less than 10 seconds ago). """ delta = time.time() - os.stat(path).st_mtime self.assertLess(delta, 10, "%s was recently modified" % path) def assertNotRecentlyModified(self, path): """ Assert that file was not recently modified (modification time was more than 10 seconds ago). """ delta = time.time() - os.stat(path).st_mtime self.assertGreater(delta, 10, "%s wasn't recently modified" % path) class BasicExtractorTests(ExtractorTests): @override_settings(USE_I18N=False) def test_use_i18n_false(self): """ makemessages also runs successfully when USE_I18N is False. """ management.call_command('makemessages', locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE, encoding='utf-8') as fp: po_contents = fp.read() # Check two random strings self.assertIn('#. Translators: One-line translator comment #1', po_contents) self.assertIn('msgctxt "Special trans context #1"', po_contents) def test_no_option(self): # One of either the --locale, --exclude, or --all options is required. msg = "Type 'manage.py help makemessages' for usage information." with mock.patch( 'django.core.management.commands.makemessages.sys.argv', ['manage.py', 'makemessages'], ): with self.assertRaisesRegex(CommandError, msg): management.call_command('makemessages') def test_valid_locale(self): out = StringIO() management.call_command('makemessages', locale=['de'], stdout=out, verbosity=1) self.assertNotIn('invalid locale de', out.getvalue()) self.assertIn('processing locale de', out.getvalue()) self.assertIs(Path(self.PO_FILE).exists(), True) def test_invalid_locale(self): out = StringIO() management.call_command('makemessages', locale=['pl-PL'], stdout=out, verbosity=1) self.assertIn('invalid locale pl-PL, did you mean pl_PL?', out.getvalue()) self.assertNotIn('processing locale pl-PL', out.getvalue()) self.assertIs(Path('locale/pl-PL/LC_MESSAGES/django.po').exists(), False) def test_comments_extractor(self): management.call_command('makemessages', locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE, encoding='utf-8') as fp: po_contents = fp.read() self.assertNotIn('This comment should not be extracted', po_contents) # Comments in templates self.assertIn('#. Translators: This comment should be extracted', po_contents) self.assertIn( "#. Translators: Django comment block for translators\n#. " "string's meaning unveiled", po_contents ) self.assertIn('#. Translators: One-line translator comment #1', po_contents) self.assertIn('#. Translators: Two-line translator comment #1\n#. continued here.', po_contents) self.assertIn('#. Translators: One-line translator comment #2', po_contents) self.assertIn('#. Translators: Two-line translator comment #2\n#. continued here.', po_contents) self.assertIn('#. Translators: One-line translator comment #3', po_contents) self.assertIn('#. Translators: Two-line translator comment #3\n#. continued here.', po_contents) self.assertIn('#. 
Translators: One-line translator comment #4', po_contents) self.assertIn('#. Translators: Two-line translator comment #4\n#. continued here.', po_contents) self.assertIn( '#. Translators: One-line translator comment #5 -- with ' 'non ASCII characters: áéíóúö', po_contents ) self.assertIn( '#. Translators: Two-line translator comment #5 -- with ' 'non ASCII characters: áéíóúö\n#. continued here.', po_contents ) def test_special_char_extracted(self): management.call_command('makemessages', locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE, encoding='utf-8') as fp: po_contents = fp.read() self.assertMsgId("Non-breaking space\u00a0:", po_contents) def test_blocktranslate_trimmed(self): management.call_command('makemessages', locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() # should not be trimmed self.assertNotMsgId('Text with a few line breaks.', po_contents) # should be trimmed self.assertMsgId("Again some text with a few line breaks, this time should be trimmed.", po_contents) # #21406 -- Should adjust for eaten line numbers self.assertMsgId("Get my line number", po_contents) self.assertLocationCommentPresent(self.PO_FILE, 'Get my line number', 'templates', 'test.html') def test_extraction_error(self): msg = ( 'Translation blocks must not include other block tags: blocktranslate ' '(file %s, line 3)' % os.path.join('templates', 'template_with_error.tpl') ) with self.assertRaisesMessage(SyntaxError, msg): management.call_command('makemessages', locale=[LOCALE], extensions=['tpl'], verbosity=0) # The temporary file was cleaned up self.assertFalse(os.path.exists('./templates/template_with_error.tpl.py')) def test_unicode_decode_error(self): shutil.copyfile('./not_utf8.sample', './not_utf8.txt') out = StringIO() management.call_command('makemessages', locale=[LOCALE], stdout=out) self.assertIn("UnicodeDecodeError: skipped file not_utf8.txt in .", out.getvalue()) def test_unicode_file_name(self): open(os.path.join(self.test_dir, 'vidéo.txt'), 'a').close() management.call_command('makemessages', locale=[LOCALE], verbosity=0) def test_extraction_warning(self): """test xgettext warning about multiple bare interpolation placeholders""" shutil.copyfile('./code.sample', './code_sample.py') out = StringIO() management.call_command('makemessages', locale=[LOCALE], stdout=out) self.assertIn("code_sample.py:4", out.getvalue()) def test_template_message_context_extractor(self): """ Message contexts are correctly extracted for the {% translate %} and {% blocktranslate %} template tags (#14806). 
""" management.call_command('makemessages', locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() # {% translate %} self.assertIn('msgctxt "Special trans context #1"', po_contents) self.assertMsgId("Translatable literal #7a", po_contents) self.assertIn('msgctxt "Special trans context #2"', po_contents) self.assertMsgId("Translatable literal #7b", po_contents) self.assertIn('msgctxt "Special trans context #3"', po_contents) self.assertMsgId("Translatable literal #7c", po_contents) # {% translate %} with a filter for minor_part in 'abcdefgh': # Iterate from #7.1a to #7.1h template markers self.assertIn('msgctxt "context #7.1{}"'.format(minor_part), po_contents) self.assertMsgId('Translatable literal #7.1{}'.format(minor_part), po_contents) # {% blocktranslate %} self.assertIn('msgctxt "Special blocktranslate context #1"', po_contents) self.assertMsgId("Translatable literal #8a", po_contents) self.assertIn('msgctxt "Special blocktranslate context #2"', po_contents) self.assertMsgId("Translatable literal #8b-singular", po_contents) self.assertIn("Translatable literal #8b-plural", po_contents) self.assertIn('msgctxt "Special blocktranslate context #3"', po_contents) self.assertMsgId("Translatable literal #8c-singular", po_contents) self.assertIn("Translatable literal #8c-plural", po_contents) self.assertIn('msgctxt "Special blocktranslate context #4"', po_contents) self.assertMsgId("Translatable literal #8d %(a)s", po_contents) # {% trans %} and {% blocktrans %} self.assertMsgId('trans text', po_contents) self.assertMsgId('blocktrans text', po_contents) def test_context_in_single_quotes(self): management.call_command('makemessages', locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() # {% translate %} self.assertIn('msgctxt "Context wrapped in double quotes"', po_contents) self.assertIn('msgctxt "Context wrapped in single quotes"', po_contents) # {% blocktranslate %} self.assertIn('msgctxt "Special blocktranslate context wrapped in double quotes"', po_contents) self.assertIn('msgctxt "Special blocktranslate context wrapped in single quotes"', po_contents) def test_template_comments(self): """Template comment tags on the same line of other constructs (#19552)""" # Test detection/end user reporting of old, incorrect templates # translator comments syntax with warnings.catch_warnings(record=True) as ws: warnings.simplefilter('always') management.call_command('makemessages', locale=[LOCALE], extensions=['thtml'], verbosity=0) self.assertEqual(len(ws), 3) for w in ws: self.assertTrue(issubclass(w.category, TranslatorCommentWarning)) self.assertRegex( str(ws[0].message), r"The translator-targeted comment 'Translators: ignored i18n " r"comment #1' \(file templates[/\\]comments.thtml, line 4\) " r"was ignored, because it wasn't the last item on the line\." ) self.assertRegex( str(ws[1].message), r"The translator-targeted comment 'Translators: ignored i18n " r"comment #3' \(file templates[/\\]comments.thtml, line 6\) " r"was ignored, because it wasn't the last item on the line\." ) self.assertRegex( str(ws[2].message), r"The translator-targeted comment 'Translators: ignored i18n " r"comment #4' \(file templates[/\\]comments.thtml, line 8\) " r"was ignored, because it wasn't the last item on the line\." 
) # Now test .po file contents self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertMsgId('Translatable literal #9a', po_contents) self.assertNotIn('ignored comment #1', po_contents) self.assertNotIn('Translators: ignored i18n comment #1', po_contents) self.assertMsgId("Translatable literal #9b", po_contents) self.assertNotIn('ignored i18n comment #2', po_contents) self.assertNotIn('ignored comment #2', po_contents) self.assertMsgId('Translatable literal #9c', po_contents) self.assertNotIn('ignored comment #3', po_contents) self.assertNotIn('ignored i18n comment #3', po_contents) self.assertMsgId('Translatable literal #9d', po_contents) self.assertNotIn('ignored comment #4', po_contents) self.assertMsgId('Translatable literal #9e', po_contents) self.assertNotIn('ignored comment #5', po_contents) self.assertNotIn('ignored i18n comment #4', po_contents) self.assertMsgId('Translatable literal #9f', po_contents) self.assertIn('#. Translators: valid i18n comment #5', po_contents) self.assertMsgId('Translatable literal #9g', po_contents) self.assertIn('#. Translators: valid i18n comment #6', po_contents) self.assertMsgId('Translatable literal #9h', po_contents) self.assertIn('#. Translators: valid i18n comment #7', po_contents) self.assertMsgId('Translatable literal #9i', po_contents) self.assertRegex(po_contents, r'#\..+Translators: valid i18n comment #8') self.assertRegex(po_contents, r'#\..+Translators: valid i18n comment #9') self.assertMsgId("Translatable literal #9j", po_contents) def test_makemessages_find_files(self): """ find_files only discover files having the proper extensions. """ cmd = MakeMessagesCommand() cmd.ignore_patterns = ['CVS', '.*', '*~', '*.pyc'] cmd.symlinks = False cmd.domain = 'django' cmd.extensions = ['html', 'txt', 'py'] cmd.verbosity = 0 cmd.locale_paths = [] cmd.default_locale_path = os.path.join(self.test_dir, 'locale') found_files = cmd.find_files(self.test_dir) found_exts = {os.path.splitext(tfile.file)[1] for tfile in found_files} self.assertEqual(found_exts.difference({'.py', '.html', '.txt'}), set()) cmd.extensions = ['js'] cmd.domain = 'djangojs' found_files = cmd.find_files(self.test_dir) found_exts = {os.path.splitext(tfile.file)[1] for tfile in found_files} self.assertEqual(found_exts.difference({'.js'}), set()) @mock.patch('django.core.management.commands.makemessages.popen_wrapper') def test_makemessages_gettext_version(self, mocked_popen_wrapper): # "Normal" output: mocked_popen_wrapper.return_value = ( "xgettext (GNU gettext-tools) 0.18.1\n" "Copyright (C) 1995-1998, 2000-2010 Free Software Foundation, Inc.\n" "License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>\n" "This is free software: you are free to change and redistribute it.\n" "There is NO WARRANTY, to the extent permitted by law.\n" "Written by Ulrich Drepper.\n", '', 0) cmd = MakeMessagesCommand() self.assertEqual(cmd.gettext_version, (0, 18, 1)) # Version number with only 2 parts (#23788) mocked_popen_wrapper.return_value = ( "xgettext (GNU gettext-tools) 0.17\n", '', 0) cmd = MakeMessagesCommand() self.assertEqual(cmd.gettext_version, (0, 17)) # Bad version output mocked_popen_wrapper.return_value = ( "any other return value\n", '', 0) cmd = MakeMessagesCommand() with self.assertRaisesMessage(CommandError, "Unable to get gettext version. 
Is it installed?"): cmd.gettext_version def test_po_file_encoding_when_updating(self): """ Update of PO file doesn't corrupt it with non-UTF-8 encoding on Windows (#23271). """ BR_PO_BASE = 'locale/pt_BR/LC_MESSAGES/django' shutil.copyfile(BR_PO_BASE + '.pristine', BR_PO_BASE + '.po') management.call_command('makemessages', locale=['pt_BR'], verbosity=0) self.assertTrue(os.path.exists(BR_PO_BASE + '.po')) with open(BR_PO_BASE + '.po', encoding='utf-8') as fp: po_contents = fp.read() self.assertMsgStr("Größe", po_contents) def test_pot_charset_header_is_utf8(self): """Content-Type: ... charset=CHARSET is replaced with charset=UTF-8""" msgs = ( '# SOME DESCRIPTIVE TITLE.\n' '# (some lines truncated as they are not relevant)\n' '"Content-Type: text/plain; charset=CHARSET\\n"\n' '"Content-Transfer-Encoding: 8bit\\n"\n' '\n' '#: somefile.py:8\n' 'msgid "mañana; charset=CHARSET"\n' 'msgstr ""\n' ) with tempfile.NamedTemporaryFile() as pot_file: pot_filename = pot_file.name write_pot_file(pot_filename, msgs) with open(pot_filename, encoding='utf-8') as fp: pot_contents = fp.read() self.assertIn('Content-Type: text/plain; charset=UTF-8', pot_contents) self.assertIn('mañana; charset=CHARSET', pot_contents) class JavaScriptExtractorTests(ExtractorTests): PO_FILE = 'locale/%s/LC_MESSAGES/djangojs.po' % LOCALE def test_javascript_literals(self): _, po_contents = self._run_makemessages(domain='djangojs') self.assertMsgId('This literal should be included.', po_contents) self.assertMsgId('gettext_noop should, too.', po_contents) self.assertMsgId('This one as well.', po_contents) self.assertMsgId(r'He said, \"hello\".', po_contents) self.assertMsgId("okkkk", po_contents) self.assertMsgId("TEXT", po_contents) self.assertMsgId("It's at http://example.com", po_contents) self.assertMsgId("String", po_contents) self.assertMsgId("/* but this one will be too */ 'cause there is no way of telling...", po_contents) self.assertMsgId("foo", po_contents) self.assertMsgId("bar", po_contents) self.assertMsgId("baz", po_contents) self.assertMsgId("quz", po_contents) self.assertMsgId("foobar", po_contents) def test_media_static_dirs_ignored(self): """ Regression test for #23583. """ with override_settings(STATIC_ROOT=os.path.join(self.test_dir, 'static/'), MEDIA_ROOT=os.path.join(self.test_dir, 'media_root/')): _, po_contents = self._run_makemessages(domain='djangojs') self.assertMsgId("Static content inside app should be included.", po_contents) self.assertNotMsgId("Content from STATIC_ROOT should not be included", po_contents) @override_settings(STATIC_ROOT=None, MEDIA_ROOT='') def test_default_root_settings(self): """ Regression test for #23717. 
""" _, po_contents = self._run_makemessages(domain='djangojs') self.assertMsgId("Static content inside app should be included.", po_contents) class IgnoredExtractorTests(ExtractorTests): def test_ignore_directory(self): out, po_contents = self._run_makemessages(ignore_patterns=[ os.path.join('ignore_dir', '*'), ]) self.assertIn("ignoring directory ignore_dir", out) self.assertMsgId('This literal should be included.', po_contents) self.assertNotMsgId('This should be ignored.', po_contents) def test_ignore_subdirectory(self): out, po_contents = self._run_makemessages(ignore_patterns=[ 'templates/*/ignore.html', 'templates/subdir/*', ]) self.assertIn("ignoring directory subdir", out) self.assertNotMsgId('This subdir should be ignored too.', po_contents) def test_ignore_file_patterns(self): out, po_contents = self._run_makemessages(ignore_patterns=[ 'xxx_*', ]) self.assertIn("ignoring file xxx_ignored.html", out) self.assertNotMsgId('This should be ignored too.', po_contents) def test_media_static_dirs_ignored(self): with override_settings(STATIC_ROOT=os.path.join(self.test_dir, 'static/'), MEDIA_ROOT=os.path.join(self.test_dir, 'media_root/')): out, _ = self._run_makemessages() self.assertIn("ignoring directory static", out) self.assertIn("ignoring directory media_root", out) class SymlinkExtractorTests(ExtractorTests): def setUp(self): super().setUp() self.symlinked_dir = os.path.join(self.test_dir, 'templates_symlinked') def test_symlink(self): if symlinks_supported(): os.symlink(os.path.join(self.test_dir, 'templates'), self.symlinked_dir) else: self.skipTest("os.symlink() not available on this OS + Python version combination.") management.call_command('makemessages', locale=[LOCALE], verbosity=0, symlinks=True) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertMsgId('This literal should be included.', po_contents) self.assertLocationCommentPresent(self.PO_FILE, None, 'templates_symlinked', 'test.html') class CopyPluralFormsExtractorTests(ExtractorTests): PO_FILE_ES = 'locale/es/LC_MESSAGES/django.po' def test_copy_plural_forms(self): management.call_command('makemessages', locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertIn('Plural-Forms: nplurals=2; plural=(n != 1)', po_contents) def test_override_plural_forms(self): """Ticket #20311.""" management.call_command('makemessages', locale=['es'], extensions=['djtpl'], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE_ES)) with open(self.PO_FILE_ES, encoding='utf-8') as fp: po_contents = fp.read() found = re.findall(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', po_contents, re.MULTILINE | re.DOTALL) self.assertEqual(1, len(found)) def test_translate_and_plural_blocktranslate_collision(self): """ Ensures a correct workaround for the gettext bug when handling a literal found inside a {% translate %} tag and also in another file inside a {% blocktranslate %} with a plural (#17375). 
""" management.call_command('makemessages', locale=[LOCALE], extensions=['html', 'djtpl'], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertNotIn("#-#-#-#-# django.pot (PACKAGE VERSION) #-#-#-#-#\\n", po_contents) self.assertMsgId('First `translate`, then `blocktranslate` with a plural', po_contents) self.assertMsgIdPlural('Plural for a `translate` and `blocktranslate` collision case', po_contents) class NoWrapExtractorTests(ExtractorTests): def test_no_wrap_enabled(self): management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_wrap=True) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertMsgId( 'This literal should also be included wrapped or not wrapped ' 'depending on the use of the --no-wrap option.', po_contents ) def test_no_wrap_disabled(self): management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_wrap=False) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertMsgId( '""\n"This literal should also be included wrapped or not ' 'wrapped depending on the "\n"use of the --no-wrap option."', po_contents, use_quotes=False ) class LocationCommentsTests(ExtractorTests): def test_no_location_enabled(self): """Behavior is correct if --no-location switch is specified. See #16903.""" management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_location=True) self.assertTrue(os.path.exists(self.PO_FILE)) self.assertLocationCommentNotPresent(self.PO_FILE, None, 'test.html') def test_no_location_disabled(self): """Behavior is correct if --no-location switch isn't specified.""" management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_location=False) self.assertTrue(os.path.exists(self.PO_FILE)) # #16903 -- Standard comment with source file relative path should be present self.assertLocationCommentPresent(self.PO_FILE, 'Translatable literal #6b', 'templates', 'test.html') def test_location_comments_for_templatized_files(self): """ Ensure no leaky paths in comments, e.g. #: path\to\file.html.py:123 Refs #21209/#26341. """ management.call_command('makemessages', locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertMsgId('#: templates/test.html.py', po_contents) self.assertLocationCommentNotPresent(self.PO_FILE, None, '.html.py') self.assertLocationCommentPresent(self.PO_FILE, 5, 'templates', 'test.html') @requires_gettext_019 def test_add_location_full(self): """makemessages --add-location=full""" management.call_command('makemessages', locale=[LOCALE], verbosity=0, add_location='full') self.assertTrue(os.path.exists(self.PO_FILE)) # Comment with source file relative path and line number is present. self.assertLocationCommentPresent(self.PO_FILE, 'Translatable literal #6b', 'templates', 'test.html') @requires_gettext_019 def test_add_location_file(self): """makemessages --add-location=file""" management.call_command('makemessages', locale=[LOCALE], verbosity=0, add_location='file') self.assertTrue(os.path.exists(self.PO_FILE)) # Comment with source file relative path is present. self.assertLocationCommentPresent(self.PO_FILE, None, 'templates', 'test.html') # But it should not contain the line number. 
self.assertLocationCommentNotPresent(self.PO_FILE, 'Translatable literal #6b', 'templates', 'test.html') @requires_gettext_019 def test_add_location_never(self): """makemessages --add-location=never""" management.call_command('makemessages', locale=[LOCALE], verbosity=0, add_location='never') self.assertTrue(os.path.exists(self.PO_FILE)) self.assertLocationCommentNotPresent(self.PO_FILE, None, 'test.html') @mock.patch('django.core.management.commands.makemessages.Command.gettext_version', new=(0, 18, 99)) def test_add_location_gettext_version_check(self): """ CommandError is raised when using makemessages --add-location with gettext < 0.19. """ msg = "The --add-location option requires gettext 0.19 or later. You have 0.18.99." with self.assertRaisesMessage(CommandError, msg): management.call_command('makemessages', locale=[LOCALE], verbosity=0, add_location='full') class KeepPotFileExtractorTests(ExtractorTests): POT_FILE = 'locale/django.pot' def test_keep_pot_disabled_by_default(self): management.call_command('makemessages', locale=[LOCALE], verbosity=0) self.assertFalse(os.path.exists(self.POT_FILE)) def test_keep_pot_explicitly_disabled(self): management.call_command('makemessages', locale=[LOCALE], verbosity=0, keep_pot=False) self.assertFalse(os.path.exists(self.POT_FILE)) def test_keep_pot_enabled(self): management.call_command('makemessages', locale=[LOCALE], verbosity=0, keep_pot=True) self.assertTrue(os.path.exists(self.POT_FILE)) class MultipleLocaleExtractionTests(ExtractorTests): PO_FILE_PT = 'locale/pt/LC_MESSAGES/django.po' PO_FILE_DE = 'locale/de/LC_MESSAGES/django.po' PO_FILE_KO = 'locale/ko/LC_MESSAGES/django.po' LOCALES = ['pt', 'de', 'ch'] def test_multiple_locales(self): management.call_command('makemessages', locale=['pt', 'de'], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE_PT)) self.assertTrue(os.path.exists(self.PO_FILE_DE)) def test_all_locales(self): """ When the `locale` flag is absent, all dirs from the parent locale dir are considered as language directories, except if the directory doesn't start with two letters (which excludes __pycache__, .gitignore, etc.). """ os.mkdir(os.path.join('locale', '_do_not_pick')) # Excluding locales that do not compile management.call_command('makemessages', exclude=['ja', 'es_AR'], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE_KO)) self.assertFalse(os.path.exists('locale/_do_not_pick/LC_MESSAGES/django.po')) class ExcludedLocaleExtractionTests(ExtractorTests): work_subdir = 'exclude' LOCALES = ['en', 'fr', 'it'] PO_FILE = 'locale/%s/LC_MESSAGES/django.po' def _set_times_for_all_po_files(self): """ Set access and modification times to the Unix epoch time for all the .po files. """ for locale in self.LOCALES: os.utime(self.PO_FILE % locale, (0, 0)) def setUp(self): super().setUp() copytree('canned_locale', 'locale') self._set_times_for_all_po_files() def test_command_help(self): with captured_stdout(), captured_stderr(): # `call_command` bypasses the parser; by calling # `execute_from_command_line` with the help subcommand we # ensure that there are no issues with the parser itself. 
execute_from_command_line(['django-admin', 'help', 'makemessages']) def test_one_locale_excluded(self): management.call_command('makemessages', exclude=['it'], verbosity=0) self.assertRecentlyModified(self.PO_FILE % 'en') self.assertRecentlyModified(self.PO_FILE % 'fr') self.assertNotRecentlyModified(self.PO_FILE % 'it') def test_multiple_locales_excluded(self): management.call_command('makemessages', exclude=['it', 'fr'], verbosity=0) self.assertRecentlyModified(self.PO_FILE % 'en') self.assertNotRecentlyModified(self.PO_FILE % 'fr') self.assertNotRecentlyModified(self.PO_FILE % 'it') def test_one_locale_excluded_with_locale(self): management.call_command('makemessages', locale=['en', 'fr'], exclude=['fr'], verbosity=0) self.assertRecentlyModified(self.PO_FILE % 'en') self.assertNotRecentlyModified(self.PO_FILE % 'fr') self.assertNotRecentlyModified(self.PO_FILE % 'it') def test_multiple_locales_excluded_with_locale(self): management.call_command('makemessages', locale=['en', 'fr', 'it'], exclude=['fr', 'it'], verbosity=0) self.assertRecentlyModified(self.PO_FILE % 'en') self.assertNotRecentlyModified(self.PO_FILE % 'fr') self.assertNotRecentlyModified(self.PO_FILE % 'it') class CustomLayoutExtractionTests(ExtractorTests): work_subdir = 'project_dir' def test_no_locale_raises(self): msg = ( "Unable to find a locale path to store translations for file " "__init__.py. Make sure the 'locale' directory exist in an app or " "LOCALE_PATHS setting is set." ) with self.assertRaisesMessage(management.CommandError, msg): management.call_command('makemessages', locale=[LOCALE], verbosity=0) def test_project_locale_paths(self): self._test_project_locale_paths(os.path.join(self.test_dir, 'project_locale')) def test_project_locale_paths_pathlib(self): self._test_project_locale_paths(Path(self.test_dir) / 'project_locale') def _test_project_locale_paths(self, locale_path): """ * translations for an app containing a locale folder are stored in that folder * translations outside of that app are in LOCALE_PATHS[0] """ with override_settings(LOCALE_PATHS=[locale_path]): management.call_command('makemessages', locale=[LOCALE], verbosity=0) project_de_locale = os.path.join( self.test_dir, 'project_locale', 'de', 'LC_MESSAGES', 'django.po') app_de_locale = os.path.join( self.test_dir, 'app_with_locale', 'locale', 'de', 'LC_MESSAGES', 'django.po') self.assertTrue(os.path.exists(project_de_locale)) self.assertTrue(os.path.exists(app_de_locale)) with open(project_de_locale) as fp: po_contents = fp.read() self.assertMsgId('This app has no locale directory', po_contents) self.assertMsgId('This is a project-level string', po_contents) with open(app_de_locale) as fp: po_contents = fp.read() self.assertMsgId('This app has a locale directory', po_contents) @skipUnless(has_xgettext, 'xgettext is mandatory for extraction tests') class NoSettingsExtractionTests(AdminScriptTestCase): def test_makemessages_no_settings(self): out, err = self.run_django_admin(['makemessages', '-l', 'en', '-v', '0']) self.assertNoOutput(err) self.assertNoOutput(out)
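

# A minimal sketch of the "run makemessages, then scan the generated .po file"
# pattern these extractor tests repeat, assuming it runs inside a working
# directory that contains a 'locale' folder (as RunInTmpDirMixin provides);
# the helper name is illustrative and not part of the command's API.
def _example_extract_and_find_msgid(msgid, locale=LOCALE):
    out = StringIO()
    management.call_command('makemessages', locale=[locale], verbosity=0, stdout=out)
    po_path = os.path.join('locale', locale, 'LC_MESSAGES', 'django.po')
    with open(po_path) as fp:
        po_contents = fp.read()
    # gettext writes each extracted literal as a line like: msgid "Some literal"
    return re.search(r'^msgid "%s"' % re.escape(msgid), po_contents, re.MULTILINE) is not None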
import datetime import decimal import gettext as gettext_module import os import pickle import re import tempfile from contextlib import contextmanager from importlib import import_module from pathlib import Path from unittest import mock from asgiref.local import Local from django import forms from django.apps import AppConfig from django.conf import settings from django.conf.locale import LANG_INFO from django.conf.urls.i18n import i18n_patterns from django.template import Context, Template from django.test import ( RequestFactory, SimpleTestCase, TestCase, override_settings, ) from django.utils import translation from django.utils.formats import ( date_format, get_format, get_format_modules, iter_format_modules, localize, localize_input, reset_format_cache, sanitize_separators, sanitize_strftime_format, time_format, ) from django.utils.numberformat import format as nformat from django.utils.safestring import SafeString, mark_safe from django.utils.translation import ( activate, check_for_language, deactivate, get_language, get_language_bidi, get_language_from_request, get_language_info, gettext, gettext_lazy, ngettext, ngettext_lazy, npgettext, npgettext_lazy, pgettext, round_away_from_one, to_language, to_locale, trans_null, trans_real, ) from django.utils.translation.reloader import ( translation_file_changed, watch_for_translation_changes, ) from .forms import CompanyForm, I18nForm, SelectDateForm from .models import Company, TestModel here = os.path.dirname(os.path.abspath(__file__)) extended_locale_paths = settings.LOCALE_PATHS + [ os.path.join(here, 'other', 'locale'), ] class AppModuleStub: def __init__(self, **kwargs): self.__dict__.update(kwargs) @contextmanager def patch_formats(lang, **settings): from django.utils.formats import _format_cache # Populate _format_cache with temporary values for key, value in settings.items(): _format_cache[(key, lang)] = value try: yield finally: reset_format_cache() class TranslationTests(SimpleTestCase): @translation.override('fr') def test_plural(self): """ Test plurals with ngettext. French differs from English in that 0 is singular. """ self.assertEqual(ngettext("%d year", "%d years", 0) % 0, "0 année") self.assertEqual(ngettext("%d year", "%d years", 2) % 2, "2 années") self.assertEqual(ngettext("%(size)d byte", "%(size)d bytes", 0) % {'size': 0}, "0 octet") self.assertEqual(ngettext("%(size)d byte", "%(size)d bytes", 2) % {'size': 2}, "2 octets") def test_plural_null(self): g = trans_null.ngettext self.assertEqual(g('%d year', '%d years', 0) % 0, '0 years') self.assertEqual(g('%d year', '%d years', 1) % 1, '1 year') self.assertEqual(g('%d year', '%d years', 2) % 2, '2 years') @override_settings(LOCALE_PATHS=extended_locale_paths) @translation.override('fr') def test_multiple_plurals_per_language(self): """ Normally, French has 2 plurals. As other/locale/fr/LC_MESSAGES/django.po has a different plural equation with 3 plurals, this tests if those plural are honored. """ self.assertEqual(ngettext("%d singular", "%d plural", 0) % 0, "0 pluriel1") self.assertEqual(ngettext("%d singular", "%d plural", 1) % 1, "1 singulier") self.assertEqual(ngettext("%d singular", "%d plural", 2) % 2, "2 pluriel2") french = trans_real.catalog() # Internal _catalog can query subcatalogs (from different po files). 
self.assertEqual(french._catalog[('%d singular', 0)], '%d singulier') self.assertEqual(french._catalog[('%d hour', 0)], '%d heure') def test_override(self): activate('de') try: with translation.override('pl'): self.assertEqual(get_language(), 'pl') self.assertEqual(get_language(), 'de') with translation.override(None): self.assertIsNone(get_language()) with translation.override('pl'): pass self.assertIsNone(get_language()) self.assertEqual(get_language(), 'de') finally: deactivate() def test_override_decorator(self): @translation.override('pl') def func_pl(): self.assertEqual(get_language(), 'pl') @translation.override(None) def func_none(): self.assertIsNone(get_language()) try: activate('de') func_pl() self.assertEqual(get_language(), 'de') func_none() self.assertEqual(get_language(), 'de') finally: deactivate() def test_override_exit(self): """ The language restored is the one used when the function was called, not the one used when the decorator was initialized (#23381). """ activate('fr') @translation.override('pl') def func_pl(): pass deactivate() try: activate('en') func_pl() self.assertEqual(get_language(), 'en') finally: deactivate() def test_lazy_objects(self): """ Format string interpolation should work with *_lazy objects. """ s = gettext_lazy('Add %(name)s') d = {'name': 'Ringo'} self.assertEqual('Add Ringo', s % d) with translation.override('de', deactivate=True): self.assertEqual('Ringo hinzuf\xfcgen', s % d) with translation.override('pl'): self.assertEqual('Dodaj Ringo', s % d) # It should be possible to compare *_lazy objects. s1 = gettext_lazy('Add %(name)s') self.assertEqual(s, s1) s2 = gettext_lazy('Add %(name)s') s3 = gettext_lazy('Add %(name)s') self.assertEqual(s2, s3) self.assertEqual(s, s2) s4 = gettext_lazy('Some other string') self.assertNotEqual(s, s4) def test_lazy_pickle(self): s1 = gettext_lazy("test") self.assertEqual(str(s1), "test") s2 = pickle.loads(pickle.dumps(s1)) self.assertEqual(str(s2), "test") @override_settings(LOCALE_PATHS=extended_locale_paths) def test_ngettext_lazy(self): simple_with_format = ngettext_lazy('%d good result', '%d good results') simple_context_with_format = npgettext_lazy('Exclamation', '%d good result', '%d good results') simple_without_format = ngettext_lazy('good result', 'good results') with translation.override('de'): self.assertEqual(simple_with_format % 1, '1 gutes Resultat') self.assertEqual(simple_with_format % 4, '4 guten Resultate') self.assertEqual(simple_context_with_format % 1, '1 gutes Resultat!') self.assertEqual(simple_context_with_format % 4, '4 guten Resultate!') self.assertEqual(simple_without_format % 1, 'gutes Resultat') self.assertEqual(simple_without_format % 4, 'guten Resultate') complex_nonlazy = ngettext_lazy('Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 4) complex_deferred = ngettext_lazy( 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 'num' ) complex_context_nonlazy = npgettext_lazy( 'Greeting', 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 4 ) complex_context_deferred = npgettext_lazy( 'Greeting', 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 'num' ) with translation.override('de'): self.assertEqual(complex_nonlazy % {'num': 4, 'name': 'Jim'}, 'Hallo Jim, 4 guten Resultate') self.assertEqual(complex_deferred % {'name': 'Jim', 'num': 1}, 'Hallo Jim, 1 gutes Resultat') self.assertEqual(complex_deferred % {'name': 'Jim', 'num': 5}, 'Hallo Jim, 5 guten Resultate') with 
self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_deferred % {'name': 'Jim'} self.assertEqual(complex_context_nonlazy % {'num': 4, 'name': 'Jim'}, 'Willkommen Jim, 4 guten Resultate') self.assertEqual(complex_context_deferred % {'name': 'Jim', 'num': 1}, 'Willkommen Jim, 1 gutes Resultat') self.assertEqual(complex_context_deferred % {'name': 'Jim', 'num': 5}, 'Willkommen Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_context_deferred % {'name': 'Jim'} @override_settings(LOCALE_PATHS=extended_locale_paths) def test_ngettext_lazy_format_style(self): simple_with_format = ngettext_lazy('{} good result', '{} good results') simple_context_with_format = npgettext_lazy('Exclamation', '{} good result', '{} good results') with translation.override('de'): self.assertEqual(simple_with_format.format(1), '1 gutes Resultat') self.assertEqual(simple_with_format.format(4), '4 guten Resultate') self.assertEqual(simple_context_with_format.format(1), '1 gutes Resultat!') self.assertEqual(simple_context_with_format.format(4), '4 guten Resultate!') complex_nonlazy = ngettext_lazy('Hi {name}, {num} good result', 'Hi {name}, {num} good results', 4) complex_deferred = ngettext_lazy( 'Hi {name}, {num} good result', 'Hi {name}, {num} good results', 'num' ) complex_context_nonlazy = npgettext_lazy( 'Greeting', 'Hi {name}, {num} good result', 'Hi {name}, {num} good results', 4 ) complex_context_deferred = npgettext_lazy( 'Greeting', 'Hi {name}, {num} good result', 'Hi {name}, {num} good results', 'num' ) with translation.override('de'): self.assertEqual(complex_nonlazy.format(num=4, name='Jim'), 'Hallo Jim, 4 guten Resultate') self.assertEqual(complex_deferred.format(name='Jim', num=1), 'Hallo Jim, 1 gutes Resultat') self.assertEqual(complex_deferred.format(name='Jim', num=5), 'Hallo Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_deferred.format(name='Jim') self.assertEqual(complex_context_nonlazy.format(num=4, name='Jim'), 'Willkommen Jim, 4 guten Resultate') self.assertEqual(complex_context_deferred.format(name='Jim', num=1), 'Willkommen Jim, 1 gutes Resultat') self.assertEqual(complex_context_deferred.format(name='Jim', num=5), 'Willkommen Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_context_deferred.format(name='Jim') def test_ngettext_lazy_bool(self): self.assertTrue(ngettext_lazy('%d good result', '%d good results')) self.assertFalse(ngettext_lazy('', '')) def test_ngettext_lazy_pickle(self): s1 = ngettext_lazy('%d good result', '%d good results') self.assertEqual(s1 % 1, '1 good result') self.assertEqual(s1 % 8, '8 good results') s2 = pickle.loads(pickle.dumps(s1)) self.assertEqual(s2 % 1, '1 good result') self.assertEqual(s2 % 8, '8 good results') @override_settings(LOCALE_PATHS=extended_locale_paths) def test_pgettext(self): trans_real._active = Local() trans_real._translations = {} with translation.override('de'): self.assertEqual(pgettext("unexisting", "May"), "May") self.assertEqual(pgettext("month name", "May"), "Mai") self.assertEqual(pgettext("verb", "May"), "Kann") self.assertEqual(npgettext("search", "%d result", "%d results", 4) % 4, "4 Resultate") def test_empty_value(self): """Empty value must stay empty after being translated (#23196).""" with translation.override('de'): self.assertEqual('', gettext('')) s = mark_safe('') self.assertEqual(s, gettext(s)) @override_settings(LOCALE_PATHS=extended_locale_paths) 
def test_safe_status(self): """ Translating a string requiring no auto-escaping with gettext or pgettext shouldn't change the "safe" status. """ trans_real._active = Local() trans_real._translations = {} s1 = mark_safe('Password') s2 = mark_safe('May') with translation.override('de', deactivate=True): self.assertIs(type(gettext(s1)), SafeString) self.assertIs(type(pgettext('month name', s2)), SafeString) self.assertEqual('aPassword', SafeString('a') + s1) self.assertEqual('Passworda', s1 + SafeString('a')) self.assertEqual('Passworda', s1 + mark_safe('a')) self.assertEqual('aPassword', mark_safe('a') + s1) self.assertEqual('as', mark_safe('a') + mark_safe('s')) def test_maclines(self): """ Translations on files with Mac or DOS end of lines will be converted to unix EOF in .po catalogs. """ ca_translation = trans_real.translation('ca') ca_translation._catalog['Mac\nEOF\n'] = 'Catalan Mac\nEOF\n' ca_translation._catalog['Win\nEOF\n'] = 'Catalan Win\nEOF\n' with translation.override('ca', deactivate=True): self.assertEqual('Catalan Mac\nEOF\n', gettext('Mac\rEOF\r')) self.assertEqual('Catalan Win\nEOF\n', gettext('Win\r\nEOF\r\n')) def test_to_locale(self): tests = ( ('en', 'en'), ('EN', 'en'), ('en-us', 'en_US'), ('EN-US', 'en_US'), ('en_US', 'en_US'), # With > 2 characters after the dash. ('sr-latn', 'sr_Latn'), ('sr-LATN', 'sr_Latn'), ('sr_Latn', 'sr_Latn'), # 3-char language codes. ('ber-MA', 'ber_MA'), ('BER-MA', 'ber_MA'), ('BER_MA', 'ber_MA'), ('ber_MA', 'ber_MA'), # With private use subtag (x-informal). ('nl-nl-x-informal', 'nl_NL-x-informal'), ('NL-NL-X-INFORMAL', 'nl_NL-x-informal'), ('sr-latn-x-informal', 'sr_Latn-x-informal'), ('SR-LATN-X-INFORMAL', 'sr_Latn-x-informal'), ) for lang, locale in tests: with self.subTest(lang=lang): self.assertEqual(to_locale(lang), locale) def test_to_language(self): self.assertEqual(to_language('en_US'), 'en-us') self.assertEqual(to_language('sr_Lat'), 'sr-lat') def test_language_bidi(self): self.assertIs(get_language_bidi(), False) with translation.override(None): self.assertIs(get_language_bidi(), False) def test_language_bidi_null(self): self.assertIs(trans_null.get_language_bidi(), False) with override_settings(LANGUAGE_CODE='he'): self.assertIs(get_language_bidi(), True) class TranslationLoadingTests(SimpleTestCase): def setUp(self): """Clear translation state.""" self._old_language = get_language() self._old_translations = trans_real._translations deactivate() trans_real._translations = {} def tearDown(self): trans_real._translations = self._old_translations activate(self._old_language) @override_settings( USE_I18N=True, LANGUAGE_CODE='en', LANGUAGES=[ ('en', 'English'), ('en-ca', 'English (Canada)'), ('en-nz', 'English (New Zealand)'), ('en-au', 'English (Australia)'), ], LOCALE_PATHS=[os.path.join(here, 'loading')], INSTALLED_APPS=['i18n.loading_app'], ) def test_translation_loading(self): """ "loading_app" does not have translations for all languages provided by "loading". Catalogs are merged correctly. """ tests = [ ('en', 'local country person'), ('en_AU', 'aussie'), ('en_NZ', 'kiwi'), ('en_CA', 'canuck'), ] # Load all relevant translations. for language, _ in tests: activate(language) # Catalogs are merged correctly. 
for language, nickname in tests: with self.subTest(language=language): activate(language) self.assertEqual(gettext('local country person'), nickname) class TranslationThreadSafetyTests(SimpleTestCase): def setUp(self): self._old_language = get_language() self._translations = trans_real._translations # here we rely on .split() being called inside the _fetch() # in trans_real.translation() class sideeffect_str(str): def split(self, *args, **kwargs): res = str.split(self, *args, **kwargs) trans_real._translations['en-YY'] = None return res trans_real._translations = {sideeffect_str('en-XX'): None} def tearDown(self): trans_real._translations = self._translations activate(self._old_language) def test_bug14894_translation_activate_thread_safety(self): translation_count = len(trans_real._translations) # May raise RuntimeError if translation.activate() isn't thread-safe. translation.activate('pl') # make sure sideeffect_str actually added a new translation self.assertLess(translation_count, len(trans_real._translations)) @override_settings(USE_L10N=True) class FormattingTests(SimpleTestCase): def setUp(self): super().setUp() self.n = decimal.Decimal('66666.666') self.f = 99999.999 self.d = datetime.date(2009, 12, 31) self.dt = datetime.datetime(2009, 12, 31, 20, 50) self.t = datetime.time(10, 15, 48) self.long = 10000 self.ctxt = Context({ 'n': self.n, 't': self.t, 'd': self.d, 'dt': self.dt, 'f': self.f, 'l': self.long, }) def test_all_format_strings(self): all_locales = LANG_INFO.keys() some_date = datetime.date(2017, 10, 14) some_datetime = datetime.datetime(2017, 10, 14, 10, 23) for locale in all_locales: with self.subTest(locale=locale), translation.override(locale): self.assertIn('2017', date_format(some_date)) # Uses DATE_FORMAT by default self.assertIn('23', time_format(some_datetime)) # Uses TIME_FORMAT by default self.assertIn('2017', date_format(some_datetime, format=get_format('DATETIME_FORMAT'))) self.assertIn('2017', date_format(some_date, format=get_format('YEAR_MONTH_FORMAT'))) self.assertIn('14', date_format(some_date, format=get_format('MONTH_DAY_FORMAT'))) self.assertIn('2017', date_format(some_date, format=get_format('SHORT_DATE_FORMAT'))) self.assertIn('2017', date_format(some_datetime, format=get_format('SHORT_DATETIME_FORMAT'))) def test_locale_independent(self): """ Localization of numbers """ with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666.66', nformat(self.n, decimal_sep='.', decimal_pos=2, grouping=3, thousand_sep=',')) self.assertEqual('66666A6', nformat(self.n, decimal_sep='A', decimal_pos=1, grouping=1, thousand_sep='B')) self.assertEqual('66666', nformat(self.n, decimal_sep='X', decimal_pos=0, grouping=1, thousand_sep='Y')) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual( '66,666.66', nformat(self.n, decimal_sep='.', decimal_pos=2, grouping=3, thousand_sep=',') ) self.assertEqual( '6B6B6B6B6A6', nformat(self.n, decimal_sep='A', decimal_pos=1, grouping=1, thousand_sep='B') ) self.assertEqual('-66666.6', nformat(-66666.666, decimal_sep='.', decimal_pos=1)) self.assertEqual('-66666.0', nformat(int('-66666'), decimal_sep='.', decimal_pos=1)) self.assertEqual('10000.0', nformat(self.long, decimal_sep='.', decimal_pos=1)) self.assertEqual( '10,00,00,000.00', nformat(100000000.00, decimal_sep='.', decimal_pos=2, grouping=(3, 2, 0), thousand_sep=',') ) self.assertEqual( '1,0,00,000,0000.00', nformat(10000000000.00, decimal_sep='.', decimal_pos=2, grouping=(4, 3, 2, 1, 0), thousand_sep=',') ) self.assertEqual( '10000,00,000.00', 
nformat(1000000000.00, decimal_sep='.', decimal_pos=2, grouping=(3, 2, -1), thousand_sep=',') ) # This unusual grouping/force_grouping combination may be triggered by the intcomma filter (#17414) self.assertEqual( '10000', nformat(self.long, decimal_sep='.', decimal_pos=0, grouping=0, force_grouping=True) ) # date filter self.assertEqual('31.12.2009 в 20:50', Template('{{ dt|date:"d.m.Y в H:i" }}').render(self.ctxt)) self.assertEqual('⌚ 10:15', Template('{{ t|time:"⌚ H:i" }}').render(self.ctxt)) @override_settings(USE_L10N=False) def test_l10n_disabled(self): """ Catalan locale with format i18n disabled translations will be used, but not formats """ with translation.override('ca', deactivate=True): self.maxDiff = 3000 self.assertEqual('N j, Y', get_format('DATE_FORMAT')) self.assertEqual(0, get_format('FIRST_DAY_OF_WEEK')) self.assertEqual('.', get_format('DECIMAL_SEPARATOR')) self.assertEqual('10:15 a.m.', time_format(self.t)) self.assertEqual('Des. 31, 2009', date_format(self.d)) self.assertEqual('desembre 2009', date_format(self.d, 'YEAR_MONTH_FORMAT')) self.assertEqual('12/31/2009 8:50 p.m.', date_format(self.dt, 'SHORT_DATETIME_FORMAT')) self.assertEqual('No localizable', localize('No localizable')) self.assertEqual('66666.666', localize(self.n)) self.assertEqual('99999.999', localize(self.f)) self.assertEqual('10000', localize(self.long)) self.assertEqual('Des. 31, 2009', localize(self.d)) self.assertEqual('Des. 31, 2009, 8:50 p.m.', localize(self.dt)) self.assertEqual('66666.666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99999.999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('Des. 31, 2009', Template('{{ d }}').render(self.ctxt)) self.assertEqual('Des. 31, 2009, 8:50 p.m.', Template('{{ dt }}').render(self.ctxt)) self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt)) self.assertEqual( '66666.67', Template('{{ n|floatformat:"2g" }}').render(self.ctxt), ) self.assertEqual( '100000.0', Template('{{ f|floatformat:"g" }}').render(self.ctxt), ) self.assertEqual('10:15 a.m.', Template('{{ t|time:"TIME_FORMAT" }}').render(self.ctxt)) self.assertEqual('12/31/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt)) self.assertEqual( '12/31/2009 8:50 p.m.', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt) ) form = I18nForm({ 'decimal_field': '66666,666', 'float_field': '99999,999', 'date_field': '31/12/2009', 'datetime_field': '31/12/2009 20:50', 'time_field': '20:50', 'integer_field': '1.234', }) self.assertFalse(form.is_valid()) self.assertEqual(['Introdu\xefu un n\xfamero.'], form.errors['float_field']) self.assertEqual(['Introdu\xefu un n\xfamero.'], form.errors['decimal_field']) self.assertEqual(['Introdu\xefu una data v\xe0lida.'], form.errors['date_field']) self.assertEqual(['Introdu\xefu una data/hora v\xe0lides.'], form.errors['datetime_field']) self.assertEqual(['Introdu\xefu un n\xfamero enter.'], form.errors['integer_field']) form2 = SelectDateForm({ 'date_field_month': '12', 'date_field_day': '31', 'date_field_year': '2009' }) self.assertTrue(form2.is_valid()) self.assertEqual(datetime.date(2009, 12, 31), form2.cleaned_data['date_field']) self.assertHTMLEqual( '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">gener</option>' '<option value="2">febrer</option>' '<option value="3">mar\xe7</option>' '<option value="4">abril</option>' '<option 
value="5">maig</option>' '<option value="6">juny</option>' '<option value="7">juliol</option>' '<option value="8">agost</option>' '<option value="9">setembre</option>' '<option value="10">octubre</option>' '<option value="11">novembre</option>' '<option value="12" selected>desembre</option>' '</select>' '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) # We shouldn't change the behavior of the floatformat filter re: # thousand separator and grouping when USE_L10N is False even # if the USE_THOUSAND_SEPARATOR, NUMBER_GROUPING and # THOUSAND_SEPARATOR settings are specified with self.settings(USE_THOUSAND_SEPARATOR=True, NUMBER_GROUPING=1, THOUSAND_SEPARATOR='!'): self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt)) def test_false_like_locale_formats(self): """ The active locale's formats take precedence over the default settings even if they would be interpreted as False in a conditional test (e.g. 0 or empty string) (#16938). """ with translation.override('fr'): with self.settings(USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR='!'): self.assertEqual('\xa0', get_format('THOUSAND_SEPARATOR')) # Even a second time (after the format has been cached)... self.assertEqual('\xa0', get_format('THOUSAND_SEPARATOR')) with self.settings(FIRST_DAY_OF_WEEK=0): self.assertEqual(1, get_format('FIRST_DAY_OF_WEEK')) # Even a second time (after the format has been cached)... 
self.assertEqual(1, get_format('FIRST_DAY_OF_WEEK')) def test_l10n_enabled(self): self.maxDiff = 3000 # Catalan locale with translation.override('ca', deactivate=True): self.assertEqual(r'j \d\e F \d\e Y', get_format('DATE_FORMAT')) self.assertEqual(1, get_format('FIRST_DAY_OF_WEEK')) self.assertEqual(',', get_format('DECIMAL_SEPARATOR')) self.assertEqual('10:15', time_format(self.t)) self.assertEqual('31 de desembre de 2009', date_format(self.d)) self.assertEqual('desembre del 2009', date_format(self.d, 'YEAR_MONTH_FORMAT')) self.assertEqual('31/12/2009 20:50', date_format(self.dt, 'SHORT_DATETIME_FORMAT')) self.assertEqual('No localizable', localize('No localizable')) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66.666,666', localize(self.n)) self.assertEqual('99.999,999', localize(self.f)) self.assertEqual('10.000', localize(self.long)) self.assertEqual('True', localize(True)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666,666', localize(self.n)) self.assertEqual('99999,999', localize(self.f)) self.assertEqual('10000', localize(self.long)) self.assertEqual('31 de desembre de 2009', localize(self.d)) self.assertEqual('31 de desembre de 2009 a les 20:50', localize(self.dt)) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66.666,666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99.999,999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('10.000', Template('{{ l }}').render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=True): form3 = I18nForm({ 'decimal_field': '66.666,666', 'float_field': '99.999,999', 'date_field': '31/12/2009', 'datetime_field': '31/12/2009 20:50', 'time_field': '20:50', 'integer_field': '1.234', }) self.assertTrue(form3.is_valid()) self.assertEqual(decimal.Decimal('66666.666'), form3.cleaned_data['decimal_field']) self.assertEqual(99999.999, form3.cleaned_data['float_field']) self.assertEqual(datetime.date(2009, 12, 31), form3.cleaned_data['date_field']) self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form3.cleaned_data['datetime_field']) self.assertEqual(datetime.time(20, 50), form3.cleaned_data['time_field']) self.assertEqual(1234, form3.cleaned_data['integer_field']) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666,666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99999,999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('31 de desembre de 2009', Template('{{ d }}').render(self.ctxt)) self.assertEqual('31 de desembre de 2009 a les 20:50', Template('{{ dt }}').render(self.ctxt)) self.assertEqual('66666,67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000,0', Template('{{ f|floatformat }}').render(self.ctxt)) self.assertEqual( '66.666,67', Template('{{ n|floatformat:"2g" }}').render(self.ctxt), ) self.assertEqual( '100.000,0', Template('{{ f|floatformat:"g" }}').render(self.ctxt), ) self.assertEqual('10:15', Template('{{ t|time:"TIME_FORMAT" }}').render(self.ctxt)) self.assertEqual('31/12/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt)) self.assertEqual( '31/12/2009 20:50', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt) ) self.assertEqual(date_format(datetime.datetime.now(), "DATE_FORMAT"), Template('{% now "DATE_FORMAT" %}').render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=False): form4 = I18nForm({ 'decimal_field': '66666,666', 'float_field': '99999,999', 'date_field': '31/12/2009', 'datetime_field': '31/12/2009 20:50', 
'time_field': '20:50', 'integer_field': '1234', }) self.assertTrue(form4.is_valid()) self.assertEqual(decimal.Decimal('66666.666'), form4.cleaned_data['decimal_field']) self.assertEqual(99999.999, form4.cleaned_data['float_field']) self.assertEqual(datetime.date(2009, 12, 31), form4.cleaned_data['date_field']) self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form4.cleaned_data['datetime_field']) self.assertEqual(datetime.time(20, 50), form4.cleaned_data['time_field']) self.assertEqual(1234, form4.cleaned_data['integer_field']) form5 = SelectDateForm({ 'date_field_month': '12', 'date_field_day': '31', 'date_field_year': '2009' }) self.assertTrue(form5.is_valid()) self.assertEqual(datetime.date(2009, 12, 31), form5.cleaned_data['date_field']) self.assertHTMLEqual( '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">gener</option>' '<option value="2">febrer</option>' '<option value="3">mar\xe7</option>' '<option value="4">abril</option>' '<option value="5">maig</option>' '<option value="6">juny</option>' '<option value="7">juliol</option>' '<option value="8">agost</option>' '<option value="9">setembre</option>' '<option value="10">octubre</option>' '<option value="11">novembre</option>' '<option value="12" selected>desembre</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) # Russian locale (with E as month) with translation.override('ru', deactivate=True): self.assertHTMLEqual( '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option 
value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">\u042f\u043d\u0432\u0430\u0440\u044c</option>' '<option value="2">\u0424\u0435\u0432\u0440\u0430\u043b\u044c</option>' '<option value="3">\u041c\u0430\u0440\u0442</option>' '<option value="4">\u0410\u043f\u0440\u0435\u043b\u044c</option>' '<option value="5">\u041c\u0430\u0439</option>' '<option value="6">\u0418\u044e\u043d\u044c</option>' '<option value="7">\u0418\u044e\u043b\u044c</option>' '<option value="8">\u0410\u0432\u0433\u0443\u0441\u0442</option>' '<option value="9">\u0421\u0435\u043d\u0442\u044f\u0431\u0440\u044c</option>' '<option value="10">\u041e\u043a\u0442\u044f\u0431\u0440\u044c</option>' '<option value="11">\u041d\u043e\u044f\u0431\u0440\u044c</option>' '<option value="12" selected>\u0414\u0435\u043a\u0430\u0431\u0440\u044c</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) # English locale with translation.override('en', deactivate=True): self.assertEqual('N j, Y', get_format('DATE_FORMAT')) self.assertEqual(0, get_format('FIRST_DAY_OF_WEEK')) self.assertEqual('.', get_format('DECIMAL_SEPARATOR')) self.assertEqual('Dec. 31, 2009', date_format(self.d)) self.assertEqual('December 2009', date_format(self.d, 'YEAR_MONTH_FORMAT')) self.assertEqual('12/31/2009 8:50 p.m.', date_format(self.dt, 'SHORT_DATETIME_FORMAT')) self.assertEqual('No localizable', localize('No localizable')) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66,666.666', localize(self.n)) self.assertEqual('99,999.999', localize(self.f)) self.assertEqual('10,000', localize(self.long)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666.666', localize(self.n)) self.assertEqual('99999.999', localize(self.f)) self.assertEqual('10000', localize(self.long)) self.assertEqual('Dec. 31, 2009', localize(self.d)) self.assertEqual('Dec. 
31, 2009, 8:50 p.m.', localize(self.dt)) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66,666.666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99,999.999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('10,000', Template('{{ l }}').render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666.666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99999.999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('Dec. 31, 2009', Template('{{ d }}').render(self.ctxt)) self.assertEqual('Dec. 31, 2009, 8:50 p.m.', Template('{{ dt }}').render(self.ctxt)) self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt)) self.assertEqual( '66,666.67', Template('{{ n|floatformat:"2g" }}').render(self.ctxt), ) self.assertEqual( '100,000.0', Template('{{ f|floatformat:"g" }}').render(self.ctxt), ) self.assertEqual('12/31/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt)) self.assertEqual( '12/31/2009 8:50 p.m.', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt) ) form5 = I18nForm({ 'decimal_field': '66666.666', 'float_field': '99999.999', 'date_field': '12/31/2009', 'datetime_field': '12/31/2009 20:50', 'time_field': '20:50', 'integer_field': '1234', }) self.assertTrue(form5.is_valid()) self.assertEqual(decimal.Decimal('66666.666'), form5.cleaned_data['decimal_field']) self.assertEqual(99999.999, form5.cleaned_data['float_field']) self.assertEqual(datetime.date(2009, 12, 31), form5.cleaned_data['date_field']) self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form5.cleaned_data['datetime_field']) self.assertEqual(datetime.time(20, 50), form5.cleaned_data['time_field']) self.assertEqual(1234, form5.cleaned_data['integer_field']) form6 = SelectDateForm({ 'date_field_month': '12', 'date_field_day': '31', 'date_field_year': '2009' }) self.assertTrue(form6.is_valid()) self.assertEqual(datetime.date(2009, 12, 31), form6.cleaned_data['date_field']) self.assertHTMLEqual( '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">January</option>' '<option value="2">February</option>' '<option value="3">March</option>' '<option value="4">April</option>' '<option value="5">May</option>' '<option value="6">June</option>' '<option value="7">July</option>' '<option value="8">August</option>' '<option value="9">September</option>' '<option value="10">October</option>' '<option value="11">November</option>' '<option value="12" selected>December</option>' '</select>' '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option 
value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) def test_sub_locales(self): """ Check if sublocales fall back to the main locale """ with self.settings(USE_THOUSAND_SEPARATOR=True): with translation.override('de-at', deactivate=True): self.assertEqual('66.666,666', Template('{{ n }}').render(self.ctxt)) with translation.override('es-us', deactivate=True): self.assertEqual('31 de Diciembre de 2009', date_format(self.d)) def test_localized_input(self): """ Tests if form input is correctly localized """ self.maxDiff = 1200 with translation.override('de-at', deactivate=True): form6 = CompanyForm({ 'name': 'acme', 'date_added': datetime.datetime(2009, 12, 31, 6, 0, 0), 'cents_paid': decimal.Decimal('59.47'), 'products_delivered': 12000, }) self.assertTrue(form6.is_valid()) self.assertHTMLEqual( form6.as_ul(), '<li><label for="id_name">Name:</label>' '<input id="id_name" type="text" name="name" value="acme" maxlength="50" required></li>' '<li><label for="id_date_added">Date added:</label>' '<input type="text" name="date_added" value="31.12.2009 06:00:00" id="id_date_added" required></li>' '<li><label for="id_cents_paid">Cents paid:</label>' '<input type="text" name="cents_paid" value="59,47" id="id_cents_paid" required></li>' '<li><label for="id_products_delivered">Products delivered:</label>' '<input type="text" name="products_delivered" value="12000" id="id_products_delivered" required>' '</li>' ) self.assertEqual(localize_input(datetime.datetime(2009, 12, 31, 6, 0, 0)), '31.12.2009 06:00:00') self.assertEqual(datetime.datetime(2009, 12, 31, 6, 0, 0), form6.cleaned_data['date_added']) with self.settings(USE_THOUSAND_SEPARATOR=True): # Checking for the localized "products_delivered" field self.assertInHTML( '<input type="text" name="products_delivered" ' 'value="12.000" id="id_products_delivered" required>', form6.as_ul() ) def test_localized_input_func(self): tests = ( (True, 'True'), (datetime.date(1, 1, 1), '0001-01-01'), (datetime.datetime(1, 1, 1), '0001-01-01 00:00:00'), ) with self.settings(USE_THOUSAND_SEPARATOR=True): for value, expected in tests: with self.subTest(value=value): self.assertEqual(localize_input(value), expected) def test_sanitize_strftime_format(self): for year in (1, 99, 999, 1000): dt = datetime.date(year, 1, 1) for fmt, expected in [ ('%C', '%02d' % (year // 100)), ('%F', '%04d-01-01' % year), ('%G', '%04d' % year), ('%Y', '%04d' % year), ]: with self.subTest(year=year, fmt=fmt): fmt = sanitize_strftime_format(fmt) self.assertEqual(dt.strftime(fmt), expected) def test_sanitize_strftime_format_with_escaped_percent(self): dt = datetime.date(1, 1, 1) for fmt, expected in [ ('%%C', '%C'), ('%%F', '%F'), ('%%G', '%G'), ('%%Y', '%Y'), ('%%%%C', '%%C'), ('%%%%F', '%%F'), ('%%%%G', '%%G'), ('%%%%Y', '%%Y'), ]: with self.subTest(fmt=fmt): fmt = 
sanitize_strftime_format(fmt) self.assertEqual(dt.strftime(fmt), expected) for year in (1, 99, 999, 1000): dt = datetime.date(year, 1, 1) for fmt, expected in [ ('%%%C', '%%%02d' % (year // 100)), ('%%%F', '%%%04d-01-01' % year), ('%%%G', '%%%04d' % year), ('%%%Y', '%%%04d' % year), ('%%%%%C', '%%%%%02d' % (year // 100)), ('%%%%%F', '%%%%%04d-01-01' % year), ('%%%%%G', '%%%%%04d' % year), ('%%%%%Y', '%%%%%04d' % year), ]: with self.subTest(year=year, fmt=fmt): fmt = sanitize_strftime_format(fmt) self.assertEqual(dt.strftime(fmt), expected) def test_sanitize_separators(self): """ Tests django.utils.formats.sanitize_separators. """ # Non-strings are untouched self.assertEqual(sanitize_separators(123), 123) with translation.override('ru', deactivate=True): # Russian locale has non-breaking space (\xa0) as thousand separator # Usual space is accepted too when sanitizing inputs with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual(sanitize_separators('1\xa0234\xa0567'), '1234567') self.assertEqual(sanitize_separators('77\xa0777,777'), '77777.777') self.assertEqual(sanitize_separators('12 345'), '12345') self.assertEqual(sanitize_separators('77 777,777'), '77777.777') with self.settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=False): self.assertEqual(sanitize_separators('12\xa0345'), '12\xa0345') with self.settings(USE_THOUSAND_SEPARATOR=True): with patch_formats(get_language(), THOUSAND_SEPARATOR='.', DECIMAL_SEPARATOR=','): self.assertEqual(sanitize_separators('10.234'), '10234') # Suspicion that user entered dot as decimal separator (#22171) self.assertEqual(sanitize_separators('10.10'), '10.10') with self.settings(USE_L10N=False, DECIMAL_SEPARATOR=','): self.assertEqual(sanitize_separators('1001,10'), '1001.10') self.assertEqual(sanitize_separators('1001.10'), '1001.10') with self.settings( USE_L10N=False, DECIMAL_SEPARATOR=',', USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR='.' ): self.assertEqual(sanitize_separators('1.001,10'), '1001.10') self.assertEqual(sanitize_separators('1001,10'), '1001.10') self.assertEqual(sanitize_separators('1001.10'), '1001.10') self.assertEqual(sanitize_separators('1,001.10'), '1.001.10') # Invalid output def test_iter_format_modules(self): """ Tests the iter_format_modules function. """ # Importing some format modules so that we can compare the returned # modules with these expected modules default_mod = import_module('django.conf.locale.de.formats') test_mod = import_module('i18n.other.locale.de.formats') test_mod2 = import_module('i18n.other2.locale.de.formats') with translation.override('de-at', deactivate=True): # Should return the correct default module when no setting is set self.assertEqual(list(iter_format_modules('de')), [default_mod]) # When the setting is a string, should return the given module and # the default module self.assertEqual( list(iter_format_modules('de', 'i18n.other.locale')), [test_mod, default_mod]) # When setting is a list of strings, should return the given # modules and the default module self.assertEqual( list(iter_format_modules('de', ['i18n.other.locale', 'i18n.other2.locale'])), [test_mod, test_mod2, default_mod]) def test_iter_format_modules_stability(self): """ Tests the iter_format_modules function always yields format modules in a stable and correct order in presence of both base ll and ll_CC formats. 
""" en_format_mod = import_module('django.conf.locale.en.formats') en_gb_format_mod = import_module('django.conf.locale.en_GB.formats') self.assertEqual(list(iter_format_modules('en-gb')), [en_gb_format_mod, en_format_mod]) def test_get_format_modules_lang(self): with translation.override('de', deactivate=True): self.assertEqual('.', get_format('DECIMAL_SEPARATOR', lang='en')) def test_get_format_modules_stability(self): with self.settings(FORMAT_MODULE_PATH='i18n.other.locale'): with translation.override('de', deactivate=True): old = "%r" % get_format_modules(reverse=True) new = "%r" % get_format_modules(reverse=True) # second try self.assertEqual(new, old, 'Value returned by get_formats_modules() must be preserved between calls.') def test_localize_templatetag_and_filter(self): """ Test the {% localize %} templatetag and the localize/unlocalize filters. """ context = Context({'int': 1455, 'float': 3.14, 'date': datetime.date(2016, 12, 31)}) template1 = Template( '{% load l10n %}{% localize %}{{ int }}/{{ float }}/{{ date }}{% endlocalize %}; ' '{% localize on %}{{ int }}/{{ float }}/{{ date }}{% endlocalize %}' ) template2 = Template( '{% load l10n %}{{ int }}/{{ float }}/{{ date }}; ' '{% localize off %}{{ int }}/{{ float }}/{{ date }};{% endlocalize %} ' '{{ int }}/{{ float }}/{{ date }}' ) template3 = Template( '{% load l10n %}{{ int }}/{{ float }}/{{ date }}; ' '{{ int|unlocalize }}/{{ float|unlocalize }}/{{ date|unlocalize }}' ) template4 = Template( '{% load l10n %}{{ int }}/{{ float }}/{{ date }}; ' '{{ int|localize }}/{{ float|localize }}/{{ date|localize }}' ) expected_localized = '1.455/3,14/31. Dezember 2016' expected_unlocalized = '1455/3.14/Dez. 31, 2016' output1 = '; '.join([expected_localized, expected_localized]) output2 = '; '.join([expected_localized, expected_unlocalized, expected_localized]) output3 = '; '.join([expected_localized, expected_unlocalized]) output4 = '; '.join([expected_unlocalized, expected_localized]) with translation.override('de', deactivate=True): with self.settings(USE_L10N=False, USE_THOUSAND_SEPARATOR=True): self.assertEqual(template1.render(context), output1) self.assertEqual(template4.render(context), output4) with self.settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True): self.assertEqual(template1.render(context), output1) self.assertEqual(template2.render(context), output2) self.assertEqual(template3.render(context), output3) def test_localized_off_numbers(self): """A string representation is returned for unlocalized numbers.""" template = Template( '{% load l10n %}{% localize off %}' '{{ int }}/{{ float }}/{{ decimal }}{% endlocalize %}' ) context = Context( {'int': 1455, 'float': 3.14, 'decimal': decimal.Decimal('24.1567')} ) for use_l10n in [True, False]: with self.subTest(use_l10n=use_l10n), self.settings( USE_L10N=use_l10n, DECIMAL_SEPARATOR=',', USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR='°', NUMBER_GROUPING=2, ): self.assertEqual(template.render(context), '1455/3.14/24.1567') def test_localized_as_text_as_hidden_input(self): """ Tests if form input with 'as_hidden' or 'as_text' is correctly localized. 
Ticket #18777 """ self.maxDiff = 1200 with translation.override('de-at', deactivate=True): template = Template('{% load l10n %}{{ form.date_added }}; {{ form.cents_paid }}') template_as_text = Template('{% load l10n %}{{ form.date_added.as_text }}; {{ form.cents_paid.as_text }}') template_as_hidden = Template( '{% load l10n %}{{ form.date_added.as_hidden }}; {{ form.cents_paid.as_hidden }}' ) form = CompanyForm({ 'name': 'acme', 'date_added': datetime.datetime(2009, 12, 31, 6, 0, 0), 'cents_paid': decimal.Decimal('59.47'), 'products_delivered': 12000, }) context = Context({'form': form}) self.assertTrue(form.is_valid()) self.assertHTMLEqual( template.render(context), '<input id="id_date_added" name="date_added" type="text" value="31.12.2009 06:00:00" required>;' '<input id="id_cents_paid" name="cents_paid" type="text" value="59,47" required>' ) self.assertHTMLEqual( template_as_text.render(context), '<input id="id_date_added" name="date_added" type="text" value="31.12.2009 06:00:00" required>;' ' <input id="id_cents_paid" name="cents_paid" type="text" value="59,47" required>' ) self.assertHTMLEqual( template_as_hidden.render(context), '<input id="id_date_added" name="date_added" type="hidden" value="31.12.2009 06:00:00">;' '<input id="id_cents_paid" name="cents_paid" type="hidden" value="59,47">' ) def test_format_arbitrary_settings(self): self.assertEqual(get_format('DEBUG'), 'DEBUG') def test_get_custom_format(self): reset_format_cache() with self.settings(FORMAT_MODULE_PATH='i18n.other.locale'): with translation.override('fr', deactivate=True): self.assertEqual('d/m/Y CUSTOM', get_format('CUSTOM_DAY_FORMAT')) def test_admin_javascript_supported_input_formats(self): """ The first input format for DATE_INPUT_FORMATS, TIME_INPUT_FORMATS, and DATETIME_INPUT_FORMATS must not contain %f since that's unsupported by the admin's time picker widget. """ regex = re.compile('%([^BcdHImMpSwxXyY%])') for language_code, language_name in settings.LANGUAGES: for format_name in ('DATE_INPUT_FORMATS', 'TIME_INPUT_FORMATS', 'DATETIME_INPUT_FORMATS'): with self.subTest(language=language_code, format=format_name): formatter = get_format(format_name, lang=language_code)[0] self.assertEqual( regex.findall(formatter), [], "%s locale's %s uses an unsupported format code." % (language_code, format_name) ) class MiscTests(SimpleTestCase): rf = RequestFactory() @override_settings(LANGUAGE_CODE='de') def test_english_fallback(self): """ With a non-English LANGUAGE_CODE and if the active language is English or one of its variants, the untranslated string should be returned (instead of falling back to LANGUAGE_CODE) (See #24413). """ self.assertEqual(gettext("Image"), "Bild") with translation.override('en'): self.assertEqual(gettext("Image"), "Image") with translation.override('en-us'): self.assertEqual(gettext("Image"), "Image") with translation.override('en-ca'): self.assertEqual(gettext("Image"), "Image") def test_parse_spec_http_header(self): """ Testing HTTP header parsing. First, we test that we can parse the values according to the spec (and that we extract all the pieces in the right order). 
""" tests = [ # Good headers ('de', [('de', 1.0)]), ('en-AU', [('en-au', 1.0)]), ('es-419', [('es-419', 1.0)]), ('*;q=1.00', [('*', 1.0)]), ('en-AU;q=0.123', [('en-au', 0.123)]), ('en-au;q=0.5', [('en-au', 0.5)]), ('en-au;q=1.0', [('en-au', 1.0)]), ('da, en-gb;q=0.25, en;q=0.5', [('da', 1.0), ('en', 0.5), ('en-gb', 0.25)]), ('en-au-xx', [('en-au-xx', 1.0)]), ('de,en-au;q=0.75,en-us;q=0.5,en;q=0.25,es;q=0.125,fa;q=0.125', [('de', 1.0), ('en-au', 0.75), ('en-us', 0.5), ('en', 0.25), ('es', 0.125), ('fa', 0.125)]), ('*', [('*', 1.0)]), ('de;q=0.', [('de', 0.0)]), ('en; q=1,', [('en', 1.0)]), ('en; q=1.0, * ; q=0.5', [('en', 1.0), ('*', 0.5)]), # Bad headers ('en-gb;q=1.0000', []), ('en;q=0.1234', []), ('en;q=.2', []), ('abcdefghi-au', []), ('**', []), ('en,,gb', []), ('en-au;q=0.1.0', []), (('X' * 97) + 'Z,en', []), ('da, en-gb;q=0.8, en;q=0.7,#', []), ('de;q=2.0', []), ('de;q=0.a', []), ('12-345', []), ('', []), ('en;q=1e0', []), ] for value, expected in tests: with self.subTest(value=value): self.assertEqual(trans_real.parse_accept_lang_header(value), tuple(expected)) def test_parse_literal_http_header(self): """ Now test that we parse a literal HTTP header correctly. """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-br'} self.assertEqual('pt-br', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt'} self.assertEqual('pt', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'es,de'} self.assertEqual('es', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'es-ar,de'} self.assertEqual('es-ar', g(r)) # This test assumes there won't be a Django translation to a US # variation of the Spanish language, a safe assumption. When the # user sets it as the preferred language, the main 'es' # translation should be selected instead. r.META = {'HTTP_ACCEPT_LANGUAGE': 'es-us'} self.assertEqual(g(r), 'es') # This tests the following scenario: there isn't a main language (zh) # translation of Django but there is a translation to variation (zh-hans) # the user sets zh-hans as the preferred language, it should be selected # by Django without falling back nor ignoring it. r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hans,de'} self.assertEqual(g(r), 'zh-hans') r.META = {'HTTP_ACCEPT_LANGUAGE': 'NL'} self.assertEqual('nl', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'fy'} self.assertEqual('fy', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'ia'} self.assertEqual('ia', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'sr-latn'} self.assertEqual('sr-latn', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hans'} self.assertEqual('zh-hans', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hant'} self.assertEqual('zh-hant', g(r)) @override_settings( LANGUAGES=[ ('en', 'English'), ('zh-hans', 'Simplified Chinese'), ('zh-hant', 'Traditional Chinese'), ] ) def test_support_for_deprecated_chinese_language_codes(self): """ Some browsers (Firefox, IE, etc.) use deprecated language codes. As these language codes will be removed in Django 1.9, these will be incorrectly matched. For example zh-tw (traditional) will be interpreted as zh-hans (simplified), which is wrong. So we should also accept these deprecated language codes. 
refs #18419 -- this is explicitly for browser compatibility """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-cn,en'} self.assertEqual(g(r), 'zh-hans') r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-tw,en'} self.assertEqual(g(r), 'zh-hant') def test_special_fallback_language(self): """ Some languages may have special fallbacks that don't follow the simple 'fr-ca' -> 'fr' logic (notably Chinese codes). """ r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-my,en'} self.assertEqual(get_language_from_request(r), 'zh-hans') def test_subsequent_code_fallback_language(self): """ Subsequent language codes should be used when the language code is not supported. """ tests = [ ('zh-Hans-CN', 'zh-hans'), ('zh-hans-mo', 'zh-hans'), ('zh-hans-HK', 'zh-hans'), ('zh-Hant-HK', 'zh-hant'), ('zh-hant-tw', 'zh-hant'), ('zh-hant-SG', 'zh-hant'), ] r = self.rf.get('/') r.COOKIES = {} for value, expected in tests: with self.subTest(value=value): r.META = {'HTTP_ACCEPT_LANGUAGE': f'{value},en'} self.assertEqual(get_language_from_request(r), expected) def test_parse_language_cookie(self): """ Now test that we parse language preferences stored in a cookie correctly. """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'pt-br'} r.META = {} self.assertEqual('pt-br', g(r)) r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'pt'} r.META = {} self.assertEqual('pt', g(r)) r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'es'} r.META = {'HTTP_ACCEPT_LANGUAGE': 'de'} self.assertEqual('es', g(r)) # This test assumes there won't be a Django translation to a US # variation of the Spanish language, a safe assumption. When the # user sets it as the preferred language, the main 'es' # translation should be selected instead. r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'es-us'} r.META = {} self.assertEqual(g(r), 'es') # This tests the following scenario: there isn't a main language (zh) # translation of Django but there is a translation to variation (zh-hans) # the user sets zh-hans as the preferred language, it should be selected # by Django without falling back nor ignoring it. 
r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'zh-hans'} r.META = {'HTTP_ACCEPT_LANGUAGE': 'de'} self.assertEqual(g(r), 'zh-hans') @override_settings( USE_I18N=True, LANGUAGES=[ ('en', 'English'), ('de', 'German'), ('de-at', 'Austrian German'), ('pt-br', 'Portuguese (Brazil)'), ], ) def test_get_supported_language_variant_real(self): g = trans_real.get_supported_language_variant self.assertEqual(g('en'), 'en') self.assertEqual(g('en-gb'), 'en') self.assertEqual(g('de'), 'de') self.assertEqual(g('de-at'), 'de-at') self.assertEqual(g('de-ch'), 'de') self.assertEqual(g('pt-br'), 'pt-br') self.assertEqual(g('pt'), 'pt-br') self.assertEqual(g('pt-pt'), 'pt-br') with self.assertRaises(LookupError): g('pt', strict=True) with self.assertRaises(LookupError): g('pt-pt', strict=True) with self.assertRaises(LookupError): g('xyz') with self.assertRaises(LookupError): g('xy-zz') def test_get_supported_language_variant_null(self): g = trans_null.get_supported_language_variant self.assertEqual(g(settings.LANGUAGE_CODE), settings.LANGUAGE_CODE) with self.assertRaises(LookupError): g('pt') with self.assertRaises(LookupError): g('de') with self.assertRaises(LookupError): g('de-at') with self.assertRaises(LookupError): g('de', strict=True) with self.assertRaises(LookupError): g('de-at', strict=True) with self.assertRaises(LookupError): g('xyz') @override_settings( LANGUAGES=[ ('en', 'English'), ('de', 'German'), ('de-at', 'Austrian German'), ('pl', 'Polish'), ], ) def test_get_language_from_path_real(self): g = trans_real.get_language_from_path self.assertEqual(g('/pl/'), 'pl') self.assertEqual(g('/pl'), 'pl') self.assertIsNone(g('/xyz/')) self.assertEqual(g('/en/'), 'en') self.assertEqual(g('/en-gb/'), 'en') self.assertEqual(g('/de/'), 'de') self.assertEqual(g('/de-at/'), 'de-at') self.assertEqual(g('/de-ch/'), 'de') self.assertIsNone(g('/de-simple-page/')) def test_get_language_from_path_null(self): g = trans_null.get_language_from_path self.assertIsNone(g('/pl/')) self.assertIsNone(g('/pl')) self.assertIsNone(g('/xyz/')) def test_cache_resetting(self): """ After setting LANGUAGE, the cache should be cleared and languages previously valid should not be used (#14170). """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-br'} self.assertEqual('pt-br', g(r)) with self.settings(LANGUAGES=[('en', 'English')]): self.assertNotEqual('pt-br', g(r)) def test_i18n_patterns_returns_list(self): with override_settings(USE_I18N=False): self.assertIsInstance(i18n_patterns([]), list) with override_settings(USE_I18N=True): self.assertIsInstance(i18n_patterns([]), list) class ResolutionOrderI18NTests(SimpleTestCase): def setUp(self): super().setUp() activate('de') def tearDown(self): deactivate() super().tearDown() def assertGettext(self, msgid, msgstr): result = gettext(msgid) self.assertIn( msgstr, result, "The string '%s' isn't in the translation of '%s'; the actual result is '%s'." % (msgstr, msgid, result) ) class AppResolutionOrderI18NTests(ResolutionOrderI18NTests): @override_settings(LANGUAGE_CODE='de') def test_app_translation(self): # Original translation. self.assertGettext('Date/time', 'Datum/Zeit') # Different translation. with self.modify_settings(INSTALLED_APPS={'append': 'i18n.resolution'}): # Force refreshing translations. activate('de') # Doesn't work because it's added later in the list. 
self.assertGettext('Date/time', 'Datum/Zeit') with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.admin.apps.SimpleAdminConfig'}): # Force refreshing translations. activate('de') # Unless the original is removed from the list. self.assertGettext('Date/time', 'Datum/Zeit (APP)') @override_settings(LOCALE_PATHS=extended_locale_paths) class LocalePathsResolutionOrderI18NTests(ResolutionOrderI18NTests): def test_locale_paths_translation(self): self.assertGettext('Time', 'LOCALE_PATHS') def test_locale_paths_override_app_translation(self): with self.settings(INSTALLED_APPS=['i18n.resolution']): self.assertGettext('Time', 'LOCALE_PATHS') class DjangoFallbackResolutionOrderI18NTests(ResolutionOrderI18NTests): def test_django_fallback(self): self.assertEqual(gettext('Date/time'), 'Datum/Zeit') @override_settings(INSTALLED_APPS=['i18n.territorial_fallback']) class TranslationFallbackI18NTests(ResolutionOrderI18NTests): def test_sparse_territory_catalog(self): """ Untranslated strings for territorial language variants use the translations of the generic language. In this case, the de-de translation falls back to de. """ with translation.override('de-de'): self.assertGettext('Test 1 (en)', '(de-de)') self.assertGettext('Test 2 (en)', '(de)') class TestModels(TestCase): def test_lazy(self): tm = TestModel() tm.save() def test_safestr(self): c = Company(cents_paid=12, products_delivered=1) c.name = SafeString('Iñtërnâtiônàlizætiøn1') c.save() class TestLanguageInfo(SimpleTestCase): def test_localized_language_info(self): li = get_language_info('de') self.assertEqual(li['code'], 'de') self.assertEqual(li['name_local'], 'Deutsch') self.assertEqual(li['name'], 'German') self.assertIs(li['bidi'], False) def test_unknown_language_code(self): with self.assertRaisesMessage(KeyError, "Unknown language code xx"): get_language_info('xx') with translation.override('xx'): # A language with no translation catalogs should fallback to the # untranslated string. self.assertEqual(gettext("Title"), "Title") def test_unknown_only_country_code(self): li = get_language_info('de-xx') self.assertEqual(li['code'], 'de') self.assertEqual(li['name_local'], 'Deutsch') self.assertEqual(li['name'], 'German') self.assertIs(li['bidi'], False) def test_unknown_language_code_and_country_code(self): with self.assertRaisesMessage(KeyError, "Unknown language code xx-xx and xx"): get_language_info('xx-xx') def test_fallback_language_code(self): """ get_language_info return the first fallback language info if the lang_info struct does not contain the 'name' key. 
""" li = get_language_info('zh-my') self.assertEqual(li['code'], 'zh-hans') li = get_language_info('zh-hans') self.assertEqual(li['code'], 'zh-hans') @override_settings( USE_I18N=True, LANGUAGES=[ ('en', 'English'), ('fr', 'French'), ], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='i18n.urls', ) class LocaleMiddlewareTests(TestCase): def test_streaming_response(self): # Regression test for #5241 response = self.client.get('/fr/streaming/') self.assertContains(response, "Oui/Non") response = self.client.get('/en/streaming/') self.assertContains(response, "Yes/No") @override_settings( USE_I18N=True, LANGUAGES=[ ('en', 'English'), ('de', 'German'), ('fr', 'French'), ], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='i18n.urls_default_unprefixed', LANGUAGE_CODE='en', ) class UnprefixedDefaultLanguageTests(SimpleTestCase): def test_default_lang_without_prefix(self): """ With i18n_patterns(..., prefix_default_language=False), the default language (settings.LANGUAGE_CODE) should be accessible without a prefix. """ response = self.client.get('/simple/') self.assertEqual(response.content, b'Yes') def test_other_lang_with_prefix(self): response = self.client.get('/fr/simple/') self.assertEqual(response.content, b'Oui') def test_unprefixed_language_other_than_accept_language(self): response = self.client.get('/simple/', HTTP_ACCEPT_LANGUAGE='fr') self.assertEqual(response.content, b'Yes') def test_page_with_dash(self): # A page starting with /de* shouldn't match the 'de' language code. response = self.client.get('/de-simple-page/') self.assertEqual(response.content, b'Yes') def test_no_redirect_on_404(self): """ A request for a nonexistent URL shouldn't cause a redirect to /<default_language>/<request_url> when prefix_default_language=False and /<default_language>/<request_url> has a URL match (#27402). """ # A match for /group1/group2/ must exist for this to act as a # regression test. 
response = self.client.get('/group1/group2/') self.assertEqual(response.status_code, 200) response = self.client.get('/nonexistent/') self.assertEqual(response.status_code, 404) @override_settings( USE_I18N=True, LANGUAGES=[ ('bg', 'Bulgarian'), ('en-us', 'English'), ('pt-br', 'Portuguese (Brazil)'), ], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='i18n.urls' ) class CountrySpecificLanguageTests(SimpleTestCase): rf = RequestFactory() def test_check_for_language(self): self.assertTrue(check_for_language('en')) self.assertTrue(check_for_language('en-us')) self.assertTrue(check_for_language('en-US')) self.assertFalse(check_for_language('en_US')) self.assertTrue(check_for_language('be')) self.assertTrue(check_for_language('be@latin')) self.assertTrue(check_for_language('sr-RS@latin')) self.assertTrue(check_for_language('sr-RS@12345')) self.assertFalse(check_for_language('en-ü')) self.assertFalse(check_for_language('en\x00')) self.assertFalse(check_for_language(None)) self.assertFalse(check_for_language('be@ ')) # Specifying encoding is not supported (Django enforces UTF-8) self.assertFalse(check_for_language('tr-TR.UTF-8')) self.assertFalse(check_for_language('tr-TR.UTF8')) self.assertFalse(check_for_language('de-DE.utf-8')) def test_check_for_language_null(self): self.assertIs(trans_null.check_for_language('en'), True) def test_get_language_from_request(self): # issue 19919 r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'en-US,en;q=0.8,bg;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('en-us', lang) r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'bg-bg,en-US;q=0.8,en;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('bg', lang) def test_get_language_from_request_null(self): lang = trans_null.get_language_from_request(None) self.assertEqual(lang, 'en') with override_settings(LANGUAGE_CODE='de'): lang = trans_null.get_language_from_request(None) self.assertEqual(lang, 'de') def test_specific_language_codes(self): # issue 11915 r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt,en-US;q=0.8,en;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('pt-br', lang) r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-pt,en-US;q=0.8,en;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('pt-br', lang) class TranslationFilesMissing(SimpleTestCase): def setUp(self): super().setUp() self.gettext_find_builtin = gettext_module.find def tearDown(self): gettext_module.find = self.gettext_find_builtin super().tearDown() def patchGettextFind(self): gettext_module.find = lambda *args, **kw: None def test_failure_finding_default_mo_files(self): """OSError is raised if the default language is unparseable.""" self.patchGettextFind() trans_real._translations = {} with self.assertRaises(OSError): activate('en') class NonDjangoLanguageTests(SimpleTestCase): """ A language non present in default Django languages can still be installed/used by a Django project. 
""" @override_settings( USE_I18N=True, LANGUAGES=[ ('en-us', 'English'), ('xxx', 'Somelanguage'), ], LANGUAGE_CODE='xxx', LOCALE_PATHS=[os.path.join(here, 'commands', 'locale')], ) def test_non_django_language(self): self.assertEqual(get_language(), 'xxx') self.assertEqual(gettext("year"), "reay") @override_settings(USE_I18N=True) def test_check_for_language(self): with tempfile.TemporaryDirectory() as app_dir: os.makedirs(os.path.join(app_dir, 'locale', 'dummy_Lang', 'LC_MESSAGES')) open(os.path.join(app_dir, 'locale', 'dummy_Lang', 'LC_MESSAGES', 'django.mo'), 'w').close() app_config = AppConfig('dummy_app', AppModuleStub(__path__=[app_dir])) with mock.patch('django.apps.apps.get_app_configs', return_value=[app_config]): self.assertIs(check_for_language('dummy-lang'), True) @override_settings( USE_I18N=True, LANGUAGES=[ ('en-us', 'English'), # xyz language has no locale files ('xyz', 'XYZ'), ], ) @translation.override('xyz') def test_plural_non_django_language(self): self.assertEqual(get_language(), 'xyz') self.assertEqual(ngettext('year', 'years', 2), 'years') @override_settings(USE_I18N=True) class WatchForTranslationChangesTests(SimpleTestCase): @override_settings(USE_I18N=False) def test_i18n_disabled(self): mocked_sender = mock.MagicMock() watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_not_called() def test_i18n_enabled(self): mocked_sender = mock.MagicMock() watch_for_translation_changes(mocked_sender) self.assertGreater(mocked_sender.watch_dir.call_count, 1) def test_i18n_locale_paths(self): mocked_sender = mock.MagicMock() with tempfile.TemporaryDirectory() as app_dir: with self.settings(LOCALE_PATHS=[app_dir]): watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_any_call(Path(app_dir), '**/*.mo') def test_i18n_app_dirs(self): mocked_sender = mock.MagicMock() with self.settings(INSTALLED_APPS=['tests.i18n.sampleproject']): watch_for_translation_changes(mocked_sender) project_dir = Path(__file__).parent / 'sampleproject' / 'locale' mocked_sender.watch_dir.assert_any_call(project_dir, '**/*.mo') def test_i18n_app_dirs_ignore_django_apps(self): mocked_sender = mock.MagicMock() with self.settings(INSTALLED_APPS=['django.contrib.admin']): watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_called_once_with(Path('locale'), '**/*.mo') def test_i18n_local_locale(self): mocked_sender = mock.MagicMock() watch_for_translation_changes(mocked_sender) locale_dir = Path(__file__).parent / 'locale' mocked_sender.watch_dir.assert_any_call(locale_dir, '**/*.mo') class TranslationFileChangedTests(SimpleTestCase): def setUp(self): self.gettext_translations = gettext_module._translations.copy() self.trans_real_translations = trans_real._translations.copy() def tearDown(self): gettext._translations = self.gettext_translations trans_real._translations = self.trans_real_translations def test_ignores_non_mo_files(self): gettext_module._translations = {'foo': 'bar'} path = Path('test.py') self.assertIsNone(translation_file_changed(None, path)) self.assertEqual(gettext_module._translations, {'foo': 'bar'}) def test_resets_cache_with_mo_files(self): gettext_module._translations = {'foo': 'bar'} trans_real._translations = {'foo': 'bar'} trans_real._default = 1 trans_real._active = False path = Path('test.mo') self.assertIs(translation_file_changed(None, path), True) self.assertEqual(gettext_module._translations, {}) self.assertEqual(trans_real._translations, {}) self.assertIsNone(trans_real._default) 
        self.assertIsInstance(trans_real._active, Local)


class UtilsTests(SimpleTestCase):
    def test_round_away_from_one(self):
        tests = [
            (0, 0),
            (0., 0),
            (0.25, 0),
            (0.5, 0),
            (0.75, 0),
            (1, 1),
            (1., 1),
            (1.25, 2),
            (1.5, 2),
            (1.75, 2),
            (-0., 0),
            (-0.25, -1),
            (-0.5, -1),
            (-0.75, -1),
            (-1, -1),
            (-1., -1),
            (-1.25, -2),
            (-1.5, -2),
            (-1.75, -2),
        ]
        for value, expected in tests:
            with self.subTest(value=value):
                self.assertEqual(round_away_from_one(value), expected)
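

# An illustrative sketch (not Django's implementation): the expected pairs in
# test_round_away_from_one above amount to "round away from 1" -- values
# greater than 1 round up, everything else (including all negatives) rounds
# down. The helper name below is hypothetical and exists only to spell the
# rule out; the function actually exercised is round_away_from_one itself.
import math


def sketch_round_away_from_one(value):
    """Round `value` to an integer, moving away from 1 rather than from 0."""
    # Matches the table above, e.g. (1.25, 2), (0.75, 0), (-0.25, -1).
    return math.ceil(value) if value > 1 else math.floor(value)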
36c0d58a52cfef8ac4d60fddbc1a0bbbb18457619134bda8eccabea6e591dc10
import collections.abc from datetime import datetime from math import ceil from operator import attrgetter from django.core.exceptions import FieldError from django.db import connection, models from django.db.models import ( BooleanField, Exists, ExpressionWrapper, F, Max, OuterRef, Q, ) from django.db.models.functions import Substr from django.test import TestCase, skipUnlessDBFeature from django.test.utils import isolate_apps from .models import ( Article, Author, Freebie, Game, IsNullWithNoneAsRHS, Player, Product, Season, Stock, Tag, ) class LookupTests(TestCase): @classmethod def setUpTestData(cls): # Create a few Authors. cls.au1 = Author.objects.create(name='Author 1', alias='a1') cls.au2 = Author.objects.create(name='Author 2', alias='a2') # Create a few Articles. cls.a1 = Article.objects.create( headline='Article 1', pub_date=datetime(2005, 7, 26), author=cls.au1, slug='a1', ) cls.a2 = Article.objects.create( headline='Article 2', pub_date=datetime(2005, 7, 27), author=cls.au1, slug='a2', ) cls.a3 = Article.objects.create( headline='Article 3', pub_date=datetime(2005, 7, 27), author=cls.au1, slug='a3', ) cls.a4 = Article.objects.create( headline='Article 4', pub_date=datetime(2005, 7, 28), author=cls.au1, slug='a4', ) cls.a5 = Article.objects.create( headline='Article 5', pub_date=datetime(2005, 8, 1, 9, 0), author=cls.au2, slug='a5', ) cls.a6 = Article.objects.create( headline='Article 6', pub_date=datetime(2005, 8, 1, 8, 0), author=cls.au2, slug='a6', ) cls.a7 = Article.objects.create( headline='Article 7', pub_date=datetime(2005, 7, 27), author=cls.au2, slug='a7', ) # Create a few Tags. cls.t1 = Tag.objects.create(name='Tag 1') cls.t1.articles.add(cls.a1, cls.a2, cls.a3) cls.t2 = Tag.objects.create(name='Tag 2') cls.t2.articles.add(cls.a3, cls.a4, cls.a5) cls.t3 = Tag.objects.create(name='Tag 3') cls.t3.articles.add(cls.a5, cls.a6, cls.a7) def test_exists(self): # We can use .exists() to check that there are some self.assertTrue(Article.objects.exists()) for a in Article.objects.all(): a.delete() # There should be none now! self.assertFalse(Article.objects.exists()) def test_lookup_int_as_str(self): # Integer value can be queried using string self.assertSequenceEqual( Article.objects.filter(id__iexact=str(self.a1.id)), [self.a1], ) @skipUnlessDBFeature('supports_date_lookup_using_string') def test_lookup_date_as_str(self): # A date lookup can be performed using a string search self.assertSequenceEqual( Article.objects.filter(pub_date__startswith='2005'), [self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1], ) def test_iterator(self): # Each QuerySet gets iterator(), which is a generator that "lazily" # returns results using database-level iteration. self.assertIsInstance(Article.objects.iterator(), collections.abc.Iterator) self.assertQuerysetEqual( Article.objects.iterator(), [ 'Article 5', 'Article 6', 'Article 4', 'Article 2', 'Article 3', 'Article 7', 'Article 1', ], transform=attrgetter('headline') ) # iterator() can be used on any QuerySet. self.assertQuerysetEqual( Article.objects.filter(headline__endswith='4').iterator(), ['Article 4'], transform=attrgetter('headline')) def test_count(self): # count() returns the number of objects matching search criteria. self.assertEqual(Article.objects.count(), 7) self.assertEqual(Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).count(), 3) self.assertEqual(Article.objects.filter(headline__startswith='Blah blah').count(), 0) # count() should respect sliced query sets. 
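        # The slice's LIMIT/OFFSET are applied before counting, so the counts
        # below are capped by the slice bounds rather than by the table size.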
articles = Article.objects.all() self.assertEqual(articles.count(), 7) self.assertEqual(articles[:4].count(), 4) self.assertEqual(articles[1:100].count(), 6) self.assertEqual(articles[10:100].count(), 0) # Date and date/time lookups can also be done with strings. self.assertEqual(Article.objects.filter(pub_date__exact='2005-07-27 00:00:00').count(), 3) def test_in_bulk(self): # in_bulk() takes a list of IDs and returns a dictionary mapping IDs to objects. arts = Article.objects.in_bulk([self.a1.id, self.a2.id]) self.assertEqual(arts[self.a1.id], self.a1) self.assertEqual(arts[self.a2.id], self.a2) self.assertEqual( Article.objects.in_bulk(), { self.a1.id: self.a1, self.a2.id: self.a2, self.a3.id: self.a3, self.a4.id: self.a4, self.a5.id: self.a5, self.a6.id: self.a6, self.a7.id: self.a7, } ) self.assertEqual(Article.objects.in_bulk([self.a3.id]), {self.a3.id: self.a3}) self.assertEqual(Article.objects.in_bulk({self.a3.id}), {self.a3.id: self.a3}) self.assertEqual(Article.objects.in_bulk(frozenset([self.a3.id])), {self.a3.id: self.a3}) self.assertEqual(Article.objects.in_bulk((self.a3.id,)), {self.a3.id: self.a3}) self.assertEqual(Article.objects.in_bulk([1000]), {}) self.assertEqual(Article.objects.in_bulk([]), {}) self.assertEqual(Article.objects.in_bulk(iter([self.a1.id])), {self.a1.id: self.a1}) self.assertEqual(Article.objects.in_bulk(iter([])), {}) with self.assertRaises(TypeError): Article.objects.in_bulk(headline__startswith='Blah') def test_in_bulk_lots_of_ids(self): test_range = 2000 max_query_params = connection.features.max_query_params expected_num_queries = ceil(test_range / max_query_params) if max_query_params else 1 Author.objects.bulk_create([Author() for i in range(test_range - Author.objects.count())]) authors = {author.pk: author for author in Author.objects.all()} with self.assertNumQueries(expected_num_queries): self.assertEqual(Author.objects.in_bulk(authors), authors) def test_in_bulk_with_field(self): self.assertEqual( Article.objects.in_bulk([self.a1.slug, self.a2.slug, self.a3.slug], field_name='slug'), { self.a1.slug: self.a1, self.a2.slug: self.a2, self.a3.slug: self.a3, } ) def test_in_bulk_meta_constraint(self): season_2011 = Season.objects.create(year=2011) season_2012 = Season.objects.create(year=2012) Season.objects.create(year=2013) self.assertEqual( Season.objects.in_bulk( [season_2011.year, season_2012.year], field_name='year', ), {season_2011.year: season_2011, season_2012.year: season_2012}, ) def test_in_bulk_non_unique_field(self): msg = "in_bulk()'s field_name must be a unique field but 'author' isn't." with self.assertRaisesMessage(ValueError, msg): Article.objects.in_bulk([self.au1], field_name='author') @skipUnlessDBFeature('can_distinct_on_fields') def test_in_bulk_distinct_field(self): self.assertEqual( Article.objects.order_by('headline').distinct('headline').in_bulk( [self.a1.headline, self.a5.headline], field_name='headline', ), {self.a1.headline: self.a1, self.a5.headline: self.a5}, ) @skipUnlessDBFeature('can_distinct_on_fields') def test_in_bulk_multiple_distinct_field(self): msg = "in_bulk()'s field_name must be a unique field but 'pub_date' isn't." 
        with self.assertRaisesMessage(ValueError, msg):
            Article.objects.order_by('headline', 'pub_date').distinct(
                'headline', 'pub_date',
            ).in_bulk(field_name='pub_date')

    @isolate_apps('lookup')
    def test_in_bulk_non_unique_meta_constraint(self):
        class Model(models.Model):
            ean = models.CharField(max_length=100)
            brand = models.CharField(max_length=100)
            name = models.CharField(max_length=80)

            class Meta:
                constraints = [
                    models.UniqueConstraint(
                        fields=['ean'],
                        name='partial_ean_unique',
                        condition=models.Q(is_active=True)
                    ),
                    models.UniqueConstraint(
                        fields=['brand', 'name'],
                        name='together_brand_name_unique',
                    ),
                ]

        msg = "in_bulk()'s field_name must be a unique field but '%s' isn't."
        for field_name in ['brand', 'ean']:
            with self.subTest(field_name=field_name):
                with self.assertRaisesMessage(ValueError, msg % field_name):
                    Model.objects.in_bulk(field_name=field_name)

    def test_in_bulk_sliced_queryset(self):
        msg = "Cannot use 'limit' or 'offset' with in_bulk()."
        with self.assertRaisesMessage(TypeError, msg):
            Article.objects.all()[0:5].in_bulk([self.a1.id, self.a2.id])

    def test_values(self):
        # values() returns a list of dictionaries instead of object instances --
        # and you can specify which fields you want to retrieve.
        self.assertSequenceEqual(
            Article.objects.values('headline'),
            [
                {'headline': 'Article 5'},
                {'headline': 'Article 6'},
                {'headline': 'Article 4'},
                {'headline': 'Article 2'},
                {'headline': 'Article 3'},
                {'headline': 'Article 7'},
                {'headline': 'Article 1'},
            ],
        )
        self.assertSequenceEqual(
            Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).values('id'),
            [{'id': self.a2.id}, {'id': self.a3.id}, {'id': self.a7.id}],
        )
        self.assertSequenceEqual(
            Article.objects.values('id', 'headline'),
            [
                {'id': self.a5.id, 'headline': 'Article 5'},
                {'id': self.a6.id, 'headline': 'Article 6'},
                {'id': self.a4.id, 'headline': 'Article 4'},
                {'id': self.a2.id, 'headline': 'Article 2'},
                {'id': self.a3.id, 'headline': 'Article 3'},
                {'id': self.a7.id, 'headline': 'Article 7'},
                {'id': self.a1.id, 'headline': 'Article 1'},
            ],
        )
        # You can use values() with iterator() for memory savings,
        # because iterator() uses database-level iteration.
        self.assertSequenceEqual(
            list(Article.objects.values('id', 'headline').iterator()),
            [
                {'headline': 'Article 5', 'id': self.a5.id},
                {'headline': 'Article 6', 'id': self.a6.id},
                {'headline': 'Article 4', 'id': self.a4.id},
                {'headline': 'Article 2', 'id': self.a2.id},
                {'headline': 'Article 3', 'id': self.a3.id},
                {'headline': 'Article 7', 'id': self.a7.id},
                {'headline': 'Article 1', 'id': self.a1.id},
            ],
        )
        # The values() method works with "extra" fields specified in extra(select).
self.assertSequenceEqual( Article.objects.extra(select={'id_plus_one': 'id + 1'}).values('id', 'id_plus_one'), [ {'id': self.a5.id, 'id_plus_one': self.a5.id + 1}, {'id': self.a6.id, 'id_plus_one': self.a6.id + 1}, {'id': self.a4.id, 'id_plus_one': self.a4.id + 1}, {'id': self.a2.id, 'id_plus_one': self.a2.id + 1}, {'id': self.a3.id, 'id_plus_one': self.a3.id + 1}, {'id': self.a7.id, 'id_plus_one': self.a7.id + 1}, {'id': self.a1.id, 'id_plus_one': self.a1.id + 1}, ], ) data = { 'id_plus_one': 'id+1', 'id_plus_two': 'id+2', 'id_plus_three': 'id+3', 'id_plus_four': 'id+4', 'id_plus_five': 'id+5', 'id_plus_six': 'id+6', 'id_plus_seven': 'id+7', 'id_plus_eight': 'id+8', } self.assertSequenceEqual( Article.objects.filter(id=self.a1.id).extra(select=data).values(*data), [{ 'id_plus_one': self.a1.id + 1, 'id_plus_two': self.a1.id + 2, 'id_plus_three': self.a1.id + 3, 'id_plus_four': self.a1.id + 4, 'id_plus_five': self.a1.id + 5, 'id_plus_six': self.a1.id + 6, 'id_plus_seven': self.a1.id + 7, 'id_plus_eight': self.a1.id + 8, }], ) # You can specify fields from forward and reverse relations, just like filter(). self.assertSequenceEqual( Article.objects.values('headline', 'author__name'), [ {'headline': self.a5.headline, 'author__name': self.au2.name}, {'headline': self.a6.headline, 'author__name': self.au2.name}, {'headline': self.a4.headline, 'author__name': self.au1.name}, {'headline': self.a2.headline, 'author__name': self.au1.name}, {'headline': self.a3.headline, 'author__name': self.au1.name}, {'headline': self.a7.headline, 'author__name': self.au2.name}, {'headline': self.a1.headline, 'author__name': self.au1.name}, ], ) self.assertSequenceEqual( Author.objects.values('name', 'article__headline').order_by('name', 'article__headline'), [ {'name': self.au1.name, 'article__headline': self.a1.headline}, {'name': self.au1.name, 'article__headline': self.a2.headline}, {'name': self.au1.name, 'article__headline': self.a3.headline}, {'name': self.au1.name, 'article__headline': self.a4.headline}, {'name': self.au2.name, 'article__headline': self.a5.headline}, {'name': self.au2.name, 'article__headline': self.a6.headline}, {'name': self.au2.name, 'article__headline': self.a7.headline}, ], ) self.assertSequenceEqual( ( Author.objects .values('name', 'article__headline', 'article__tag__name') .order_by('name', 'article__headline', 'article__tag__name') ), [ {'name': self.au1.name, 'article__headline': self.a1.headline, 'article__tag__name': self.t1.name}, {'name': self.au1.name, 'article__headline': self.a2.headline, 'article__tag__name': self.t1.name}, {'name': self.au1.name, 'article__headline': self.a3.headline, 'article__tag__name': self.t1.name}, {'name': self.au1.name, 'article__headline': self.a3.headline, 'article__tag__name': self.t2.name}, {'name': self.au1.name, 'article__headline': self.a4.headline, 'article__tag__name': self.t2.name}, {'name': self.au2.name, 'article__headline': self.a5.headline, 'article__tag__name': self.t2.name}, {'name': self.au2.name, 'article__headline': self.a5.headline, 'article__tag__name': self.t3.name}, {'name': self.au2.name, 'article__headline': self.a6.headline, 'article__tag__name': self.t3.name}, {'name': self.au2.name, 'article__headline': self.a7.headline, 'article__tag__name': self.t3.name}, ], ) # However, an exception FieldDoesNotExist will be thrown if you specify # a nonexistent field name in values() (a field that is neither in the # model nor in extra(select)). msg = ( "Cannot resolve keyword 'id_plus_two' into field. 
Choices are: " "author, author_id, headline, id, id_plus_one, pub_date, slug, tag" ) with self.assertRaisesMessage(FieldError, msg): Article.objects.extra(select={'id_plus_one': 'id + 1'}).values('id', 'id_plus_two') # If you don't specify field names to values(), all are returned. self.assertSequenceEqual( Article.objects.filter(id=self.a5.id).values(), [{ 'id': self.a5.id, 'author_id': self.au2.id, 'headline': 'Article 5', 'pub_date': datetime(2005, 8, 1, 9, 0), 'slug': 'a5', }], ) def test_values_list(self): # values_list() is similar to values(), except that the results are # returned as a list of tuples, rather than a list of dictionaries. # Within each tuple, the order of the elements is the same as the order # of fields in the values_list() call. self.assertSequenceEqual( Article.objects.values_list('headline'), [ ('Article 5',), ('Article 6',), ('Article 4',), ('Article 2',), ('Article 3',), ('Article 7',), ('Article 1',), ], ) self.assertSequenceEqual( Article.objects.values_list('id').order_by('id'), [(self.a1.id,), (self.a2.id,), (self.a3.id,), (self.a4.id,), (self.a5.id,), (self.a6.id,), (self.a7.id,)], ) self.assertSequenceEqual( Article.objects.values_list('id', flat=True).order_by('id'), [self.a1.id, self.a2.id, self.a3.id, self.a4.id, self.a5.id, self.a6.id, self.a7.id], ) self.assertSequenceEqual( Article.objects.extra(select={'id_plus_one': 'id+1'}).order_by('id').values_list('id'), [(self.a1.id,), (self.a2.id,), (self.a3.id,), (self.a4.id,), (self.a5.id,), (self.a6.id,), (self.a7.id,)], ) self.assertSequenceEqual( Article.objects.extra(select={'id_plus_one': 'id+1'}).order_by('id').values_list('id_plus_one', 'id'), [ (self.a1.id + 1, self.a1.id), (self.a2.id + 1, self.a2.id), (self.a3.id + 1, self.a3.id), (self.a4.id + 1, self.a4.id), (self.a5.id + 1, self.a5.id), (self.a6.id + 1, self.a6.id), (self.a7.id + 1, self.a7.id) ], ) self.assertSequenceEqual( Article.objects.extra(select={'id_plus_one': 'id+1'}).order_by('id').values_list('id', 'id_plus_one'), [ (self.a1.id, self.a1.id + 1), (self.a2.id, self.a2.id + 1), (self.a3.id, self.a3.id + 1), (self.a4.id, self.a4.id + 1), (self.a5.id, self.a5.id + 1), (self.a6.id, self.a6.id + 1), (self.a7.id, self.a7.id + 1) ], ) args = ('name', 'article__headline', 'article__tag__name') self.assertSequenceEqual( Author.objects.values_list(*args).order_by(*args), [ (self.au1.name, self.a1.headline, self.t1.name), (self.au1.name, self.a2.headline, self.t1.name), (self.au1.name, self.a3.headline, self.t1.name), (self.au1.name, self.a3.headline, self.t2.name), (self.au1.name, self.a4.headline, self.t2.name), (self.au2.name, self.a5.headline, self.t2.name), (self.au2.name, self.a5.headline, self.t3.name), (self.au2.name, self.a6.headline, self.t3.name), (self.au2.name, self.a7.headline, self.t3.name), ], ) with self.assertRaises(TypeError): Article.objects.values_list('id', 'headline', flat=True) def test_get_next_previous_by(self): # Every DateField and DateTimeField creates get_next_by_FOO() and # get_previous_by_FOO() methods. In the case of identical date values, # these methods will use the ID as a fallback check. This guarantees # that no records are skipped or duplicated. 
self.assertEqual(repr(self.a1.get_next_by_pub_date()), '<Article: Article 2>') self.assertEqual(repr(self.a2.get_next_by_pub_date()), '<Article: Article 3>') self.assertEqual(repr(self.a2.get_next_by_pub_date(headline__endswith='6')), '<Article: Article 6>') self.assertEqual(repr(self.a3.get_next_by_pub_date()), '<Article: Article 7>') self.assertEqual(repr(self.a4.get_next_by_pub_date()), '<Article: Article 6>') with self.assertRaises(Article.DoesNotExist): self.a5.get_next_by_pub_date() self.assertEqual(repr(self.a6.get_next_by_pub_date()), '<Article: Article 5>') self.assertEqual(repr(self.a7.get_next_by_pub_date()), '<Article: Article 4>') self.assertEqual(repr(self.a7.get_previous_by_pub_date()), '<Article: Article 3>') self.assertEqual(repr(self.a6.get_previous_by_pub_date()), '<Article: Article 4>') self.assertEqual(repr(self.a5.get_previous_by_pub_date()), '<Article: Article 6>') self.assertEqual(repr(self.a4.get_previous_by_pub_date()), '<Article: Article 7>') self.assertEqual(repr(self.a3.get_previous_by_pub_date()), '<Article: Article 2>') self.assertEqual(repr(self.a2.get_previous_by_pub_date()), '<Article: Article 1>') def test_escaping(self): # Underscores, percent signs and backslashes have special meaning in the # underlying SQL code, but Django handles the quoting of them automatically. a8 = Article.objects.create(headline='Article_ with underscore', pub_date=datetime(2005, 11, 20)) self.assertSequenceEqual( Article.objects.filter(headline__startswith='Article'), [a8, self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1], ) self.assertSequenceEqual( Article.objects.filter(headline__startswith='Article_'), [a8], ) a9 = Article.objects.create(headline='Article% with percent sign', pub_date=datetime(2005, 11, 21)) self.assertSequenceEqual( Article.objects.filter(headline__startswith='Article'), [a9, a8, self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1], ) self.assertSequenceEqual( Article.objects.filter(headline__startswith='Article%'), [a9], ) a10 = Article.objects.create(headline='Article with \\ backslash', pub_date=datetime(2005, 11, 22)) self.assertSequenceEqual( Article.objects.filter(headline__contains='\\'), [a10], ) def test_exclude(self): pub_date = datetime(2005, 11, 20) a8 = Article.objects.create(headline='Article_ with underscore', pub_date=pub_date) a9 = Article.objects.create(headline='Article% with percent sign', pub_date=pub_date) a10 = Article.objects.create(headline='Article with \\ backslash', pub_date=pub_date) # exclude() is the opposite of filter() when doing lookups: self.assertSequenceEqual( Article.objects.filter(headline__contains='Article').exclude(headline__contains='with'), [self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1], ) self.assertSequenceEqual( Article.objects.exclude(headline__startswith="Article_"), [a10, a9, self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1], ) self.assertSequenceEqual( Article.objects.exclude(headline="Article 7"), [a10, a9, a8, self.a5, self.a6, self.a4, self.a2, self.a3, self.a1], ) def test_none(self): # none() returns a QuerySet that behaves like any other QuerySet object self.assertQuerysetEqual(Article.objects.none(), []) self.assertQuerysetEqual(Article.objects.none().filter(headline__startswith='Article'), []) self.assertQuerysetEqual(Article.objects.filter(headline__startswith='Article').none(), []) self.assertEqual(Article.objects.none().count(), 0) self.assertEqual(Article.objects.none().update(headline="This should not take effect"), 0) 
self.assertQuerysetEqual(Article.objects.none().iterator(), []) def test_in(self): self.assertSequenceEqual( Article.objects.exclude(id__in=[]), [self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1], ) def test_in_empty_list(self): self.assertSequenceEqual(Article.objects.filter(id__in=[]), []) def test_in_different_database(self): with self.assertRaisesMessage( ValueError, "Subqueries aren't allowed across different databases. Force the " "inner query to be evaluated using `list(inner_query)`." ): list(Article.objects.filter(id__in=Article.objects.using('other').all())) def test_in_keeps_value_ordering(self): query = Article.objects.filter(slug__in=['a%d' % i for i in range(1, 8)]).values('pk').query self.assertIn(' IN (a1, a2, a3, a4, a5, a6, a7) ', str(query)) def test_in_ignore_none(self): with self.assertNumQueries(1) as ctx: self.assertSequenceEqual( Article.objects.filter(id__in=[None, self.a1.id]), [self.a1], ) sql = ctx.captured_queries[0]['sql'] self.assertIn('IN (%s)' % self.a1.pk, sql) def test_in_ignore_solo_none(self): with self.assertNumQueries(0): self.assertSequenceEqual(Article.objects.filter(id__in=[None]), []) def test_in_ignore_none_with_unhashable_items(self): class UnhashableInt(int): __hash__ = None with self.assertNumQueries(1) as ctx: self.assertSequenceEqual( Article.objects.filter(id__in=[None, UnhashableInt(self.a1.id)]), [self.a1], ) sql = ctx.captured_queries[0]['sql'] self.assertIn('IN (%s)' % self.a1.pk, sql) def test_error_messages(self): # Programming errors are pointed out with nice error messages with self.assertRaisesMessage( FieldError, "Cannot resolve keyword 'pub_date_year' into field. Choices are: " "author, author_id, headline, id, pub_date, slug, tag" ): Article.objects.filter(pub_date_year='2005').count() def test_unsupported_lookups(self): with self.assertRaisesMessage( FieldError, "Unsupported lookup 'starts' for CharField or join on the field " "not permitted, perhaps you meant startswith or istartswith?" ): Article.objects.filter(headline__starts='Article') with self.assertRaisesMessage( FieldError, "Unsupported lookup 'is_null' for DateTimeField or join on the field " "not permitted, perhaps you meant isnull?" ): Article.objects.filter(pub_date__is_null=True) with self.assertRaisesMessage( FieldError, "Unsupported lookup 'gobbledygook' for DateTimeField or join on the field " "not permitted." ): Article.objects.filter(pub_date__gobbledygook='blahblah') def test_relation_nested_lookup_error(self): # An invalid nested lookup on a related field raises a useful error. 
msg = 'Related Field got invalid lookup: editor' with self.assertRaisesMessage(FieldError, msg): Article.objects.filter(author__editor__name='James') msg = 'Related Field got invalid lookup: foo' with self.assertRaisesMessage(FieldError, msg): Tag.objects.filter(articles__foo='bar') def test_regex(self): # Create some articles with a bit more interesting headlines for testing field lookups: for a in Article.objects.all(): a.delete() now = datetime.now() Article.objects.bulk_create([ Article(pub_date=now, headline='f'), Article(pub_date=now, headline='fo'), Article(pub_date=now, headline='foo'), Article(pub_date=now, headline='fooo'), Article(pub_date=now, headline='hey-Foo'), Article(pub_date=now, headline='bar'), Article(pub_date=now, headline='AbBa'), Article(pub_date=now, headline='baz'), Article(pub_date=now, headline='baxZ'), ]) # zero-or-more self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'fo*'), Article.objects.filter(headline__in=['f', 'fo', 'foo', 'fooo']), ) self.assertQuerysetEqual( Article.objects.filter(headline__iregex=r'fo*'), Article.objects.filter(headline__in=['f', 'fo', 'foo', 'fooo', 'hey-Foo']), ) # one-or-more self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'fo+'), Article.objects.filter(headline__in=['fo', 'foo', 'fooo']), ) # wildcard self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'fooo?'), Article.objects.filter(headline__in=['foo', 'fooo']), ) # leading anchor self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'^b'), Article.objects.filter(headline__in=['bar', 'baxZ', 'baz']), ) self.assertQuerysetEqual( Article.objects.filter(headline__iregex=r'^a'), Article.objects.filter(headline='AbBa'), ) # trailing anchor self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'z$'), Article.objects.filter(headline='baz'), ) self.assertQuerysetEqual( Article.objects.filter(headline__iregex=r'z$'), Article.objects.filter(headline__in=['baxZ', 'baz']), ) # character sets self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'ba[rz]'), Article.objects.filter(headline__in=['bar', 'baz']), ) self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'ba.[RxZ]'), Article.objects.filter(headline='baxZ'), ) self.assertQuerysetEqual( Article.objects.filter(headline__iregex=r'ba[RxZ]'), Article.objects.filter(headline__in=['bar', 'baxZ', 'baz']), ) # and more articles: Article.objects.bulk_create([ Article(pub_date=now, headline='foobar'), Article(pub_date=now, headline='foobaz'), Article(pub_date=now, headline='ooF'), Article(pub_date=now, headline='foobarbaz'), Article(pub_date=now, headline='zoocarfaz'), Article(pub_date=now, headline='barfoobaz'), Article(pub_date=now, headline='bazbaRFOO'), ]) # alternation self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'oo(f|b)'), Article.objects.filter(headline__in=[ 'barfoobaz', 'foobar', 'foobarbaz', 'foobaz', ]), ) self.assertQuerysetEqual( Article.objects.filter(headline__iregex=r'oo(f|b)'), Article.objects.filter(headline__in=[ 'barfoobaz', 'foobar', 'foobarbaz', 'foobaz', 'ooF', ]), ) self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'^foo(f|b)'), Article.objects.filter(headline__in=['foobar', 'foobarbaz', 'foobaz']), ) # greedy matching self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'b.*az'), Article.objects.filter(headline__in=[ 'barfoobaz', 'baz', 'bazbaRFOO', 'foobarbaz', 'foobaz', ]), ) self.assertQuerysetEqual( Article.objects.filter(headline__iregex=r'b.*ar'), 
Article.objects.filter(headline__in=[ 'bar', 'barfoobaz', 'bazbaRFOO', 'foobar', 'foobarbaz', ]), ) @skipUnlessDBFeature('supports_regex_backreferencing') def test_regex_backreferencing(self): # grouping and backreferences now = datetime.now() Article.objects.bulk_create([ Article(pub_date=now, headline='foobar'), Article(pub_date=now, headline='foobaz'), Article(pub_date=now, headline='ooF'), Article(pub_date=now, headline='foobarbaz'), Article(pub_date=now, headline='zoocarfaz'), Article(pub_date=now, headline='barfoobaz'), Article(pub_date=now, headline='bazbaRFOO'), ]) self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'b(.).*b\1').values_list('headline', flat=True), ['barfoobaz', 'bazbaRFOO', 'foobarbaz'], ) def test_regex_null(self): """ A regex lookup does not fail on null/None values """ Season.objects.create(year=2012, gt=None) self.assertQuerysetEqual(Season.objects.filter(gt__regex=r'^$'), []) def test_regex_non_string(self): """ A regex lookup does not fail on non-string fields """ s = Season.objects.create(year=2013, gt=444) self.assertQuerysetEqual(Season.objects.filter(gt__regex=r'^444$'), [s]) def test_regex_non_ascii(self): """ A regex lookup does not trip on non-ASCII characters. """ Player.objects.create(name='\u2660') Player.objects.get(name__regex='\u2660') def test_nonfield_lookups(self): """ A lookup query containing non-fields raises the proper exception. """ msg = "Unsupported lookup 'blahblah' for CharField or join on the field not permitted." with self.assertRaisesMessage(FieldError, msg): Article.objects.filter(headline__blahblah=99) with self.assertRaisesMessage(FieldError, msg): Article.objects.filter(headline__blahblah__exact=99) msg = ( "Cannot resolve keyword 'blahblah' into field. Choices are: " "author, author_id, headline, id, pub_date, slug, tag" ) with self.assertRaisesMessage(FieldError, msg): Article.objects.filter(blahblah=99) def test_lookup_collision(self): """ Genuine field names don't collide with built-in lookup types ('year', 'gt', 'range', 'in' etc.) (#11670). """ # 'gt' is used as a code number for the year, e.g. 111=>2009. season_2009 = Season.objects.create(year=2009, gt=111) season_2009.games.create(home="Houston Astros", away="St. Louis Cardinals") season_2010 = Season.objects.create(year=2010, gt=222) season_2010.games.create(home="Houston Astros", away="Chicago Cubs") season_2010.games.create(home="Houston Astros", away="Milwaukee Brewers") season_2010.games.create(home="Houston Astros", away="St. Louis Cardinals") season_2011 = Season.objects.create(year=2011, gt=333) season_2011.games.create(home="Houston Astros", away="St. 
Louis Cardinals") season_2011.games.create(home="Houston Astros", away="Milwaukee Brewers") hunter_pence = Player.objects.create(name="Hunter Pence") hunter_pence.games.set(Game.objects.filter(season__year__in=[2009, 2010])) pudge = Player.objects.create(name="Ivan Rodriquez") pudge.games.set(Game.objects.filter(season__year=2009)) pedro_feliz = Player.objects.create(name="Pedro Feliz") pedro_feliz.games.set(Game.objects.filter(season__year__in=[2011])) johnson = Player.objects.create(name="Johnson") johnson.games.set(Game.objects.filter(season__year__in=[2011])) # Games in 2010 self.assertEqual(Game.objects.filter(season__year=2010).count(), 3) self.assertEqual(Game.objects.filter(season__year__exact=2010).count(), 3) self.assertEqual(Game.objects.filter(season__gt=222).count(), 3) self.assertEqual(Game.objects.filter(season__gt__exact=222).count(), 3) # Games in 2011 self.assertEqual(Game.objects.filter(season__year=2011).count(), 2) self.assertEqual(Game.objects.filter(season__year__exact=2011).count(), 2) self.assertEqual(Game.objects.filter(season__gt=333).count(), 2) self.assertEqual(Game.objects.filter(season__gt__exact=333).count(), 2) self.assertEqual(Game.objects.filter(season__year__gt=2010).count(), 2) self.assertEqual(Game.objects.filter(season__gt__gt=222).count(), 2) # Games played in 2010 and 2011 self.assertEqual(Game.objects.filter(season__year__in=[2010, 2011]).count(), 5) self.assertEqual(Game.objects.filter(season__year__gt=2009).count(), 5) self.assertEqual(Game.objects.filter(season__gt__in=[222, 333]).count(), 5) self.assertEqual(Game.objects.filter(season__gt__gt=111).count(), 5) # Players who played in 2009 self.assertEqual(Player.objects.filter(games__season__year=2009).distinct().count(), 2) self.assertEqual(Player.objects.filter(games__season__year__exact=2009).distinct().count(), 2) self.assertEqual(Player.objects.filter(games__season__gt=111).distinct().count(), 2) self.assertEqual(Player.objects.filter(games__season__gt__exact=111).distinct().count(), 2) # Players who played in 2010 self.assertEqual(Player.objects.filter(games__season__year=2010).distinct().count(), 1) self.assertEqual(Player.objects.filter(games__season__year__exact=2010).distinct().count(), 1) self.assertEqual(Player.objects.filter(games__season__gt=222).distinct().count(), 1) self.assertEqual(Player.objects.filter(games__season__gt__exact=222).distinct().count(), 1) # Players who played in 2011 self.assertEqual(Player.objects.filter(games__season__year=2011).distinct().count(), 2) self.assertEqual(Player.objects.filter(games__season__year__exact=2011).distinct().count(), 2) self.assertEqual(Player.objects.filter(games__season__gt=333).distinct().count(), 2) self.assertEqual(Player.objects.filter(games__season__year__gt=2010).distinct().count(), 2) self.assertEqual(Player.objects.filter(games__season__gt__gt=222).distinct().count(), 2) def test_chain_date_time_lookups(self): self.assertCountEqual( Article.objects.filter(pub_date__month__gt=7), [self.a5, self.a6], ) self.assertCountEqual( Article.objects.filter(pub_date__day__gte=27), [self.a2, self.a3, self.a4, self.a7], ) self.assertCountEqual( Article.objects.filter(pub_date__hour__lt=8), [self.a1, self.a2, self.a3, self.a4, self.a7], ) self.assertCountEqual( Article.objects.filter(pub_date__minute__lte=0), [self.a1, self.a2, self.a3, self.a4, self.a5, self.a6, self.a7], ) def test_exact_none_transform(self): """Transforms are used for __exact=None.""" Season.objects.create(year=1, nulled_text_field='not null') 
self.assertFalse(Season.objects.filter(nulled_text_field__isnull=True)) self.assertTrue(Season.objects.filter(nulled_text_field__nulled__isnull=True)) self.assertTrue(Season.objects.filter(nulled_text_field__nulled__exact=None)) self.assertTrue(Season.objects.filter(nulled_text_field__nulled=None)) def test_exact_sliced_queryset_limit_one(self): self.assertCountEqual( Article.objects.filter(author=Author.objects.all()[:1]), [self.a1, self.a2, self.a3, self.a4] ) def test_exact_sliced_queryset_limit_one_offset(self): self.assertCountEqual( Article.objects.filter(author=Author.objects.all()[1:2]), [self.a5, self.a6, self.a7] ) def test_exact_sliced_queryset_not_limited_to_one(self): msg = ( 'The QuerySet value for an exact lookup must be limited to one ' 'result using slicing.' ) with self.assertRaisesMessage(ValueError, msg): list(Article.objects.filter(author=Author.objects.all()[:2])) with self.assertRaisesMessage(ValueError, msg): list(Article.objects.filter(author=Author.objects.all()[1:])) def test_custom_field_none_rhs(self): """ __exact=value is transformed to __isnull=True if Field.get_prep_value() converts value to None. """ season = Season.objects.create(year=2012, nulled_text_field=None) self.assertTrue(Season.objects.filter(pk=season.pk, nulled_text_field__isnull=True)) self.assertTrue(Season.objects.filter(pk=season.pk, nulled_text_field='')) def test_pattern_lookups_with_substr(self): a = Author.objects.create(name='John Smith', alias='Johx') b = Author.objects.create(name='Rhonda Simpson', alias='sonx') tests = ( ('startswith', [a]), ('istartswith', [a]), ('contains', [a, b]), ('icontains', [a, b]), ('endswith', [b]), ('iendswith', [b]), ) for lookup, result in tests: with self.subTest(lookup=lookup): authors = Author.objects.filter(**{'name__%s' % lookup: Substr('alias', 1, 3)}) self.assertCountEqual(authors, result) def test_custom_lookup_none_rhs(self): """Lookup.can_use_none_as_rhs=True allows None as a lookup value.""" season = Season.objects.create(year=2012, nulled_text_field=None) query = Season.objects.get_queryset().query field = query.model._meta.get_field('nulled_text_field') self.assertIsInstance(query.build_lookup(['isnull_none_rhs'], field, None), IsNullWithNoneAsRHS) self.assertTrue(Season.objects.filter(pk=season.pk, nulled_text_field__isnull_none_rhs=True)) def test_exact_exists(self): qs = Article.objects.filter(pk=OuterRef('pk')) seasons = Season.objects.annotate( pk_exists=Exists(qs), ).filter( pk_exists=Exists(qs), ) self.assertCountEqual(seasons, Season.objects.all()) def test_nested_outerref_lhs(self): tag = Tag.objects.create(name=self.au1.alias) tag.articles.add(self.a1) qs = Tag.objects.annotate( has_author_alias_match=Exists( Article.objects.annotate( author_exists=Exists( Author.objects.filter(alias=OuterRef(OuterRef('name'))) ), ).filter(author_exists=True) ), ) self.assertEqual(qs.get(has_author_alias_match=True), tag) def test_exact_query_rhs_with_selected_columns(self): newest_author = Author.objects.create(name='Author 2') authors_max_ids = Author.objects.filter( name='Author 2', ).values( 'name', ).annotate( max_id=Max('id'), ).values('max_id') authors = Author.objects.filter(id=authors_max_ids[:1]) self.assertEqual(authors.get(), newest_author) def test_isnull_non_boolean_value(self): msg = 'The QuerySet value for an isnull lookup must be True or False.' 
        tests = [
            Author.objects.filter(alias__isnull=1),
            Article.objects.filter(author__isnull=1),
            Season.objects.filter(games__isnull=1),
            Freebie.objects.filter(stock__isnull=1),
        ]
        for qs in tests:
            with self.subTest(qs=qs):
                with self.assertRaisesMessage(ValueError, msg):
                    qs.exists()

    def test_lookup_rhs(self):
        product = Product.objects.create(name='GME', qty_target=5000)
        stock_1 = Stock.objects.create(product=product, short=True, qty_available=180)
        stock_2 = Stock.objects.create(product=product, short=False, qty_available=5100)
        Stock.objects.create(product=product, short=False, qty_available=4000)
        self.assertCountEqual(
            Stock.objects.filter(short=Q(qty_available__lt=F('product__qty_target'))),
            [stock_1, stock_2],
        )
        self.assertCountEqual(
            Stock.objects.filter(short=ExpressionWrapper(
                Q(qty_available__lt=F('product__qty_target')),
                output_field=BooleanField(),
            )),
            [stock_1, stock_2],
        )
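

# Editorial addition (illustrative sketch, not part of the original test
# module): test_lookup_rhs above filters a BooleanField against a boolean
# expression on the right-hand side of the lookup. A minimal sketch of the
# same idea as plain application code, reusing this module's Product/Stock
# models, might look like the helper below; the helper name is an assumption,
# not an established API.
def stocks_below_target():
    """Return Stock rows whose availability is below their product's target."""
    return Stock.objects.filter(
        short=ExpressionWrapper(
            Q(qty_available__lt=F('product__qty_target')),
            output_field=BooleanField(),
        ),
    )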
98fb4fa2d77dfa4bee491fa0cbc3bbeee86c4939a42b6f297270bdc749765b83
import datetime from django.contrib import admin from django.contrib.admin.models import LogEntry from django.contrib.admin.options import IncorrectLookupParameters from django.contrib.admin.templatetags.admin_list import pagination from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.admin.views.main import ( ALL_VAR, IS_POPUP_VAR, ORDER_VAR, PAGE_VAR, SEARCH_VAR, TO_FIELD_VAR, ) from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from django.contrib.messages.storage.cookie import CookieStorage from django.db import connection, models from django.db.models import F, Field, IntegerField from django.db.models.functions import Upper from django.db.models.lookups import Contains, Exact from django.template import Context, Template, TemplateSyntaxError from django.test import TestCase, override_settings from django.test.client import RequestFactory from django.test.utils import ( CaptureQueriesContext, isolate_apps, register_lookup, ) from django.urls import reverse from django.utils import formats from .admin import ( BandAdmin, ChildAdmin, ChordsBandAdmin, ConcertAdmin, CustomPaginationAdmin, CustomPaginator, DynamicListDisplayChildAdmin, DynamicListDisplayLinksChildAdmin, DynamicListFilterChildAdmin, DynamicSearchFieldsChildAdmin, EmptyValueChildAdmin, EventAdmin, FilteredChildAdmin, GroupAdmin, InvitationAdmin, NoListDisplayLinksParentAdmin, ParentAdmin, QuartetAdmin, SwallowAdmin, site as custom_site, ) from .models import ( Band, CharPK, Child, ChordsBand, ChordsMusician, Concert, CustomIdUser, Event, Genre, Group, Invitation, Membership, Musician, OrderedObject, Parent, Quartet, Swallow, SwallowOneToOne, UnorderedObject, ) def build_tbody_html(pk, href, extra_fields): return ( '<tbody><tr>' '<td class="action-checkbox">' '<input type="checkbox" name="_selected_action" value="{}" ' 'class="action-select"></td>' '<th class="field-name"><a href="{}">name</a></th>' '{}</tr></tbody>' ).format(pk, href, extra_fields) @override_settings(ROOT_URLCONF="admin_changelist.urls") class ChangeListTests(TestCase): factory = RequestFactory() @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', email='[email protected]', password='xxx') def _create_superuser(self, username): return User.objects.create_superuser(username=username, email='[email protected]', password='xxx') def _mocked_authenticated_request(self, url, user): request = self.factory.get(url) request.user = user return request def test_specified_ordering_by_f_expression(self): class OrderedByFBandAdmin(admin.ModelAdmin): list_display = ['name', 'genres', 'nr_of_members'] ordering = ( F('nr_of_members').desc(nulls_last=True), Upper(F('name')).asc(), F('genres').asc(), ) m = OrderedByFBandAdmin(Band, custom_site) request = self.factory.get('/band/') request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.get_ordering_field_columns(), {3: 'desc', 2: 'asc'}) def test_specified_ordering_by_f_expression_without_asc_desc(self): class OrderedByFBandAdmin(admin.ModelAdmin): list_display = ['name', 'genres', 'nr_of_members'] ordering = (F('nr_of_members'), Upper('name'), F('genres')) m = OrderedByFBandAdmin(Band, custom_site) request = self.factory.get('/band/') request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.get_ordering_field_columns(), {3: 'asc', 2: 'asc'}) def test_select_related_preserved(self): """ Regression test for #10348: 
ChangeList.get_queryset() shouldn't overwrite a custom select_related provided by ModelAdmin.get_queryset(). """ m = ChildAdmin(Child, custom_site) request = self.factory.get('/child/') request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.query.select_related, {'parent': {}}) def test_select_related_preserved_when_multi_valued_in_search_fields(self): parent = Parent.objects.create(name='Mary') Child.objects.create(parent=parent, name='Danielle') Child.objects.create(parent=parent, name='Daniel') m = ParentAdmin(Parent, custom_site) request = self.factory.get('/parent/', data={SEARCH_VAR: 'daniel'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 1) # select_related is preserved. self.assertEqual(cl.queryset.query.select_related, {'child': {}}) def test_select_related_as_tuple(self): ia = InvitationAdmin(Invitation, custom_site) request = self.factory.get('/invitation/') request.user = self.superuser cl = ia.get_changelist_instance(request) self.assertEqual(cl.queryset.query.select_related, {'player': {}}) def test_select_related_as_empty_tuple(self): ia = InvitationAdmin(Invitation, custom_site) ia.list_select_related = () request = self.factory.get('/invitation/') request.user = self.superuser cl = ia.get_changelist_instance(request) self.assertIs(cl.queryset.query.select_related, False) def test_get_select_related_custom_method(self): class GetListSelectRelatedAdmin(admin.ModelAdmin): list_display = ('band', 'player') def get_list_select_related(self, request): return ('band', 'player') ia = GetListSelectRelatedAdmin(Invitation, custom_site) request = self.factory.get('/invitation/') request.user = self.superuser cl = ia.get_changelist_instance(request) self.assertEqual(cl.queryset.query.select_related, {'player': {}, 'band': {}}) def test_result_list_empty_changelist_value(self): """ Regression test for #14982: EMPTY_CHANGELIST_VALUE should be honored for relationship fields """ new_child = Child.objects.create(name='name', parent=None) request = self.factory.get('/child/') request.user = self.superuser m = ChildAdmin(Child, custom_site) cl = m.get_changelist_instance(request) cl.formset = None template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl, 'opts': Child._meta}) table_output = template.render(context) link = reverse('admin:admin_changelist_child_change', args=(new_child.id,)) row_html = build_tbody_html(new_child.id, link, '<td class="field-parent nowrap">-</td>') self.assertNotEqual(table_output.find(row_html), -1, 'Failed to find expected row element: %s' % table_output) def test_result_list_set_empty_value_display_on_admin_site(self): """ Empty value display can be set on AdminSite. """ new_child = Child.objects.create(name='name', parent=None) request = self.factory.get('/child/') request.user = self.superuser # Set a new empty display value on AdminSite. admin.site.empty_value_display = '???' 
m = ChildAdmin(Child, admin.site) cl = m.get_changelist_instance(request) cl.formset = None template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl, 'opts': Child._meta}) table_output = template.render(context) link = reverse('admin:admin_changelist_child_change', args=(new_child.id,)) row_html = build_tbody_html(new_child.id, link, '<td class="field-parent nowrap">???</td>') self.assertNotEqual(table_output.find(row_html), -1, 'Failed to find expected row element: %s' % table_output) def test_result_list_set_empty_value_display_in_model_admin(self): """ Empty value display can be set in ModelAdmin or individual fields. """ new_child = Child.objects.create(name='name', parent=None) request = self.factory.get('/child/') request.user = self.superuser m = EmptyValueChildAdmin(Child, admin.site) cl = m.get_changelist_instance(request) cl.formset = None template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl, 'opts': Child._meta}) table_output = template.render(context) link = reverse('admin:admin_changelist_child_change', args=(new_child.id,)) row_html = build_tbody_html( new_child.id, link, '<td class="field-age_display">&amp;dagger;</td>' '<td class="field-age">-empty-</td>' ) self.assertNotEqual(table_output.find(row_html), -1, 'Failed to find expected row element: %s' % table_output) def test_result_list_html(self): """ Inclusion tag result_list generates a table when with default ModelAdmin settings. """ new_parent = Parent.objects.create(name='parent') new_child = Child.objects.create(name='name', parent=new_parent) request = self.factory.get('/child/') request.user = self.superuser m = ChildAdmin(Child, custom_site) cl = m.get_changelist_instance(request) cl.formset = None template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl, 'opts': Child._meta}) table_output = template.render(context) link = reverse('admin:admin_changelist_child_change', args=(new_child.id,)) row_html = build_tbody_html(new_child.id, link, '<td class="field-parent nowrap">%s</td>' % new_parent) self.assertNotEqual(table_output.find(row_html), -1, 'Failed to find expected row element: %s' % table_output) def test_result_list_editable_html(self): """ Regression tests for #11791: Inclusion tag result_list generates a table and this checks that the items are nested within the table element tags. Also a regression test for #13599, verifies that hidden fields when list_editable is enabled are rendered in a div outside the table. 
""" new_parent = Parent.objects.create(name='parent') new_child = Child.objects.create(name='name', parent=new_parent) request = self.factory.get('/child/') request.user = self.superuser m = ChildAdmin(Child, custom_site) # Test with list_editable fields m.list_display = ['id', 'name', 'parent'] m.list_display_links = ['id'] m.list_editable = ['name'] cl = m.get_changelist_instance(request) FormSet = m.get_changelist_formset(request) cl.formset = FormSet(queryset=cl.result_list) template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl, 'opts': Child._meta}) table_output = template.render(context) # make sure that hidden fields are in the correct place hiddenfields_div = ( '<div class="hiddenfields">' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">' '</div>' ) % new_child.id self.assertInHTML(hiddenfields_div, table_output, msg_prefix='Failed to find hidden fields') # make sure that list editable fields are rendered in divs correctly editable_name_field = ( '<input name="form-0-name" value="name" class="vTextField" ' 'maxlength="30" type="text" id="id_form-0-name">' ) self.assertInHTML( '<td class="field-name">%s</td>' % editable_name_field, table_output, msg_prefix='Failed to find "name" list_editable field', ) def test_result_list_editable(self): """ Regression test for #14312: list_editable with pagination """ new_parent = Parent.objects.create(name='parent') for i in range(1, 201): Child.objects.create(name='name %s' % i, parent=new_parent) request = self.factory.get('/child/', data={'p': -1}) # Anything outside range request.user = self.superuser m = ChildAdmin(Child, custom_site) # Test with list_editable fields m.list_display = ['id', 'name', 'parent'] m.list_display_links = ['id'] m.list_editable = ['name'] with self.assertRaises(IncorrectLookupParameters): m.get_changelist_instance(request) def test_custom_paginator(self): new_parent = Parent.objects.create(name='parent') for i in range(1, 201): Child.objects.create(name='name %s' % i, parent=new_parent) request = self.factory.get('/child/') request.user = self.superuser m = CustomPaginationAdmin(Child, custom_site) cl = m.get_changelist_instance(request) cl.get_results(request) self.assertIsInstance(cl.paginator, CustomPaginator) def test_no_duplicates_for_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. Basic ManyToMany. """ blues = Genre.objects.create(name='Blues') band = Band.objects.create(name='B.B. King Review', nr_of_members=11) band.genres.add(blues) band.genres.add(blues) m = BandAdmin(Band, custom_site) request = self.factory.get('/band/', data={'genres': blues.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one Group instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_through_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. With an intermediate model. 
""" lead = Musician.objects.create(name='Vox') band = Group.objects.create(name='The Hype') Membership.objects.create(group=band, music=lead, role='lead voice') Membership.objects.create(group=band, music=lead, role='bass player') m = GroupAdmin(Group, custom_site) request = self.factory.get('/group/', data={'members': lead.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one Group instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_through_m2m_at_second_level_in_list_filter(self): """ When using a ManyToMany in list_filter at the second level behind a ForeignKey, results shouldn't appear more than once. """ lead = Musician.objects.create(name='Vox') band = Group.objects.create(name='The Hype') Concert.objects.create(name='Woodstock', group=band) Membership.objects.create(group=band, music=lead, role='lead voice') Membership.objects.create(group=band, music=lead, role='bass player') m = ConcertAdmin(Concert, custom_site) request = self.factory.get('/concert/', data={'group__members': lead.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one Concert instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_inherited_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. Model managed in the admin inherits from the one that defines the relationship. """ lead = Musician.objects.create(name='John') four = Quartet.objects.create(name='The Beatles') Membership.objects.create(group=four, music=lead, role='lead voice') Membership.objects.create(group=four, music=lead, role='guitar player') m = QuartetAdmin(Quartet, custom_site) request = self.factory.get('/quartet/', data={'members': lead.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one Quartet instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_m2m_to_inherited_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. Target of the relationship inherits from another. """ lead = ChordsMusician.objects.create(name='Player A') three = ChordsBand.objects.create(name='The Chords Trio') Invitation.objects.create(band=three, player=lead, instrument='guitar') Invitation.objects.create(band=three, player=lead, instrument='bass') m = ChordsBandAdmin(ChordsBand, custom_site) request = self.factory.get('/chordsband/', data={'members': lead.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one ChordsBand instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. 
self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_non_unique_related_object_in_list_filter(self): """ Regressions tests for #15819: If a field listed in list_filters is a non-unique related object, results shouldn't appear more than once. """ parent = Parent.objects.create(name='Mary') # Two children with the same name Child.objects.create(parent=parent, name='Daniel') Child.objects.create(parent=parent, name='Daniel') m = ParentAdmin(Parent, custom_site) request = self.factory.get('/parent/', data={'child__name': 'Daniel'}) request.user = self.superuser cl = m.get_changelist_instance(request) # Exists() is applied. self.assertEqual(cl.queryset.count(), 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_changelist_search_form_validation(self): m = ConcertAdmin(Concert, custom_site) tests = [ ({SEARCH_VAR: '\x00'}, 'Null characters are not allowed.'), ({SEARCH_VAR: 'some\x00thing'}, 'Null characters are not allowed.'), ] for case, error in tests: with self.subTest(case=case): request = self.factory.get('/concert/', case) request.user = self.superuser request._messages = CookieStorage(request) m.get_changelist_instance(request) messages = [m.message for m in request._messages] self.assertEqual(1, len(messages)) self.assertEqual(error, messages[0]) def test_no_duplicates_for_non_unique_related_object_in_search_fields(self): """ Regressions tests for #15819: If a field listed in search_fields is a non-unique related object, Exists() must be applied. """ parent = Parent.objects.create(name='Mary') Child.objects.create(parent=parent, name='Danielle') Child.objects.create(parent=parent, name='Daniel') m = ParentAdmin(Parent, custom_site) request = self.factory.get('/parent/', data={SEARCH_VAR: 'daniel'}) request.user = self.superuser cl = m.get_changelist_instance(request) # Exists() is applied. self.assertEqual(cl.queryset.count(), 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_many_to_many_at_second_level_in_search_fields(self): """ When using a ManyToMany in search_fields at the second level behind a ForeignKey, Exists() must be applied and results shouldn't appear more than once. """ lead = Musician.objects.create(name='Vox') band = Group.objects.create(name='The Hype') Concert.objects.create(name='Woodstock', group=band) Membership.objects.create(group=band, music=lead, role='lead voice') Membership.objects.create(group=band, music=lead, role='bass player') m = ConcertAdmin(Concert, custom_site) request = self.factory.get('/concert/', data={SEARCH_VAR: 'vox'}) request.user = self.superuser cl = m.get_changelist_instance(request) # There's only one Concert instance self.assertEqual(cl.queryset.count(), 1) # Queryset must be deletable. 
self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_pk_in_search_fields(self): band = Group.objects.create(name='The Hype') Concert.objects.create(name='Woodstock', group=band) m = ConcertAdmin(Concert, custom_site) m.search_fields = ['group__pk'] request = self.factory.get('/concert/', data={SEARCH_VAR: band.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 1) request = self.factory.get('/concert/', data={SEARCH_VAR: band.pk + 5}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 0) def test_builtin_lookup_in_search_fields(self): band = Group.objects.create(name='The Hype') concert = Concert.objects.create(name='Woodstock', group=band) m = ConcertAdmin(Concert, custom_site) m.search_fields = ['name__iexact'] request = self.factory.get('/', data={SEARCH_VAR: 'woodstock'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [concert]) request = self.factory.get('/', data={SEARCH_VAR: 'wood'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, []) def test_custom_lookup_in_search_fields(self): band = Group.objects.create(name='The Hype') concert = Concert.objects.create(name='Woodstock', group=band) m = ConcertAdmin(Concert, custom_site) m.search_fields = ['group__name__cc'] with register_lookup(Field, Contains, lookup_name='cc'): request = self.factory.get('/', data={SEARCH_VAR: 'Hype'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [concert]) request = self.factory.get('/', data={SEARCH_VAR: 'Woodstock'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, []) def test_spanning_relations_with_custom_lookup_in_search_fields(self): hype = Group.objects.create(name='The Hype') concert = Concert.objects.create(name='Woodstock', group=hype) vox = Musician.objects.create(name='Vox', age=20) Membership.objects.create(music=vox, group=hype) # Register a custom lookup on IntegerField to ensure that field # traversing logic in ModelAdmin.get_search_results() works. with register_lookup(IntegerField, Exact, lookup_name='exactly'): m = ConcertAdmin(Concert, custom_site) m.search_fields = ['group__members__age__exactly'] request = self.factory.get('/', data={SEARCH_VAR: '20'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [concert]) request = self.factory.get('/', data={SEARCH_VAR: '21'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, []) def test_custom_lookup_with_pk_shortcut(self): self.assertEqual(CharPK._meta.pk.name, 'char_pk') # Not equal to 'pk'. 
m = admin.ModelAdmin(CustomIdUser, custom_site) abc = CharPK.objects.create(char_pk='abc') abcd = CharPK.objects.create(char_pk='abcd') m = admin.ModelAdmin(CharPK, custom_site) m.search_fields = ['pk__exact'] request = self.factory.get('/', data={SEARCH_VAR: 'abc'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [abc]) request = self.factory.get('/', data={SEARCH_VAR: 'abcd'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [abcd]) def test_no_exists_for_m2m_in_list_filter_without_params(self): """ If a ManyToManyField is in list_filter but isn't in any lookup params, the changelist's query shouldn't have Exists(). """ m = BandAdmin(Band, custom_site) for lookup_params in ({}, {'name': 'test'}): request = self.factory.get('/band/', lookup_params) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertNotIn(' EXISTS', str(cl.queryset.query)) # A ManyToManyField in params does have Exists() applied. request = self.factory.get('/band/', {'genres': '0'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertIn(' EXISTS', str(cl.queryset.query)) def test_pagination(self): """ Regression tests for #12893: Pagination in admins changelist doesn't use queryset set by modeladmin. """ parent = Parent.objects.create(name='anything') for i in range(1, 31): Child.objects.create(name='name %s' % i, parent=parent) Child.objects.create(name='filtered %s' % i, parent=parent) request = self.factory.get('/child/') request.user = self.superuser # Test default queryset m = ChildAdmin(Child, custom_site) cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 60) self.assertEqual(cl.paginator.count, 60) self.assertEqual(list(cl.paginator.page_range), [1, 2, 3, 4, 5, 6]) # Test custom queryset m = FilteredChildAdmin(Child, custom_site) cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 30) self.assertEqual(cl.paginator.count, 30) self.assertEqual(list(cl.paginator.page_range), [1, 2, 3]) def test_computed_list_display_localization(self): """ Regression test for #13196: output of functions should be localized in the changelist. """ self.client.force_login(self.superuser) event = Event.objects.create(date=datetime.date.today()) response = self.client.get(reverse('admin:admin_changelist_event_changelist')) self.assertContains(response, formats.localize(event.date)) self.assertNotContains(response, str(event.date)) def test_dynamic_list_display(self): """ Regression tests for #14206: dynamic list_display support. 
""" parent = Parent.objects.create(name='parent') for i in range(10): Child.objects.create(name='child %s' % i, parent=parent) user_noparents = self._create_superuser('noparents') user_parents = self._create_superuser('parents') # Test with user 'noparents' m = custom_site._registry[Child] request = self._mocked_authenticated_request('/child/', user_noparents) response = m.changelist_view(request) self.assertNotContains(response, 'Parent object') list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ['name', 'age']) self.assertEqual(list_display_links, ['name']) # Test with user 'parents' m = DynamicListDisplayChildAdmin(Child, custom_site) request = self._mocked_authenticated_request('/child/', user_parents) response = m.changelist_view(request) self.assertContains(response, 'Parent object') custom_site.unregister(Child) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ('parent', 'name', 'age')) self.assertEqual(list_display_links, ['parent']) # Test default implementation custom_site.register(Child, ChildAdmin) m = custom_site._registry[Child] request = self._mocked_authenticated_request('/child/', user_noparents) response = m.changelist_view(request) self.assertContains(response, 'Parent object') def test_show_all(self): parent = Parent.objects.create(name='anything') for i in range(1, 31): Child.objects.create(name='name %s' % i, parent=parent) Child.objects.create(name='filtered %s' % i, parent=parent) # Add "show all" parameter to request request = self.factory.get('/child/', data={ALL_VAR: ''}) request.user = self.superuser # Test valid "show all" request (number of total objects is under max) m = ChildAdmin(Child, custom_site) m.list_max_show_all = 200 # 200 is the max we'll pass to ChangeList cl = m.get_changelist_instance(request) cl.get_results(request) self.assertEqual(len(cl.result_list), 60) # Test invalid "show all" request (number of total objects over max) # falls back to paginated pages m = ChildAdmin(Child, custom_site) m.list_max_show_all = 30 # 30 is the max we'll pass to ChangeList for this test cl = m.get_changelist_instance(request) cl.get_results(request) self.assertEqual(len(cl.result_list), 10) def test_dynamic_list_display_links(self): """ Regression tests for #16257: dynamic list_display_links support. 
""" parent = Parent.objects.create(name='parent') for i in range(1, 10): Child.objects.create(id=i, name='child %s' % i, parent=parent, age=i) m = DynamicListDisplayLinksChildAdmin(Child, custom_site) superuser = self._create_superuser('superuser') request = self._mocked_authenticated_request('/child/', superuser) response = m.changelist_view(request) for i in range(1, 10): link = reverse('admin:admin_changelist_child_change', args=(i,)) self.assertContains(response, '<a href="%s">%s</a>' % (link, i)) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ('parent', 'name', 'age')) self.assertEqual(list_display_links, ['age']) def test_no_list_display_links(self): """#15185 -- Allow no links from the 'change list' view grid.""" p = Parent.objects.create(name='parent') m = NoListDisplayLinksParentAdmin(Parent, custom_site) superuser = self._create_superuser('superuser') request = self._mocked_authenticated_request('/parent/', superuser) response = m.changelist_view(request) link = reverse('admin:admin_changelist_parent_change', args=(p.pk,)) self.assertNotContains(response, '<a href="%s">' % link) def test_clear_all_filters_link(self): self.client.force_login(self.superuser) url = reverse('admin:auth_user_changelist') response = self.client.get(url) self.assertNotContains(response, '&#10006; Clear all filters') link = '<a href="%s">&#10006; Clear all filters</a>' for data, href in ( ({'is_staff__exact': '0'}, '?'), ( {'is_staff__exact': '0', 'username__startswith': 'test'}, '?username__startswith=test', ), ( {'is_staff__exact': '0', SEARCH_VAR: 'test'}, '?%s=test' % SEARCH_VAR, ), ( {'is_staff__exact': '0', IS_POPUP_VAR: 'id'}, '?%s=id' % IS_POPUP_VAR, ), ): with self.subTest(data=data): response = self.client.get(url, data=data) self.assertContains(response, link % href) def test_clear_all_filters_link_callable_filter(self): self.client.force_login(self.superuser) url = reverse('admin:admin_changelist_band_changelist') response = self.client.get(url) self.assertNotContains(response, '&#10006; Clear all filters') link = '<a href="%s">&#10006; Clear all filters</a>' for data, href in ( ({'nr_of_members_partition': '5'}, '?'), ( {'nr_of_members_partition': 'more', 'name__startswith': 'test'}, '?name__startswith=test', ), ( {'nr_of_members_partition': '5', IS_POPUP_VAR: 'id'}, '?%s=id' % IS_POPUP_VAR, ), ): with self.subTest(data=data): response = self.client.get(url, data=data) self.assertContains(response, link % href) def test_no_clear_all_filters_link(self): self.client.force_login(self.superuser) url = reverse('admin:auth_user_changelist') link = '>&#10006; Clear all filters</a>' for data in ( {SEARCH_VAR: 'test'}, {ORDER_VAR: '-1'}, {TO_FIELD_VAR: 'id'}, {PAGE_VAR: '1'}, {IS_POPUP_VAR: '1'}, {'username__startswith': 'test'}, ): with self.subTest(data=data): response = self.client.get(url, data=data) self.assertNotContains(response, link) def test_tuple_list_display(self): swallow = Swallow.objects.create(origin='Africa', load='12.34', speed='22.2') swallow2 = Swallow.objects.create(origin='Africa', load='12.34', speed='22.2') swallow_o2o = SwallowOneToOne.objects.create(swallow=swallow2) model_admin = SwallowAdmin(Swallow, custom_site) superuser = self._create_superuser('superuser') request = self._mocked_authenticated_request('/swallow/', superuser) response = model_admin.changelist_view(request) # just want to ensure it doesn't blow up during rendering self.assertContains(response, 
str(swallow.origin)) self.assertContains(response, str(swallow.load)) self.assertContains(response, str(swallow.speed)) # Reverse one-to-one relations should work. self.assertContains(response, '<td class="field-swallowonetoone">-</td>') self.assertContains(response, '<td class="field-swallowonetoone">%s</td>' % swallow_o2o) def test_multiuser_edit(self): """ Simultaneous edits of list_editable fields on the changelist by different users must not result in one user's edits creating a new object instead of modifying the correct existing object (#11313). """ # To replicate this issue, simulate the following steps: # 1. User1 opens an admin changelist with list_editable fields. # 2. User2 edits object "Foo" such that it moves to another page in # the pagination order and saves. # 3. User1 edits object "Foo" and saves. # 4. The edit made by User1 does not get applied to object "Foo" but # instead is used to create a new object (bug). # For this test, order the changelist by the 'speed' attribute and # display 3 objects per page (SwallowAdmin.list_per_page = 3). # Setup the test to reflect the DB state after step 2 where User2 has # edited the first swallow object's speed from '4' to '1'. a = Swallow.objects.create(origin='Swallow A', load=4, speed=1) b = Swallow.objects.create(origin='Swallow B', load=2, speed=2) c = Swallow.objects.create(origin='Swallow C', load=5, speed=5) d = Swallow.objects.create(origin='Swallow D', load=9, speed=9) superuser = self._create_superuser('superuser') self.client.force_login(superuser) changelist_url = reverse('admin:admin_changelist_swallow_changelist') # Send the POST from User1 for step 3. It's still using the changelist # ordering from before User2's edits in step 2. data = { 'form-TOTAL_FORMS': '3', 'form-INITIAL_FORMS': '3', 'form-MIN_NUM_FORMS': '0', 'form-MAX_NUM_FORMS': '1000', 'form-0-uuid': str(d.pk), 'form-1-uuid': str(c.pk), 'form-2-uuid': str(a.pk), 'form-0-load': '9.0', 'form-0-speed': '9.0', 'form-1-load': '5.0', 'form-1-speed': '5.0', 'form-2-load': '5.0', 'form-2-speed': '4.0', '_save': 'Save', } response = self.client.post(changelist_url, data, follow=True, extra={'o': '-2'}) # The object User1 edited in step 3 is displayed on the changelist and # has the correct edits applied. self.assertContains(response, '1 swallow was changed successfully.') self.assertContains(response, a.origin) a.refresh_from_db() self.assertEqual(a.load, float(data['form-2-load'])) self.assertEqual(a.speed, float(data['form-2-speed'])) b.refresh_from_db() self.assertEqual(b.load, 2) self.assertEqual(b.speed, 2) c.refresh_from_db() self.assertEqual(c.load, float(data['form-1-load'])) self.assertEqual(c.speed, float(data['form-1-speed'])) d.refresh_from_db() self.assertEqual(d.load, float(data['form-0-load'])) self.assertEqual(d.speed, float(data['form-0-speed'])) # No new swallows were created. 
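        # (Only swallows a, b, c and d should remain; the regression being
        # guarded against would have turned User1's stale form data into a
        # fifth object instead of updating swallow a.)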
self.assertEqual(len(Swallow.objects.all()), 4) def test_get_edited_object_ids(self): a = Swallow.objects.create(origin='Swallow A', load=4, speed=1) b = Swallow.objects.create(origin='Swallow B', load=2, speed=2) c = Swallow.objects.create(origin='Swallow C', load=5, speed=5) superuser = self._create_superuser('superuser') self.client.force_login(superuser) changelist_url = reverse('admin:admin_changelist_swallow_changelist') m = SwallowAdmin(Swallow, custom_site) data = { 'form-TOTAL_FORMS': '3', 'form-INITIAL_FORMS': '3', 'form-MIN_NUM_FORMS': '0', 'form-MAX_NUM_FORMS': '1000', 'form-0-uuid': str(a.pk), 'form-1-uuid': str(b.pk), 'form-2-uuid': str(c.pk), 'form-0-load': '9.0', 'form-0-speed': '9.0', 'form-1-load': '5.0', 'form-1-speed': '5.0', 'form-2-load': '5.0', 'form-2-speed': '4.0', '_save': 'Save', } request = self.factory.post(changelist_url, data=data) pks = m._get_edited_object_pks(request, prefix='form') self.assertEqual(sorted(pks), sorted([str(a.pk), str(b.pk), str(c.pk)])) def test_get_list_editable_queryset(self): a = Swallow.objects.create(origin='Swallow A', load=4, speed=1) Swallow.objects.create(origin='Swallow B', load=2, speed=2) data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '2', 'form-MIN_NUM_FORMS': '0', 'form-MAX_NUM_FORMS': '1000', 'form-0-uuid': str(a.pk), 'form-0-load': '10', '_save': 'Save', } superuser = self._create_superuser('superuser') self.client.force_login(superuser) changelist_url = reverse('admin:admin_changelist_swallow_changelist') m = SwallowAdmin(Swallow, custom_site) request = self.factory.post(changelist_url, data=data) queryset = m._get_list_editable_queryset(request, prefix='form') self.assertEqual(queryset.count(), 1) data['form-0-uuid'] = 'INVALD_PRIMARY_KEY' # The unfiltered queryset is returned if there's invalid data. 
request = self.factory.post(changelist_url, data=data) queryset = m._get_list_editable_queryset(request, prefix='form') self.assertEqual(queryset.count(), 2) def test_get_list_editable_queryset_with_regex_chars_in_prefix(self): a = Swallow.objects.create(origin='Swallow A', load=4, speed=1) Swallow.objects.create(origin='Swallow B', load=2, speed=2) data = { 'form$-TOTAL_FORMS': '2', 'form$-INITIAL_FORMS': '2', 'form$-MIN_NUM_FORMS': '0', 'form$-MAX_NUM_FORMS': '1000', 'form$-0-uuid': str(a.pk), 'form$-0-load': '10', '_save': 'Save', } superuser = self._create_superuser('superuser') self.client.force_login(superuser) changelist_url = reverse('admin:admin_changelist_swallow_changelist') m = SwallowAdmin(Swallow, custom_site) request = self.factory.post(changelist_url, data=data) queryset = m._get_list_editable_queryset(request, prefix='form$') self.assertEqual(queryset.count(), 1) def test_changelist_view_list_editable_changed_objects_uses_filter(self): """list_editable edits use a filtered queryset to limit memory usage.""" a = Swallow.objects.create(origin='Swallow A', load=4, speed=1) Swallow.objects.create(origin='Swallow B', load=2, speed=2) data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '2', 'form-MIN_NUM_FORMS': '0', 'form-MAX_NUM_FORMS': '1000', 'form-0-uuid': str(a.pk), 'form-0-load': '10', '_save': 'Save', } superuser = self._create_superuser('superuser') self.client.force_login(superuser) changelist_url = reverse('admin:admin_changelist_swallow_changelist') with CaptureQueriesContext(connection) as context: response = self.client.post(changelist_url, data=data) self.assertEqual(response.status_code, 200) self.assertIn('WHERE', context.captured_queries[4]['sql']) self.assertIn('IN', context.captured_queries[4]['sql']) # Check only the first few characters since the UUID may have dashes. self.assertIn(str(a.pk)[:8], context.captured_queries[4]['sql']) def test_deterministic_order_for_unordered_model(self): """ The primary key is used in the ordering of the changelist's results to guarantee a deterministic order, even when the model doesn't have any default ordering defined (#17198). """ superuser = self._create_superuser('superuser') for counter in range(1, 51): UnorderedObject.objects.create(id=counter, bool=True) class UnorderedObjectAdmin(admin.ModelAdmin): list_per_page = 10 def check_results_order(ascending=False): custom_site.register(UnorderedObject, UnorderedObjectAdmin) model_admin = UnorderedObjectAdmin(UnorderedObject, custom_site) counter = 0 if ascending else 51 for page in range(1, 6): request = self._mocked_authenticated_request('/unorderedobject/?p=%s' % page, superuser) response = model_admin.changelist_view(request) for result in response.context_data['cl'].result_list: counter += 1 if ascending else -1 self.assertEqual(result.id, counter) custom_site.unregister(UnorderedObject) # When no order is defined at all, everything is ordered by '-pk'. check_results_order() # When an order field is defined but multiple records have the same # value for that field, make sure everything gets ordered by -pk as well. UnorderedObjectAdmin.ordering = ['bool'] check_results_order() # When order fields are defined, including the pk itself, use them. 
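        # (The orderings below that include 'pk' or 'id' ascending expect
        # ascending ids, hence check_results_order(ascending=True) for them.)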
UnorderedObjectAdmin.ordering = ['bool', '-pk'] check_results_order() UnorderedObjectAdmin.ordering = ['bool', 'pk'] check_results_order(ascending=True) UnorderedObjectAdmin.ordering = ['-id', 'bool'] check_results_order() UnorderedObjectAdmin.ordering = ['id', 'bool'] check_results_order(ascending=True) def test_deterministic_order_for_model_ordered_by_its_manager(self): """ The primary key is used in the ordering of the changelist's results to guarantee a deterministic order, even when the model has a manager that defines a default ordering (#17198). """ superuser = self._create_superuser('superuser') for counter in range(1, 51): OrderedObject.objects.create(id=counter, bool=True, number=counter) class OrderedObjectAdmin(admin.ModelAdmin): list_per_page = 10 def check_results_order(ascending=False): custom_site.register(OrderedObject, OrderedObjectAdmin) model_admin = OrderedObjectAdmin(OrderedObject, custom_site) counter = 0 if ascending else 51 for page in range(1, 6): request = self._mocked_authenticated_request('/orderedobject/?p=%s' % page, superuser) response = model_admin.changelist_view(request) for result in response.context_data['cl'].result_list: counter += 1 if ascending else -1 self.assertEqual(result.id, counter) custom_site.unregister(OrderedObject) # When no order is defined at all, use the model's default ordering (i.e. 'number') check_results_order(ascending=True) # When an order field is defined but multiple records have the same # value for that field, make sure everything gets ordered by -pk as well. OrderedObjectAdmin.ordering = ['bool'] check_results_order() # When order fields are defined, including the pk itself, use them. OrderedObjectAdmin.ordering = ['bool', '-pk'] check_results_order() OrderedObjectAdmin.ordering = ['bool', 'pk'] check_results_order(ascending=True) OrderedObjectAdmin.ordering = ['-id', 'bool'] check_results_order() OrderedObjectAdmin.ordering = ['id', 'bool'] check_results_order(ascending=True) @isolate_apps('admin_changelist') def test_total_ordering_optimization(self): class Related(models.Model): unique_field = models.BooleanField(unique=True) class Meta: ordering = ('unique_field',) class Model(models.Model): unique_field = models.BooleanField(unique=True) unique_nullable_field = models.BooleanField(unique=True, null=True) related = models.ForeignKey(Related, models.CASCADE) other_related = models.ForeignKey(Related, models.CASCADE) related_unique = models.OneToOneField(Related, models.CASCADE) field = models.BooleanField() other_field = models.BooleanField() null_field = models.BooleanField(null=True) class Meta: unique_together = { ('field', 'other_field'), ('field', 'null_field'), ('related', 'other_related_id'), } class ModelAdmin(admin.ModelAdmin): def get_queryset(self, request): return Model.objects.none() request = self._mocked_authenticated_request('/', self.superuser) site = admin.AdminSite(name='admin') model_admin = ModelAdmin(Model, site) change_list = model_admin.get_changelist_instance(request) tests = ( ([], ['-pk']), # Unique non-nullable field. (['unique_field'], ['unique_field']), (['-unique_field'], ['-unique_field']), # Unique nullable field. (['unique_nullable_field'], ['unique_nullable_field', '-pk']), # Field. (['field'], ['field', '-pk']), # Related field introspection is not implemented. (['related__unique_field'], ['related__unique_field', '-pk']), # Related attname unique. (['related_unique_id'], ['related_unique_id']), # Related ordering introspection is not implemented. 
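            # (Even a unique OneToOneField referenced by its field name falls
            # back to appending '-pk', as the next case shows.)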
(['related_unique'], ['related_unique', '-pk']), # Composite unique. (['field', '-other_field'], ['field', '-other_field']), # Composite unique nullable. (['-field', 'null_field'], ['-field', 'null_field', '-pk']), # Composite unique and nullable. (['-field', 'null_field', 'other_field'], ['-field', 'null_field', 'other_field']), # Composite unique attnames. (['related_id', '-other_related_id'], ['related_id', '-other_related_id']), # Composite unique names. (['related', '-other_related_id'], ['related', '-other_related_id', '-pk']), ) # F() objects composite unique. total_ordering = [F('field'), F('other_field').desc(nulls_last=True)] # F() objects composite unique nullable. non_total_ordering = [F('field'), F('null_field').desc(nulls_last=True)] tests += ( (total_ordering, total_ordering), (non_total_ordering, non_total_ordering + ['-pk']), ) for ordering, expected in tests: with self.subTest(ordering=ordering): self.assertEqual(change_list._get_deterministic_ordering(ordering), expected) @isolate_apps('admin_changelist') def test_total_ordering_optimization_meta_constraints(self): class Related(models.Model): unique_field = models.BooleanField(unique=True) class Meta: ordering = ('unique_field',) class Model(models.Model): field_1 = models.BooleanField() field_2 = models.BooleanField() field_3 = models.BooleanField() field_4 = models.BooleanField() field_5 = models.BooleanField() field_6 = models.BooleanField() nullable_1 = models.BooleanField(null=True) nullable_2 = models.BooleanField(null=True) related_1 = models.ForeignKey(Related, models.CASCADE) related_2 = models.ForeignKey(Related, models.CASCADE) related_3 = models.ForeignKey(Related, models.CASCADE) related_4 = models.ForeignKey(Related, models.CASCADE) class Meta: constraints = [ *[ models.UniqueConstraint(fields=fields, name=''.join(fields)) for fields in ( ['field_1'], ['nullable_1'], ['related_1'], ['related_2_id'], ['field_2', 'field_3'], ['field_2', 'nullable_2'], ['field_2', 'related_3'], ['field_3', 'related_4_id'], ) ], models.CheckConstraint(check=models.Q(id__gt=0), name='foo'), models.UniqueConstraint( fields=['field_5'], condition=models.Q(id__gt=10), name='total_ordering_1', ), models.UniqueConstraint( fields=['field_6'], condition=models.Q(), name='total_ordering', ), ] class ModelAdmin(admin.ModelAdmin): def get_queryset(self, request): return Model.objects.none() request = self._mocked_authenticated_request('/', self.superuser) site = admin.AdminSite(name='admin') model_admin = ModelAdmin(Model, site) change_list = model_admin.get_changelist_instance(request) tests = ( # Unique non-nullable field. (['field_1'], ['field_1']), # Unique nullable field. (['nullable_1'], ['nullable_1', '-pk']), # Related attname unique. (['related_1_id'], ['related_1_id']), (['related_2_id'], ['related_2_id']), # Related ordering introspection is not implemented. (['related_1'], ['related_1', '-pk']), # Composite unique. (['-field_2', 'field_3'], ['-field_2', 'field_3']), # Composite unique nullable. (['field_2', '-nullable_2'], ['field_2', '-nullable_2', '-pk']), # Composite unique and nullable. ( ['field_2', '-nullable_2', 'field_3'], ['field_2', '-nullable_2', 'field_3'], ), # Composite field and related field name. (['field_2', '-related_3'], ['field_2', '-related_3', '-pk']), (['field_3', 'related_4'], ['field_3', 'related_4', '-pk']), # Composite field and related field attname. 
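            # (Referencing the *_id attname matches the constraint's columns
            # directly, so no '-pk' is appended, unlike the name-based cases
            # above.)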
(['field_2', 'related_3_id'], ['field_2', 'related_3_id']), (['field_3', '-related_4_id'], ['field_3', '-related_4_id']), # Partial unique constraint is ignored. (['field_5'], ['field_5', '-pk']), # Unique constraint with an empty condition. (['field_6'], ['field_6']), ) for ordering, expected in tests: with self.subTest(ordering=ordering): self.assertEqual(change_list._get_deterministic_ordering(ordering), expected) def test_dynamic_list_filter(self): """ Regression tests for ticket #17646: dynamic list_filter support. """ parent = Parent.objects.create(name='parent') for i in range(10): Child.objects.create(name='child %s' % i, parent=parent) user_noparents = self._create_superuser('noparents') user_parents = self._create_superuser('parents') # Test with user 'noparents' m = DynamicListFilterChildAdmin(Child, custom_site) request = self._mocked_authenticated_request('/child/', user_noparents) response = m.changelist_view(request) self.assertEqual(response.context_data['cl'].list_filter, ['name', 'age']) # Test with user 'parents' m = DynamicListFilterChildAdmin(Child, custom_site) request = self._mocked_authenticated_request('/child/', user_parents) response = m.changelist_view(request) self.assertEqual(response.context_data['cl'].list_filter, ('parent', 'name', 'age')) def test_dynamic_search_fields(self): child = self._create_superuser('child') m = DynamicSearchFieldsChildAdmin(Child, custom_site) request = self._mocked_authenticated_request('/child/', child) response = m.changelist_view(request) self.assertEqual(response.context_data['cl'].search_fields, ('name', 'age')) def test_pagination_page_range(self): """ Regression tests for ticket #15653: ensure the number of pages generated for changelist views are correct. """ # instantiating and setting up ChangeList object m = GroupAdmin(Group, custom_site) request = self.factory.get('/group/') request.user = self.superuser cl = m.get_changelist_instance(request) cl.list_per_page = 10 ELLIPSIS = cl.paginator.ELLIPSIS for number, pages, expected in [ (1, 1, []), (1, 2, [1, 2]), (6, 11, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]), (6, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]), (6, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, ELLIPSIS, 12, 13]), (7, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]), (7, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]), (7, 14, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, ELLIPSIS, 13, 14]), (8, 13, [1, 2, ELLIPSIS, 5, 6, 7, 8, 9, 10, 11, 12, 13]), (8, 14, [1, 2, ELLIPSIS, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]), (8, 15, [1, 2, ELLIPSIS, 5, 6, 7, 8, 9, 10, 11, ELLIPSIS, 14, 15]), ]: with self.subTest(number=number, pages=pages): # assuming exactly `pages * cl.list_per_page` objects Group.objects.all().delete() for i in range(pages * cl.list_per_page): Group.objects.create(name='test band') # setting page number and calculating page range cl.page_num = number cl.get_results(request) self.assertEqual(list(pagination(cl)['page_range']), expected) def test_object_tools_displayed_no_add_permission(self): """ When ModelAdmin.has_add_permission() returns False, the object-tools block is still shown. """ superuser = self._create_superuser('superuser') m = EventAdmin(Event, custom_site) request = self._mocked_authenticated_request('/event/', superuser) self.assertFalse(m.has_add_permission(request)) response = m.changelist_view(request) self.assertIn('<ul class="object-tools">', response.rendered_content) # The "Add" button inside the object-tools shouldn't appear. 
self.assertNotIn('Add ', response.rendered_content) def test_search_help_text(self): superuser = self._create_superuser('superuser') m = BandAdmin(Band, custom_site) # search_fields without search_help_text. m.search_fields = ['name'] request = self._mocked_authenticated_request('/band/', superuser) response = m.changelist_view(request) self.assertIsNone(response.context_data['cl'].search_help_text) self.assertNotContains(response, '<div class="help">') # search_fields with search_help_text. m.search_help_text = 'Search help text' request = self._mocked_authenticated_request('/band/', superuser) response = m.changelist_view(request) self.assertEqual(response.context_data['cl'].search_help_text, 'Search help text') self.assertContains(response, '<div class="help">Search help text</div>') class GetAdminLogTests(TestCase): def test_custom_user_pk_not_named_id(self): """ {% get_admin_log %} works if the user model's primary key isn't named 'id'. """ context = Context({'user': CustomIdUser()}) template = Template('{% load log %}{% get_admin_log 10 as admin_log for_user user %}') # This template tag just logs. self.assertEqual(template.render(context), '') def test_no_user(self): """{% get_admin_log %} works without specifying a user.""" user = User(username='jondoe', password='secret', email='[email protected]') user.save() ct = ContentType.objects.get_for_model(User) LogEntry.objects.log_action(user.pk, ct.pk, user.pk, repr(user), 1) t = Template( '{% load log %}' '{% get_admin_log 100 as admin_log %}' '{% for entry in admin_log %}' '{{ entry|safe }}' '{% endfor %}' ) self.assertEqual(t.render(Context({})), 'Added “<User: jondoe>”.') def test_missing_args(self): msg = "'get_admin_log' statements require two arguments" with self.assertRaisesMessage(TemplateSyntaxError, msg): Template('{% load log %}{% get_admin_log 10 as %}') def test_non_integer_limit(self): msg = "First argument to 'get_admin_log' must be an integer" with self.assertRaisesMessage(TemplateSyntaxError, msg): Template('{% load log %}{% get_admin_log "10" as admin_log for_user user %}') def test_without_as(self): msg = "Second argument to 'get_admin_log' must be 'as'" with self.assertRaisesMessage(TemplateSyntaxError, msg): Template('{% load log %}{% get_admin_log 10 ad admin_log for_user user %}') def test_without_for_user(self): msg = "Fourth argument to 'get_admin_log' must be 'for_user'" with self.assertRaisesMessage(TemplateSyntaxError, msg): Template('{% load log %}{% get_admin_log 10 as admin_log foruser user %}') @override_settings(ROOT_URLCONF='admin_changelist.urls') class SeleniumTests(AdminSeleniumTestCase): available_apps = ['admin_changelist'] + AdminSeleniumTestCase.available_apps def setUp(self): User.objects.create_superuser(username='super', password='secret', email=None) def test_add_row_selection(self): """ The status line for selected rows gets updated correctly (#22038). 
""" self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:auth_user_changelist')) form_id = '#changelist-form' # Test amount of rows in the Changelist rows = self.selenium.find_elements_by_css_selector( '%s #result_list tbody tr' % form_id ) self.assertEqual(len(rows), 1) row = rows[0] selection_indicator = self.selenium.find_element_by_css_selector( '%s .action-counter' % form_id ) all_selector = self.selenium.find_element_by_id('action-toggle') row_selector = self.selenium.find_element_by_css_selector( '%s #result_list tbody tr:first-child .action-select' % form_id ) # Test current selection self.assertEqual(selection_indicator.text, "0 of 1 selected") self.assertIs(all_selector.get_property('checked'), False) self.assertEqual(row.get_attribute('class'), '') # Select a row and check again row_selector.click() self.assertEqual(selection_indicator.text, "1 of 1 selected") self.assertIs(all_selector.get_property('checked'), True) self.assertEqual(row.get_attribute('class'), 'selected') # Deselect a row and check again row_selector.click() self.assertEqual(selection_indicator.text, "0 of 1 selected") self.assertIs(all_selector.get_property('checked'), False) self.assertEqual(row.get_attribute('class'), '') def test_modifier_allows_multiple_section(self): """ Selecting a row and then selecting another row whilst holding shift should select all rows in-between. """ from selenium.webdriver.common.action_chains import ActionChains from selenium.webdriver.common.keys import Keys Parent.objects.bulk_create([Parent(name='parent%d' % i) for i in range(5)]) self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_changelist_parent_changelist')) checkboxes = self.selenium.find_elements_by_css_selector('tr input.action-select') self.assertEqual(len(checkboxes), 5) for c in checkboxes: self.assertIs(c.get_property('checked'), False) # Check first row. Hold-shift and check next-to-last row. 
checkboxes[0].click() ActionChains(self.selenium).key_down(Keys.SHIFT).click(checkboxes[-2]).key_up(Keys.SHIFT).perform() for c in checkboxes[:-2]: self.assertIs(c.get_property('checked'), True) self.assertIs(checkboxes[-1].get_property('checked'), False) def test_select_all_across_pages(self): Parent.objects.bulk_create([Parent(name='parent%d' % i) for i in range(101)]) self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_changelist_parent_changelist')) selection_indicator = self.selenium.find_element_by_css_selector('.action-counter') select_all_indicator = self.selenium.find_element_by_css_selector('.actions .all') question = self.selenium.find_element_by_css_selector('.actions > .question') clear = self.selenium.find_element_by_css_selector('.actions > .clear') select_all = self.selenium.find_element_by_id('action-toggle') select_across = self.selenium.find_element_by_name('select_across') self.assertIs(question.is_displayed(), False) self.assertIs(clear.is_displayed(), False) self.assertIs(select_all.get_property('checked'), False) self.assertEqual(select_across.get_property('value'), '0') self.assertIs(selection_indicator.is_displayed(), True) self.assertEqual(selection_indicator.text, '0 of 100 selected') self.assertIs(select_all_indicator.is_displayed(), False) select_all.click() self.assertIs(question.is_displayed(), True) self.assertIs(clear.is_displayed(), False) self.assertIs(select_all.get_property('checked'), True) self.assertEqual(select_across.get_property('value'), '0') self.assertIs(selection_indicator.is_displayed(), True) self.assertEqual(selection_indicator.text, '100 of 100 selected') self.assertIs(select_all_indicator.is_displayed(), False) question.click() self.assertIs(question.is_displayed(), False) self.assertIs(clear.is_displayed(), True) self.assertIs(select_all.get_property('checked'), True) self.assertEqual(select_across.get_property('value'), '1') self.assertIs(selection_indicator.is_displayed(), False) self.assertIs(select_all_indicator.is_displayed(), True) clear.click() self.assertIs(question.is_displayed(), False) self.assertIs(clear.is_displayed(), False) self.assertIs(select_all.get_property('checked'), False) self.assertEqual(select_across.get_property('value'), '0') self.assertIs(selection_indicator.is_displayed(), True) self.assertEqual(selection_indicator.text, '0 of 100 selected') self.assertIs(select_all_indicator.is_displayed(), False) def test_actions_warn_on_pending_edits(self): Parent.objects.create(name='foo') self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_changelist_parent_changelist')) name_input = self.selenium.find_element_by_id('id_form-0-name') name_input.clear() name_input.send_keys('bar') self.selenium.find_element_by_id('action-toggle').click() self.selenium.find_element_by_name('index').click() # Go alert = self.selenium.switch_to.alert try: self.assertEqual( alert.text, 'You have unsaved changes on individual editable fields. If you ' 'run an action, your unsaved changes will be lost.' 
) finally: alert.dismiss() def test_save_with_changes_warns_on_pending_action(self): from selenium.webdriver.support.ui import Select Parent.objects.create(name='parent') self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_changelist_parent_changelist')) name_input = self.selenium.find_element_by_id('id_form-0-name') name_input.clear() name_input.send_keys('other name') Select( self.selenium.find_element_by_name('action') ).select_by_value('delete_selected') self.selenium.find_element_by_name('_save').click() alert = self.selenium.switch_to.alert try: self.assertEqual( alert.text, 'You have selected an action, but you haven’t saved your ' 'changes to individual fields yet. Please click OK to save. ' 'You’ll need to re-run the action.', ) finally: alert.dismiss() def test_save_without_changes_warns_on_pending_action(self): from selenium.webdriver.support.ui import Select Parent.objects.create(name='parent') self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_changelist_parent_changelist')) Select( self.selenium.find_element_by_name('action') ).select_by_value('delete_selected') self.selenium.find_element_by_name('_save').click() alert = self.selenium.switch_to.alert try: self.assertEqual( alert.text, 'You have selected an action, and you haven’t made any ' 'changes on individual fields. You’re probably looking for ' 'the Go button rather than the Save button.', ) finally: alert.dismiss()
import importlib import inspect import os import re import sys import tempfile import threading from io import StringIO from pathlib import Path from unittest import mock from django.core import mail from django.core.files.uploadedfile import SimpleUploadedFile from django.db import DatabaseError, connection from django.http import Http404 from django.shortcuts import render from django.template import TemplateDoesNotExist from django.test import RequestFactory, SimpleTestCase, override_settings from django.test.utils import LoggingCaptureMixin from django.urls import path, reverse from django.urls.converters import IntConverter from django.utils.functional import SimpleLazyObject from django.utils.regex_helper import _lazy_re_compile from django.utils.safestring import mark_safe from django.views.debug import ( CallableSettingWrapper, ExceptionCycleWarning, ExceptionReporter, Path as DebugPath, SafeExceptionReporterFilter, default_urlconf, get_default_exception_reporter_filter, technical_404_response, technical_500_response, ) from django.views.decorators.debug import ( sensitive_post_parameters, sensitive_variables, ) from ..views import ( custom_exception_reporter_filter_view, index_page, multivalue_dict_key_error, non_sensitive_view, paranoid_view, sensitive_args_function_caller, sensitive_kwargs_function_caller, sensitive_method_view, sensitive_view, ) class User: def __str__(self): return 'jacob' class WithoutEmptyPathUrls: urlpatterns = [path('url/', index_page, name='url')] class CallableSettingWrapperTests(SimpleTestCase): """ Unittests for CallableSettingWrapper """ def test_repr(self): class WrappedCallable: def __repr__(self): return "repr from the wrapped callable" def __call__(self): pass actual = repr(CallableSettingWrapper(WrappedCallable())) self.assertEqual(actual, "repr from the wrapped callable") @override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls') class DebugViewTests(SimpleTestCase): def test_files(self): with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/raises/') self.assertEqual(response.status_code, 500) data = { 'file_data.txt': SimpleUploadedFile('file_data.txt', b'haha'), } with self.assertLogs('django.request', 'ERROR'): response = self.client.post('/raises/', data) self.assertContains(response, 'file_data.txt', status_code=500) self.assertNotContains(response, 'haha', status_code=500) def test_400(self): # When DEBUG=True, technical_500_template() is called. with self.assertLogs('django.security', 'WARNING'): response = self.client.get('/raises400/') self.assertContains(response, '<div class="context" id="', status_code=400) def test_400_bad_request(self): # When DEBUG=True, technical_500_template() is called. with self.assertLogs('django.request', 'WARNING') as cm: response = self.client.get('/raises400_bad_request/') self.assertContains(response, '<div class="context" id="', status_code=400) self.assertEqual( cm.records[0].getMessage(), 'Malformed request syntax: /raises400_bad_request/', ) # Ensure no 403.html template exists to test the default case. @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', }]) def test_403(self): response = self.client.get('/raises403/') self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403) # Set up a test 403.html template. 
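    # (A locmem loader is enough for this; the template renders the
    # PermissionDenied message via its {{ exception }} variable, which the
    # test below expects to be "Insufficient Permissions".)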
@override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'OPTIONS': { 'loaders': [ ('django.template.loaders.locmem.Loader', { '403.html': 'This is a test template for a 403 error ({{ exception }}).', }), ], }, }]) def test_403_template(self): response = self.client.get('/raises403/') self.assertContains(response, 'test template', status_code=403) self.assertContains(response, '(Insufficient Permissions).', status_code=403) def test_404(self): response = self.client.get('/raises404/') self.assertNotContains( response, '<pre class="exception_value">', status_code=404, ) self.assertContains( response, '<p>The current path, <code>not-in-urls</code>, didn’t match any ' 'of these.</p>', status_code=404, html=True, ) def test_404_not_in_urls(self): response = self.client.get('/not-in-urls') self.assertNotContains(response, "Raised by:", status_code=404) self.assertNotContains( response, '<pre class="exception_value">', status_code=404, ) self.assertContains(response, "Django tried these URL patterns", status_code=404) self.assertContains( response, '<p>The current path, <code>not-in-urls</code>, didn’t match any ' 'of these.</p>', status_code=404, html=True, ) # Pattern and view name of a RegexURLPattern appear. self.assertContains(response, r"^regex-post/(?P&lt;pk&gt;[0-9]+)/$", status_code=404) self.assertContains(response, "[name='regex-post']", status_code=404) # Pattern and view name of a RoutePattern appear. self.assertContains(response, r"path-post/&lt;int:pk&gt;/", status_code=404) self.assertContains(response, "[name='path-post']", status_code=404) @override_settings(ROOT_URLCONF=WithoutEmptyPathUrls) def test_404_empty_path_not_in_urls(self): response = self.client.get('/') self.assertContains( response, '<p>The empty path didn’t match any of these.</p>', status_code=404, html=True, ) def test_technical_404(self): response = self.client.get('/technical404/') self.assertContains( response, '<pre class="exception_value">Testing technical 404.</pre>', status_code=404, html=True, ) self.assertContains(response, "Raised by:", status_code=404) self.assertContains(response, "view_tests.views.technical404", status_code=404) self.assertContains( response, '<p>The current path, <code>technical404/</code>, matched the ' 'last one.</p>', status_code=404, html=True, ) def test_classbased_technical_404(self): response = self.client.get('/classbased404/') self.assertContains(response, "Raised by:", status_code=404) self.assertContains(response, "view_tests.views.Http404View", status_code=404) def test_non_l10ned_numeric_ids(self): """ Numeric IDs and fancy traceback context blocks line numbers shouldn't be localized. """ with self.settings(DEBUG=True, USE_L10N=True): with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/raises500/') # We look for a HTML fragment of the form # '<div class="context" id="c38123208">', not '<div class="context" id="c38,123,208"' self.assertContains(response, '<div class="context" id="', status_code=500) match = re.search(b'<div class="context" id="(?P<id>[^"]+)">', response.content) self.assertIsNotNone(match) id_repr = match['id'] self.assertFalse( re.search(b'[^c0-9]', id_repr), "Numeric IDs in debug response HTML page shouldn't be localized (value: %s)." 
% id_repr.decode() ) def test_template_exceptions(self): with self.assertLogs('django.request', 'ERROR'): try: self.client.get(reverse('template_exception')) except Exception: raising_loc = inspect.trace()[-1][-2][0].strip() self.assertNotEqual( raising_loc.find("raise Exception('boom')"), -1, "Failed to find 'raise Exception' in last frame of " "traceback, instead found: %s" % raising_loc ) def test_template_loader_postmortem(self): """Tests for not existing file""" template_name = "notfound.html" with tempfile.NamedTemporaryFile(prefix=template_name) as tmpfile: tempdir = os.path.dirname(tmpfile.name) template_path = os.path.join(tempdir, template_name) with override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [tempdir], }]), self.assertLogs('django.request', 'ERROR'): response = self.client.get(reverse('raises_template_does_not_exist', kwargs={"path": template_name})) self.assertContains(response, "%s (Source does not exist)" % template_path, status_code=500, count=2) # Assert as HTML. self.assertContains( response, '<li><code>django.template.loaders.filesystem.Loader</code>: ' '%s (Source does not exist)</li>' % os.path.join(tempdir, 'notfound.html'), status_code=500, html=True, ) def test_no_template_source_loaders(self): """ Make sure if you don't specify a template, the debug view doesn't blow up. """ with self.assertLogs('django.request', 'ERROR'): with self.assertRaises(TemplateDoesNotExist): self.client.get('/render_no_template/') @override_settings(ROOT_URLCONF='view_tests.default_urls') def test_default_urlconf_template(self): """ Make sure that the default URLconf template is shown instead of the technical 404 page, if the user has not altered their URLconf yet. """ response = self.client.get('/') self.assertContains( response, "<h1>The install worked successfully! Congratulations!</h1>" ) @override_settings(ROOT_URLCONF='view_tests.regression_21530_urls') def test_regression_21530(self): """ Regression test for bug #21530. If the admin app include is replaced with exactly one url pattern, then the technical 404 template should be displayed. The bug here was that an AttributeError caused a 500 response. """ response = self.client.get('/') self.assertContains( response, "Page not found <span>(404)</span>", status_code=404 ) def test_template_encoding(self): """ The templates are loaded directly, not via a template loader, and should be opened as utf-8 charset as is the default specified on template engines. 
""" with mock.patch.object(DebugPath, 'open') as m: default_urlconf(None) m.assert_called_once_with(encoding='utf-8') m.reset_mock() technical_404_response(mock.MagicMock(), mock.Mock()) m.assert_called_once_with(encoding='utf-8') def test_technical_404_converter_raise_404(self): with mock.patch.object(IntConverter, 'to_python', side_effect=Http404): response = self.client.get('/path-post/1/') self.assertContains(response, 'Page not found', status_code=404) def test_exception_reporter_from_request(self): with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/custom_reporter_class_view/') self.assertContains(response, 'custom traceback text', status_code=500) @override_settings(DEFAULT_EXCEPTION_REPORTER='view_tests.views.CustomExceptionReporter') def test_exception_reporter_from_settings(self): with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/raises500/') self.assertContains(response, 'custom traceback text', status_code=500) @override_settings(DEFAULT_EXCEPTION_REPORTER='view_tests.views.TemplateOverrideExceptionReporter') def test_template_override_exception_reporter(self): with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/raises500/') self.assertContains( response, '<h1>Oh no, an error occurred!</h1>', status_code=500, html=True, ) with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/raises500/', HTTP_ACCEPT='text/plain') self.assertContains(response, 'Oh dear, an error occurred!', status_code=500) class DebugViewQueriesAllowedTests(SimpleTestCase): # May need a query to initialize MySQL connection databases = {'default'} def test_handle_db_exception(self): """ Ensure the debug view works when a database exception is raised by performing an invalid query and passing the exception to the debug view. """ with connection.cursor() as cursor: try: cursor.execute('INVALID SQL') except DatabaseError: exc_info = sys.exc_info() rf = RequestFactory() response = technical_500_response(rf.get('/'), *exc_info) self.assertContains(response, 'OperationalError at /', status_code=500) @override_settings( DEBUG=True, ROOT_URLCONF='view_tests.urls', # No template directories are configured, so no templates will be found. TEMPLATES=[{ 'BACKEND': 'django.template.backends.dummy.TemplateStrings', }], ) class NonDjangoTemplatesDebugViewTests(SimpleTestCase): def test_400(self): # When DEBUG=True, technical_500_template() is called. with self.assertLogs('django.security', 'WARNING'): response = self.client.get('/raises400/') self.assertContains(response, '<div class="context" id="', status_code=400) def test_400_bad_request(self): # When DEBUG=True, technical_500_template() is called. with self.assertLogs('django.request', 'WARNING') as cm: response = self.client.get('/raises400_bad_request/') self.assertContains(response, '<div class="context" id="', status_code=400) self.assertEqual( cm.records[0].getMessage(), 'Malformed request syntax: /raises400_bad_request/', ) def test_403(self): response = self.client.get('/raises403/') self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403) def test_404(self): response = self.client.get('/raises404/') self.assertEqual(response.status_code, 404) def test_template_not_found_error(self): # Raises a TemplateDoesNotExist exception and shows the debug view. 
url = reverse('raises_template_does_not_exist', kwargs={"path": "notfound.html"}) with self.assertLogs('django.request', 'ERROR'): response = self.client.get(url) self.assertContains(response, '<div class="context" id="', status_code=500) class ExceptionReporterTests(SimpleTestCase): rf = RequestFactory() def test_request_and_exception(self): "A simple exception report can be generated" try: request = self.rf.get('/test_view/') request.user = User() raise ValueError("Can't find my keys") except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>ValueError at /test_view/</h1>', html) self.assertIn('<pre class="exception_value">Can&#x27;t find my keys</pre>', html) self.assertIn('<th>Request Method:</th>', html) self.assertIn('<th>Request URL:</th>', html) self.assertIn('<h3 id="user-info">USER</h3>', html) self.assertIn('<p>jacob</p>', html) self.assertIn('<th>Exception Type:</th>', html) self.assertIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertNotIn('<p>Request data not supplied</p>', html) self.assertIn('<p>No POST data</p>', html) def test_no_request(self): "An exception report can be generated without request" try: raise ValueError("Can't find my keys") except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>ValueError</h1>', html) self.assertIn('<pre class="exception_value">Can&#x27;t find my keys</pre>', html) self.assertNotIn('<th>Request Method:</th>', html) self.assertNotIn('<th>Request URL:</th>', html) self.assertNotIn('<h3 id="user-info">USER</h3>', html) self.assertIn('<th>Exception Type:</th>', html) self.assertIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertIn('<p>Request data not supplied</p>', html) def test_sharing_traceback(self): try: raise ValueError('Oops') except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertIn( '<form action="https://dpaste.com/" name="pasteform" ' 'id="pasteform" method="post">', html, ) def test_eol_support(self): """The ExceptionReporter supports Unix, Windows and Macintosh EOL markers""" LINES = ['print %d' % i for i in range(1, 6)] reporter = ExceptionReporter(None, None, None, None) for newline in ['\n', '\r\n', '\r']: fd, filename = tempfile.mkstemp(text=False) os.write(fd, (newline.join(LINES) + newline).encode()) os.close(fd) try: self.assertEqual( reporter._get_lines_from_file(filename, 3, 2), (1, LINES[1:3], LINES[3], LINES[4:]) ) finally: os.unlink(filename) def test_no_exception(self): "An exception report can be generated for just a request" request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML('<h1>Report at /test_view/</h1>', html) self.assertIn('<pre class="exception_value">No exception message supplied</pre>', html) self.assertIn('<th>Request Method:</th>', html) self.assertIn('<th>Request URL:</th>', html) self.assertNotIn('<th>Exception Type:</th>', html) self.assertNotIn('<th>Exception Value:</th>', html) self.assertNotIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) 
self.assertNotIn('<p>Request data not supplied</p>', html) def test_suppressed_context(self): try: try: raise RuntimeError("Can't find my keys") except RuntimeError: raise ValueError("Can't find my keys") from None except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>ValueError</h1>', html) self.assertIn('<pre class="exception_value">Can&#x27;t find my keys</pre>', html) self.assertIn('<th>Exception Type:</th>', html) self.assertIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertIn('<p>Request data not supplied</p>', html) self.assertNotIn('During handling of the above exception', html) def test_innermost_exception_without_traceback(self): try: try: raise RuntimeError('Oops') except Exception as exc: new_exc = RuntimeError('My context') exc.__context__ = new_exc raise except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) frames = reporter.get_traceback_frames() self.assertEqual(len(frames), 2) html = reporter.get_traceback_html() self.assertInHTML('<h1>RuntimeError</h1>', html) self.assertIn('<pre class="exception_value">Oops</pre>', html) self.assertIn('<th>Exception Type:</th>', html) self.assertIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertIn('<p>Request data not supplied</p>', html) self.assertIn( 'During handling of the above exception (My context), another ' 'exception occurred', html, ) self.assertInHTML('<li class="frame user">None</li>', html) self.assertIn('Traceback (most recent call last):\n None', html) text = reporter.get_traceback_text() self.assertIn('Exception Type: RuntimeError', text) self.assertIn('Exception Value: Oops', text) self.assertIn('Traceback (most recent call last):\n None', text) self.assertIn( 'During handling of the above exception (My context), another ' 'exception occurred', text, ) def test_mid_stack_exception_without_traceback(self): try: try: raise RuntimeError('Inner Oops') except Exception as exc: new_exc = RuntimeError('My context') new_exc.__context__ = exc raise RuntimeError('Oops') from new_exc except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>RuntimeError</h1>', html) self.assertIn('<pre class="exception_value">Oops</pre>', html) self.assertIn('<th>Exception Type:</th>', html) self.assertIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertInHTML('<li class="frame user">Traceback: None</li>', html) self.assertIn( 'During handling of the above exception (Inner Oops), another ' 'exception occurred:\n Traceback: None', html, ) text = reporter.get_traceback_text() self.assertIn('Exception Type: RuntimeError', text) self.assertIn('Exception Value: Oops', text) self.assertIn('Traceback (most recent call last):', text) self.assertIn( 'During handling of the above exception (Inner Oops), another ' 'exception occurred:\n Traceback: None', text, ) def test_reporting_of_nested_exceptions(self): request = self.rf.get('/test_view/') try: try: raise AttributeError(mark_safe('<p>Top level</p>')) except AttributeError as explicit: try: raise ValueError(mark_safe('<p>Second exception</p>')) from explicit except ValueError: raise 
IndexError(mark_safe('<p>Final exception</p>')) except Exception: # Custom exception handler, just pass it into ExceptionReporter exc_type, exc_value, tb = sys.exc_info() explicit_exc = 'The above exception ({0}) was the direct cause of the following exception:' implicit_exc = 'During handling of the above exception ({0}), another exception occurred:' reporter = ExceptionReporter(request, exc_type, exc_value, tb) html = reporter.get_traceback_html() # Both messages are twice on page -- one rendered as html, # one as plain text (for pastebin) self.assertEqual(2, html.count(explicit_exc.format('&lt;p&gt;Top level&lt;/p&gt;'))) self.assertEqual(2, html.count(implicit_exc.format('&lt;p&gt;Second exception&lt;/p&gt;'))) self.assertEqual(10, html.count('&lt;p&gt;Final exception&lt;/p&gt;')) text = reporter.get_traceback_text() self.assertIn(explicit_exc.format('<p>Top level</p>'), text) self.assertIn(implicit_exc.format('<p>Second exception</p>'), text) self.assertEqual(3, text.count('<p>Final exception</p>')) def test_reporting_frames_without_source(self): try: source = "def funcName():\n raise Error('Whoops')\nfuncName()" namespace = {} code = compile(source, 'generated', 'exec') exec(code, namespace) except Exception: exc_type, exc_value, tb = sys.exc_info() request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, exc_type, exc_value, tb) frames = reporter.get_traceback_frames() last_frame = frames[-1] self.assertEqual(last_frame['context_line'], '<source code not available>') self.assertEqual(last_frame['filename'], 'generated') self.assertEqual(last_frame['function'], 'funcName') self.assertEqual(last_frame['lineno'], 2) html = reporter.get_traceback_html() self.assertIn( '<span class="fname">generated</span>, line 2, in funcName', html, ) self.assertIn( '<code class="fname">generated</code>, line 2, in funcName', html, ) self.assertIn( '"generated", line 2, in funcName\n' ' &lt;source code not available&gt;', html, ) text = reporter.get_traceback_text() self.assertIn( '"generated", line 2, in funcName\n' ' <source code not available>', text, ) def test_reporting_frames_source_not_match(self): try: source = "def funcName():\n raise Error('Whoops')\nfuncName()" namespace = {} code = compile(source, 'generated', 'exec') exec(code, namespace) except Exception: exc_type, exc_value, tb = sys.exc_info() with mock.patch( 'django.views.debug.ExceptionReporter._get_source', return_value=['wrong source'], ): request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, exc_type, exc_value, tb) frames = reporter.get_traceback_frames() last_frame = frames[-1] self.assertEqual(last_frame['context_line'], '<source code not available>') self.assertEqual(last_frame['filename'], 'generated') self.assertEqual(last_frame['function'], 'funcName') self.assertEqual(last_frame['lineno'], 2) html = reporter.get_traceback_html() self.assertIn( '<span class="fname">generated</span>, line 2, in funcName', html, ) self.assertIn( '<code class="fname">generated</code>, line 2, in funcName', html, ) self.assertIn( '"generated", line 2, in funcName\n' ' &lt;source code not available&gt;', html, ) text = reporter.get_traceback_text() self.assertIn( '"generated", line 2, in funcName\n' ' <source code not available>', text, ) def test_reporting_frames_for_cyclic_reference(self): try: def test_func(): try: raise RuntimeError('outer') from RuntimeError('inner') except RuntimeError as exc: raise exc.__cause__ test_func() except Exception: exc_type, exc_value, tb = sys.exc_info() request = 
self.rf.get('/test_view/') reporter = ExceptionReporter(request, exc_type, exc_value, tb) def generate_traceback_frames(*args, **kwargs): nonlocal tb_frames tb_frames = reporter.get_traceback_frames() tb_frames = None tb_generator = threading.Thread(target=generate_traceback_frames, daemon=True) msg = ( "Cycle in the exception chain detected: exception 'inner' " "encountered again." ) with self.assertWarnsMessage(ExceptionCycleWarning, msg): tb_generator.start() tb_generator.join(timeout=5) if tb_generator.is_alive(): # tb_generator is a daemon that runs until the main thread/process # exits. This is resource heavy when running the full test suite. # Setting the following values to None makes # reporter.get_traceback_frames() exit early. exc_value.__traceback__ = exc_value.__context__ = exc_value.__cause__ = None tb_generator.join() self.fail('Cyclic reference in Exception Reporter.get_traceback_frames()') if tb_frames is None: # can happen if the thread generating traceback got killed # or exception while generating the traceback self.fail('Traceback generation failed') last_frame = tb_frames[-1] self.assertIn('raise exc.__cause__', last_frame['context_line']) self.assertEqual(last_frame['filename'], __file__) self.assertEqual(last_frame['function'], 'test_func') def test_request_and_message(self): "A message can be provided in addition to a request" request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, None, "I'm a little teapot", None) html = reporter.get_traceback_html() self.assertInHTML('<h1>Report at /test_view/</h1>', html) self.assertIn('<pre class="exception_value">I&#x27;m a little teapot</pre>', html) self.assertIn('<th>Request Method:</th>', html) self.assertIn('<th>Request URL:</th>', html) self.assertNotIn('<th>Exception Type:</th>', html) self.assertNotIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertNotIn('<p>Request data not supplied</p>', html) def test_message_only(self): reporter = ExceptionReporter(None, None, "I'm a little teapot", None) html = reporter.get_traceback_html() self.assertInHTML('<h1>Report</h1>', html) self.assertIn('<pre class="exception_value">I&#x27;m a little teapot</pre>', html) self.assertNotIn('<th>Request Method:</th>', html) self.assertNotIn('<th>Request URL:</th>', html) self.assertNotIn('<th>Exception Type:</th>', html) self.assertNotIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertIn('<p>Request data not supplied</p>', html) def test_non_utf8_values_handling(self): "Non-UTF-8 exceptions/values should not make the output generation choke." 
try: class NonUtf8Output(Exception): def __repr__(self): return b'EXC\xe9EXC' somevar = b'VAL\xe9VAL' # NOQA raise NonUtf8Output() except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertIn('VAL\\xe9VAL', html) self.assertIn('EXC\\xe9EXC', html) def test_local_variable_escaping(self): """Safe strings in local variables are escaped.""" try: local = mark_safe('<p>Local variable</p>') raise ValueError(local) except Exception: exc_type, exc_value, tb = sys.exc_info() html = ExceptionReporter(None, exc_type, exc_value, tb).get_traceback_html() self.assertIn('<td class="code"><pre>&#x27;&lt;p&gt;Local variable&lt;/p&gt;&#x27;</pre></td>', html) def test_unprintable_values_handling(self): "Unprintable values should not make the output generation choke." try: class OomOutput: def __repr__(self): raise MemoryError('OOM') oomvalue = OomOutput() # NOQA raise ValueError() except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertIn('<td class="code"><pre>Error in formatting', html) def test_too_large_values_handling(self): "Large values should not create a large HTML." large = 256 * 1024 repr_of_str_adds = len(repr('')) try: class LargeOutput: def __repr__(self): return repr('A' * large) largevalue = LargeOutput() # NOQA raise ValueError() except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertEqual(len(html) // 1024 // 128, 0) # still fit in 128Kb self.assertIn('&lt;trimmed %d bytes string&gt;' % (large + repr_of_str_adds,), html) def test_encoding_error(self): """ A UnicodeError displays a portion of the problematic string. HTML in safe strings is escaped. """ try: mark_safe('abcdefghijkl<p>mnὀp</p>qrstuwxyz').encode('ascii') except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertIn('<h2>Unicode error hint</h2>', html) self.assertIn('The string that could not be encoded/decoded was: ', html) self.assertIn('<strong>&lt;p&gt;mnὀp&lt;/p&gt;</strong>', html) def test_unfrozen_importlib(self): """ importlib is not a frozen app, but its loader thinks it's frozen which results in an ImportError. Refs #21443. """ try: request = self.rf.get('/test_view/') importlib.import_module('abc.def.invalid.name') except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>ModuleNotFoundError at /test_view/</h1>', html) def test_ignore_traceback_evaluation_exceptions(self): """ Don't trip over exceptions generated by crafted objects when evaluating them while cleansing (#24455). """ class BrokenEvaluation(Exception): pass def broken_setup(): raise BrokenEvaluation request = self.rf.get('/test_view/') broken_lazy = SimpleLazyObject(broken_setup) try: bool(broken_lazy) except BrokenEvaluation: exc_type, exc_value, tb = sys.exc_info() self.assertIn( "BrokenEvaluation", ExceptionReporter(request, exc_type, exc_value, tb).get_traceback_html(), "Evaluation exception reason not mentioned in traceback" ) @override_settings(ALLOWED_HOSTS='example.com') def test_disallowed_host(self): "An exception report can be generated even for a disallowed host." 
request = self.rf.get('/', HTTP_HOST='evil.com') reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertIn("http://evil.com/", html) def test_request_with_items_key(self): """ An exception report can be generated for requests with 'items' in request GET, POST, FILES, or COOKIES QueryDicts. """ value = '<td>items</td><td class="code"><pre>&#x27;Oops&#x27;</pre></td>' # GET request = self.rf.get('/test_view/?items=Oops') reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML(value, html) # POST request = self.rf.post('/test_view/', data={'items': 'Oops'}) reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML(value, html) # FILES fp = StringIO('filecontent') request = self.rf.post('/test_view/', data={'name': 'filename', 'items': fp}) reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML( '<td>items</td><td class="code"><pre>&lt;InMemoryUploadedFile: ' 'items (application/octet-stream)&gt;</pre></td>', html ) # COOKIES rf = RequestFactory() rf.cookies['items'] = 'Oops' request = rf.get('/test_view/') reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML('<td>items</td><td class="code"><pre>&#x27;Oops&#x27;</pre></td>', html) def test_exception_fetching_user(self): """ The error page can be rendered if the current user can't be retrieved (such as when the database is unavailable). """ class ExceptionUser: def __str__(self): raise Exception() request = self.rf.get('/test_view/') request.user = ExceptionUser() try: raise ValueError('Oops') except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>ValueError at /test_view/</h1>', html) self.assertIn('<pre class="exception_value">Oops</pre>', html) self.assertIn('<h3 id="user-info">USER</h3>', html) self.assertIn('<p>[unable to retrieve the current user]</p>', html) text = reporter.get_traceback_text() self.assertIn('USER: [unable to retrieve the current user]', text) def test_template_encoding(self): """ The templates are loaded directly, not via a template loader, and should be opened as utf-8 charset as is the default specified on template engines. 
""" reporter = ExceptionReporter(None, None, None, None) with mock.patch.object(DebugPath, 'open') as m: reporter.get_traceback_html() m.assert_called_once_with(encoding='utf-8') m.reset_mock() reporter.get_traceback_text() m.assert_called_once_with(encoding='utf-8') @override_settings(ALLOWED_HOSTS=['example.com']) def test_get_raw_insecure_uri(self): factory = RequestFactory(HTTP_HOST='evil.com') tests = [ ('////absolute-uri', 'http://evil.com//absolute-uri'), ('/?foo=bar', 'http://evil.com/?foo=bar'), ('/path/with:colons', 'http://evil.com/path/with:colons'), ] for url, expected in tests: with self.subTest(url=url): request = factory.get(url) reporter = ExceptionReporter(request, None, None, None) self.assertEqual(reporter._get_raw_insecure_uri(), expected) class PlainTextReportTests(SimpleTestCase): rf = RequestFactory() def test_request_and_exception(self): "A simple exception report can be generated" try: request = self.rf.get('/test_view/') request.user = User() raise ValueError("Can't find my keys") except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) text = reporter.get_traceback_text() self.assertIn('ValueError at /test_view/', text) self.assertIn("Can't find my keys", text) self.assertIn('Request Method:', text) self.assertIn('Request URL:', text) self.assertIn('USER: jacob', text) self.assertIn('Exception Type:', text) self.assertIn('Exception Value:', text) self.assertIn('Traceback (most recent call last):', text) self.assertIn('Request information:', text) self.assertNotIn('Request data not supplied', text) def test_no_request(self): "An exception report can be generated without request" try: raise ValueError("Can't find my keys") except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) text = reporter.get_traceback_text() self.assertIn('ValueError', text) self.assertIn("Can't find my keys", text) self.assertNotIn('Request Method:', text) self.assertNotIn('Request URL:', text) self.assertNotIn('USER:', text) self.assertIn('Exception Type:', text) self.assertIn('Exception Value:', text) self.assertIn('Traceback (most recent call last):', text) self.assertIn('Request data not supplied', text) def test_no_exception(self): "An exception report can be generated for just a request" request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, None, None, None) reporter.get_traceback_text() def test_request_and_message(self): "A message can be provided in addition to a request" request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, None, "I'm a little teapot", None) reporter.get_traceback_text() @override_settings(DEBUG=True) def test_template_exception(self): request = self.rf.get('/test_view/') try: render(request, 'debug/template_error.html') except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) text = reporter.get_traceback_text() templ_path = Path(Path(__file__).parents[1], 'templates', 'debug', 'template_error.html') self.assertIn( 'Template error:\n' 'In template %(path)s, error at line 2\n' ' \'cycle\' tag requires at least two arguments\n' ' 1 : Template with error:\n' ' 2 : {%% cycle %%} \n' ' 3 : ' % {'path': templ_path}, text ) def test_request_with_items_key(self): """ An exception report can be generated for requests with 'items' in request GET, POST, FILES, or COOKIES QueryDicts. 
""" # GET request = self.rf.get('/test_view/?items=Oops') reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn("items = 'Oops'", text) # POST request = self.rf.post('/test_view/', data={'items': 'Oops'}) reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn("items = 'Oops'", text) # FILES fp = StringIO('filecontent') request = self.rf.post('/test_view/', data={'name': 'filename', 'items': fp}) reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn('items = <InMemoryUploadedFile:', text) # COOKIES rf = RequestFactory() rf.cookies['items'] = 'Oops' request = rf.get('/test_view/') reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn("items = 'Oops'", text) def test_message_only(self): reporter = ExceptionReporter(None, None, "I'm a little teapot", None) reporter.get_traceback_text() @override_settings(ALLOWED_HOSTS='example.com') def test_disallowed_host(self): "An exception report can be generated even for a disallowed host." request = self.rf.get('/', HTTP_HOST='evil.com') reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn("http://evil.com/", text) class ExceptionReportTestMixin: # Mixin used in the ExceptionReporterFilterTests and # AjaxResponseExceptionReporterFilter tests below breakfast_data = { 'sausage-key': 'sausage-value', 'baked-beans-key': 'baked-beans-value', 'hash-brown-key': 'hash-brown-value', 'bacon-key': 'bacon-value', } def verify_unsafe_response(self, view, check_for_vars=True, check_for_POST_params=True): """ Asserts that potentially sensitive info are displayed in the response. """ request = self.rf.post('/some_url/', self.breakfast_data) response = view(request) if check_for_vars: # All variables are shown. self.assertContains(response, 'cooked_eggs', status_code=500) self.assertContains(response, 'scrambled', status_code=500) self.assertContains(response, 'sauce', status_code=500) self.assertContains(response, 'worcestershire', status_code=500) if check_for_POST_params: for k, v in self.breakfast_data.items(): # All POST parameters are shown. self.assertContains(response, k, status_code=500) self.assertContains(response, v, status_code=500) def verify_safe_response(self, view, check_for_vars=True, check_for_POST_params=True): """ Asserts that certain sensitive info are not displayed in the response. """ request = self.rf.post('/some_url/', self.breakfast_data) response = view(request) if check_for_vars: # Non-sensitive variable's name and value are shown. self.assertContains(response, 'cooked_eggs', status_code=500) self.assertContains(response, 'scrambled', status_code=500) # Sensitive variable's name is shown but not its value. self.assertContains(response, 'sauce', status_code=500) self.assertNotContains(response, 'worcestershire', status_code=500) if check_for_POST_params: for k in self.breakfast_data: # All POST parameters' names are shown. self.assertContains(response, k, status_code=500) # Non-sensitive POST parameters' values are shown. self.assertContains(response, 'baked-beans-value', status_code=500) self.assertContains(response, 'hash-brown-value', status_code=500) # Sensitive POST parameters' values are not shown. 
self.assertNotContains(response, 'sausage-value', status_code=500) self.assertNotContains(response, 'bacon-value', status_code=500) def verify_paranoid_response(self, view, check_for_vars=True, check_for_POST_params=True): """ Asserts that no variables or POST parameters are displayed in the response. """ request = self.rf.post('/some_url/', self.breakfast_data) response = view(request) if check_for_vars: # Show variable names but not their values. self.assertContains(response, 'cooked_eggs', status_code=500) self.assertNotContains(response, 'scrambled', status_code=500) self.assertContains(response, 'sauce', status_code=500) self.assertNotContains(response, 'worcestershire', status_code=500) if check_for_POST_params: for k, v in self.breakfast_data.items(): # All POST parameters' names are shown. self.assertContains(response, k, status_code=500) # No POST parameters' values are shown. self.assertNotContains(response, v, status_code=500) def verify_unsafe_email(self, view, check_for_POST_params=True): """ Asserts that potentially sensitive info are displayed in the email report. """ with self.settings(ADMINS=[('Admin', 'admin@example.com')]): mail.outbox = [] # Empty outbox request = self.rf.post('/some_url/', self.breakfast_data) view(request) self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] # Frames vars are never shown in plain text email reports. body_plain = str(email.body) self.assertNotIn('cooked_eggs', body_plain) self.assertNotIn('scrambled', body_plain) self.assertNotIn('sauce', body_plain) self.assertNotIn('worcestershire', body_plain) # Frames vars are shown in html email reports. body_html = str(email.alternatives[0][0]) self.assertIn('cooked_eggs', body_html) self.assertIn('scrambled', body_html) self.assertIn('sauce', body_html) self.assertIn('worcestershire', body_html) if check_for_POST_params: for k, v in self.breakfast_data.items(): # All POST parameters are shown. self.assertIn(k, body_plain) self.assertIn(v, body_plain) self.assertIn(k, body_html) self.assertIn(v, body_html) def verify_safe_email(self, view, check_for_POST_params=True): """ Asserts that certain sensitive info are not displayed in the email report. """ with self.settings(ADMINS=[('Admin', 'admin@example.com')]): mail.outbox = [] # Empty outbox request = self.rf.post('/some_url/', self.breakfast_data) view(request) self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] # Frames vars are never shown in plain text email reports. body_plain = str(email.body) self.assertNotIn('cooked_eggs', body_plain) self.assertNotIn('scrambled', body_plain) self.assertNotIn('sauce', body_plain) self.assertNotIn('worcestershire', body_plain) # Frames vars are shown in html email reports. body_html = str(email.alternatives[0][0]) self.assertIn('cooked_eggs', body_html) self.assertIn('scrambled', body_html) self.assertIn('sauce', body_html) self.assertNotIn('worcestershire', body_html) if check_for_POST_params: for k in self.breakfast_data: # All POST parameters' names are shown. self.assertIn(k, body_plain) # Non-sensitive POST parameters' values are shown. self.assertIn('baked-beans-value', body_plain) self.assertIn('hash-brown-value', body_plain) self.assertIn('baked-beans-value', body_html) self.assertIn('hash-brown-value', body_html) # Sensitive POST parameters' values are not shown.
self.assertNotIn('sausage-value', body_plain) self.assertNotIn('bacon-value', body_plain) self.assertNotIn('sausage-value', body_html) self.assertNotIn('bacon-value', body_html) def verify_paranoid_email(self, view): """ Asserts that no variables or POST parameters are displayed in the email report. """ with self.settings(ADMINS=[('Admin', 'admin@example.com')]): mail.outbox = [] # Empty outbox request = self.rf.post('/some_url/', self.breakfast_data) view(request) self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] # Frames vars are never shown in plain text email reports. body = str(email.body) self.assertNotIn('cooked_eggs', body) self.assertNotIn('scrambled', body) self.assertNotIn('sauce', body) self.assertNotIn('worcestershire', body) for k, v in self.breakfast_data.items(): # All POST parameters' names are shown. self.assertIn(k, body) # No POST parameters' values are shown. self.assertNotIn(v, body) @override_settings(ROOT_URLCONF='view_tests.urls') class ExceptionReporterFilterTests(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase): """ Sensitive information can be filtered out of error reports (#14614). """ rf = RequestFactory() def test_non_sensitive_request(self): """ Everything (request info and frame variables) can be seen in the default error reports for non-sensitive requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(non_sensitive_view) self.verify_unsafe_email(non_sensitive_view) with self.settings(DEBUG=False): self.verify_unsafe_response(non_sensitive_view) self.verify_unsafe_email(non_sensitive_view) def test_sensitive_request(self): """ Sensitive POST parameters and frame variables cannot be seen in the default error reports for sensitive requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_view) self.verify_unsafe_email(sensitive_view) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_view) self.verify_safe_email(sensitive_view) def test_paranoid_request(self): """ No POST parameters and frame variables can be seen in the default error reports for "paranoid" requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(paranoid_view) self.verify_unsafe_email(paranoid_view) with self.settings(DEBUG=False): self.verify_paranoid_response(paranoid_view) self.verify_paranoid_email(paranoid_view) def test_multivalue_dict_key_error(self): """ #21098 -- Sensitive POST parameters cannot be seen in the error reports if request.POST['nonexistent_key'] throws an error. """ with self.settings(DEBUG=True): self.verify_unsafe_response(multivalue_dict_key_error) self.verify_unsafe_email(multivalue_dict_key_error) with self.settings(DEBUG=False): self.verify_safe_response(multivalue_dict_key_error) self.verify_safe_email(multivalue_dict_key_error) def test_custom_exception_reporter_filter(self): """ It's possible to assign an exception reporter filter to the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER. """ with self.settings(DEBUG=True): self.verify_unsafe_response(custom_exception_reporter_filter_view) self.verify_unsafe_email(custom_exception_reporter_filter_view) with self.settings(DEBUG=False): self.verify_unsafe_response(custom_exception_reporter_filter_view) self.verify_unsafe_email(custom_exception_reporter_filter_view) def test_sensitive_method(self): """ The sensitive_variables decorator works with object methods.
""" with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_method_view, check_for_POST_params=False) self.verify_unsafe_email(sensitive_method_view, check_for_POST_params=False) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_method_view, check_for_POST_params=False) self.verify_safe_email(sensitive_method_view, check_for_POST_params=False) def test_sensitive_function_arguments(self): """ Sensitive variables don't leak in the sensitive_variables decorator's frame, when those variables are passed as arguments to the decorated function. """ with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_args_function_caller) self.verify_unsafe_email(sensitive_args_function_caller) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_args_function_caller, check_for_POST_params=False) self.verify_safe_email(sensitive_args_function_caller, check_for_POST_params=False) def test_sensitive_function_keyword_arguments(self): """ Sensitive variables don't leak in the sensitive_variables decorator's frame, when those variables are passed as keyword arguments to the decorated function. """ with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_kwargs_function_caller) self.verify_unsafe_email(sensitive_kwargs_function_caller) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_kwargs_function_caller, check_for_POST_params=False) self.verify_safe_email(sensitive_kwargs_function_caller, check_for_POST_params=False) def test_callable_settings(self): """ Callable settings should not be evaluated in the debug page (#21345). """ def callable_setting(): return "This should not be displayed" with self.settings(DEBUG=True, FOOBAR=callable_setting): response = self.client.get('/raises500/') self.assertNotContains(response, "This should not be displayed", status_code=500) def test_callable_settings_forbidding_to_set_attributes(self): """ Callable settings which forbid to set attributes should not break the debug page (#23070). """ class CallableSettingWithSlots: __slots__ = [] def __call__(self): return "This should not be displayed" with self.settings(DEBUG=True, WITH_SLOTS=CallableSettingWithSlots()): response = self.client.get('/raises500/') self.assertNotContains(response, "This should not be displayed", status_code=500) def test_dict_setting_with_non_str_key(self): """ A dict setting containing a non-string key should not break the debug page (#12744). """ with self.settings(DEBUG=True, FOOBAR={42: None}): response = self.client.get('/raises500/') self.assertContains(response, 'FOOBAR', status_code=500) def test_sensitive_settings(self): """ The debug page should not show some sensitive settings (password, secret key, ...). """ sensitive_settings = [ 'SECRET_KEY', 'PASSWORD', 'API_KEY', 'AUTH_TOKEN', ] for setting in sensitive_settings: with self.settings(DEBUG=True, **{setting: "should not be displayed"}): response = self.client.get('/raises500/') self.assertNotContains(response, 'should not be displayed', status_code=500) def test_settings_with_sensitive_keys(self): """ The debug page should filter out some sensitive information found in dict settings. 
""" sensitive_settings = [ 'SECRET_KEY', 'PASSWORD', 'API_KEY', 'AUTH_TOKEN', ] for setting in sensitive_settings: FOOBAR = { setting: "should not be displayed", 'recursive': {setting: "should not be displayed"}, } with self.settings(DEBUG=True, FOOBAR=FOOBAR): response = self.client.get('/raises500/') self.assertNotContains(response, 'should not be displayed', status_code=500) def test_cleanse_setting_basic(self): reporter_filter = SafeExceptionReporterFilter() self.assertEqual(reporter_filter.cleanse_setting('TEST', 'TEST'), 'TEST') self.assertEqual( reporter_filter.cleanse_setting('PASSWORD', 'super_secret'), reporter_filter.cleansed_substitute, ) def test_cleanse_setting_ignore_case(self): reporter_filter = SafeExceptionReporterFilter() self.assertEqual( reporter_filter.cleanse_setting('password', 'super_secret'), reporter_filter.cleansed_substitute, ) def test_cleanse_setting_recurses_in_dictionary(self): reporter_filter = SafeExceptionReporterFilter() initial = {'login': 'cooper', 'password': 'secret'} self.assertEqual( reporter_filter.cleanse_setting('SETTING_NAME', initial), {'login': 'cooper', 'password': reporter_filter.cleansed_substitute}, ) def test_cleanse_setting_recurses_in_dictionary_with_non_string_key(self): reporter_filter = SafeExceptionReporterFilter() initial = {('localhost', 8000): {'login': 'cooper', 'password': 'secret'}} self.assertEqual( reporter_filter.cleanse_setting('SETTING_NAME', initial), { ('localhost', 8000): { 'login': 'cooper', 'password': reporter_filter.cleansed_substitute, }, }, ) def test_cleanse_setting_recurses_in_list_tuples(self): reporter_filter = SafeExceptionReporterFilter() initial = [ { 'login': 'cooper', 'password': 'secret', 'apps': ( {'name': 'app1', 'api_key': 'a06b-c462cffae87a'}, {'name': 'app2', 'api_key': 'a9f4-f152e97ad808'}, ), 'tokens': ['98b37c57-ec62-4e39', '8690ef7d-8004-4916'], }, {'SECRET_KEY': 'c4d77c62-6196-4f17-a06b-c462cffae87a'}, ] cleansed = [ { 'login': 'cooper', 'password': reporter_filter.cleansed_substitute, 'apps': ( {'name': 'app1', 'api_key': reporter_filter.cleansed_substitute}, {'name': 'app2', 'api_key': reporter_filter.cleansed_substitute}, ), 'tokens': reporter_filter.cleansed_substitute, }, {'SECRET_KEY': reporter_filter.cleansed_substitute}, ] self.assertEqual( reporter_filter.cleanse_setting('SETTING_NAME', initial), cleansed, ) self.assertEqual( reporter_filter.cleanse_setting('SETTING_NAME', tuple(initial)), tuple(cleansed), ) def test_request_meta_filtering(self): request = self.rf.get('/', HTTP_SECRET_HEADER='super_secret') reporter_filter = SafeExceptionReporterFilter() self.assertEqual( reporter_filter.get_safe_request_meta(request)['HTTP_SECRET_HEADER'], reporter_filter.cleansed_substitute, ) def test_exception_report_uses_meta_filtering(self): response = self.client.get('/raises500/', HTTP_SECRET_HEADER='super_secret') self.assertNotIn(b'super_secret', response.content) response = self.client.get( '/raises500/', HTTP_SECRET_HEADER='super_secret', HTTP_ACCEPT='application/json', ) self.assertNotIn(b'super_secret', response.content) class CustomExceptionReporterFilter(SafeExceptionReporterFilter): cleansed_substitute = 'XXXXXXXXXXXXXXXXXXXX' hidden_settings = _lazy_re_compile('API|TOKEN|KEY|SECRET|PASS|SIGNATURE|DATABASE_URL', flags=re.I) @override_settings( ROOT_URLCONF='view_tests.urls', DEFAULT_EXCEPTION_REPORTER_FILTER='%s.CustomExceptionReporterFilter' % __name__, ) class CustomExceptionReporterFilterTests(SimpleTestCase): def setUp(self): get_default_exception_reporter_filter.cache_clear() 
def tearDown(self): get_default_exception_reporter_filter.cache_clear() def test_setting_allows_custom_subclass(self): self.assertIsInstance( get_default_exception_reporter_filter(), CustomExceptionReporterFilter, ) def test_cleansed_substitute_override(self): reporter_filter = get_default_exception_reporter_filter() self.assertEqual( reporter_filter.cleanse_setting('password', 'super_secret'), reporter_filter.cleansed_substitute, ) def test_hidden_settings_override(self): reporter_filter = get_default_exception_reporter_filter() self.assertEqual( reporter_filter.cleanse_setting('database_url', 'super_secret'), reporter_filter.cleansed_substitute, ) class NonHTMLResponseExceptionReporterFilter(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase): """ Sensitive information can be filtered out of error reports. The plain text 500 debug-only error page is served when it has been detected the request doesn't accept HTML content. Don't check for (non)existence of frames vars in the traceback information section of the response content because they're not included in these error pages. Refs #14614. """ rf = RequestFactory(HTTP_ACCEPT='application/json') def test_non_sensitive_request(self): """ Request info can be seen in the default error reports for non-sensitive requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(non_sensitive_view, check_for_vars=False) with self.settings(DEBUG=False): self.verify_unsafe_response(non_sensitive_view, check_for_vars=False) def test_sensitive_request(self): """ Sensitive POST parameters cannot be seen in the default error reports for sensitive requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_view, check_for_vars=False) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_view, check_for_vars=False) def test_paranoid_request(self): """ No POST parameters can be seen in the default error reports for "paranoid" requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(paranoid_view, check_for_vars=False) with self.settings(DEBUG=False): self.verify_paranoid_response(paranoid_view, check_for_vars=False) def test_custom_exception_reporter_filter(self): """ It's possible to assign an exception reporter filter to the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER. """ with self.settings(DEBUG=True): self.verify_unsafe_response(custom_exception_reporter_filter_view, check_for_vars=False) with self.settings(DEBUG=False): self.verify_unsafe_response(custom_exception_reporter_filter_view, check_for_vars=False) @override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls') def test_non_html_response_encoding(self): response = self.client.get('/raises500/', HTTP_ACCEPT='application/json') self.assertEqual(response.headers['Content-Type'], 'text/plain; charset=utf-8') class DecoratorsTests(SimpleTestCase): def test_sensitive_variables_not_called(self): msg = ( 'sensitive_variables() must be called to use it as a decorator, ' 'e.g., use @sensitive_variables(), not @sensitive_variables.' ) with self.assertRaisesMessage(TypeError, msg): @sensitive_variables def test_func(password): pass def test_sensitive_post_parameters_not_called(self): msg = ( 'sensitive_post_parameters() must be called to use it as a ' 'decorator, e.g., use @sensitive_post_parameters(), not ' '@sensitive_post_parameters.' ) with self.assertRaisesMessage(TypeError, msg): @sensitive_post_parameters def test_func(request): return index_page(request)
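The filter tests above drive Django's debug-reporting machinery (ExceptionReporter, SafeExceptionReporterFilter, and the sensitive_variables / sensitive_post_parameters decorators) through views defined in view_tests.views. The following is a minimal, non-authoritative sketch of the view-side pattern they exercise, assuming a configured Django project; the view names, decorated parameter names, and local variables are hypothetical placeholders, while the imports, decorator calls, and the ExceptionReporter signature are the real APIs used in the tests above.

import sys

from django.http import HttpResponse
from django.views.debug import ExceptionReporter
from django.views.decorators.debug import sensitive_post_parameters, sensitive_variables


@sensitive_variables('password', 'api_token')   # cleanse these frame locals in reports
@sensitive_post_parameters('password')          # cleanse this POST parameter in reports
def change_password(request):
    # Hypothetical view: with DEBUG=False and the default
    # SafeExceptionReporterFilter, the values below are replaced by the
    # cleansed substitute in HTML and email error reports.
    password = request.POST.get('password', '')
    api_token = 'not-so-secret'  # noqa: F841 -- kept only so it appears in the traceback frame
    raise ValueError('Oops')


def report_for_current_exception(request):
    # Direct ExceptionReporter usage, mirroring the assertions in the tests above.
    try:
        change_password(request)
    except Exception:
        exc_type, exc_value, tb = sys.exc_info()
        reporter = ExceptionReporter(request, exc_type, exc_value, tb)
        return HttpResponse(reporter.get_traceback_html())

With DEBUG=True every value would appear in the report, whereas under DEBUG=False the default SafeExceptionReporterFilter is expected to replace 'password' and 'api_token' with its cleansed substitute, which is what verify_safe_response() and verify_safe_email() assert. sensitive_variables is applied outermost here, matching the decorator order used by the sensitive view fixtures these tests call.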
101218f9dc441013f17e99f3d8dbbac12ef8ef9bb795185e48c9f8879c9cfcc9
from datetime import datetime, timedelta, timezone as datetime_timezone import pytz try: import zoneinfo except ImportError: try: from backports import zoneinfo except ImportError: zoneinfo = None from django.conf import settings from django.db.models import ( DateField, DateTimeField, F, IntegerField, Max, OuterRef, Subquery, TimeField, ) from django.db.models.functions import ( Extract, ExtractDay, ExtractHour, ExtractIsoWeekDay, ExtractIsoYear, ExtractMinute, ExtractMonth, ExtractQuarter, ExtractSecond, ExtractWeek, ExtractWeekDay, ExtractYear, Trunc, TruncDate, TruncDay, TruncHour, TruncMinute, TruncMonth, TruncQuarter, TruncSecond, TruncTime, TruncWeek, TruncYear, ) from django.test import ( TestCase, override_settings, skipIfDBFeature, skipUnlessDBFeature, ) from django.utils import timezone from ..models import Author, DTModel, Fan ZONE_CONSTRUCTORS = (pytz.timezone,) if zoneinfo is not None: ZONE_CONSTRUCTORS += (zoneinfo.ZoneInfo,) def truncate_to(value, kind, tzinfo=None): # Convert to target timezone before truncation if tzinfo is not None: value = value.astimezone(tzinfo) def truncate(value, kind): if kind == 'second': return value.replace(microsecond=0) if kind == 'minute': return value.replace(second=0, microsecond=0) if kind == 'hour': return value.replace(minute=0, second=0, microsecond=0) if kind == 'day': if isinstance(value, datetime): return value.replace(hour=0, minute=0, second=0, microsecond=0) return value if kind == 'week': if isinstance(value, datetime): return (value - timedelta(days=value.weekday())).replace(hour=0, minute=0, second=0, microsecond=0) return value - timedelta(days=value.weekday()) if kind == 'month': if isinstance(value, datetime): return value.replace(day=1, hour=0, minute=0, second=0, microsecond=0) return value.replace(day=1) if kind == 'quarter': month_in_quarter = value.month - (value.month - 1) % 3 if isinstance(value, datetime): return value.replace(month=month_in_quarter, day=1, hour=0, minute=0, second=0, microsecond=0) return value.replace(month=month_in_quarter, day=1) # otherwise, truncate to year if isinstance(value, datetime): return value.replace(month=1, day=1, hour=0, minute=0, second=0, microsecond=0) return value.replace(month=1, day=1) value = truncate(value, kind) if tzinfo is not None: # If there was a daylight saving transition, then reset the timezone. value = timezone.make_aware(value.replace(tzinfo=None), tzinfo) return value @override_settings(USE_TZ=False) class DateFunctionTests(TestCase): def create_model(self, start_datetime, end_datetime): return DTModel.objects.create( name=start_datetime.isoformat() if start_datetime else 'None', start_datetime=start_datetime, end_datetime=end_datetime, start_date=start_datetime.date() if start_datetime else None, end_date=end_datetime.date() if end_datetime else None, start_time=start_datetime.time() if start_datetime else None, end_time=end_datetime.time() if end_datetime else None, duration=(end_datetime - start_datetime) if start_datetime and end_datetime else None, ) def test_extract_year_exact_lookup(self): """ Extract year uses a BETWEEN filter to compare the year to allow indexes to be used. 
""" start_datetime = datetime(2015, 6, 15, 14, 10) end_datetime = datetime(2016, 6, 15, 14, 10) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) for lookup in ('year', 'iso_year'): with self.subTest(lookup): qs = DTModel.objects.filter(**{'start_datetime__%s__exact' % lookup: 2015}) self.assertEqual(qs.count(), 1) query_string = str(qs.query).lower() self.assertEqual(query_string.count(' between '), 1) self.assertEqual(query_string.count('extract'), 0) # exact is implied and should be the same qs = DTModel.objects.filter(**{'start_datetime__%s' % lookup: 2015}) self.assertEqual(qs.count(), 1) query_string = str(qs.query).lower() self.assertEqual(query_string.count(' between '), 1) self.assertEqual(query_string.count('extract'), 0) # date and datetime fields should behave the same qs = DTModel.objects.filter(**{'start_date__%s' % lookup: 2015}) self.assertEqual(qs.count(), 1) query_string = str(qs.query).lower() self.assertEqual(query_string.count(' between '), 1) self.assertEqual(query_string.count('extract'), 0) # an expression rhs cannot use the between optimization. qs = DTModel.objects.annotate( start_year=ExtractYear('start_datetime'), ).filter(end_datetime__year=F('start_year') + 1) self.assertEqual(qs.count(), 1) query_string = str(qs.query).lower() self.assertEqual(query_string.count(' between '), 0) self.assertEqual(query_string.count('extract'), 3) def test_extract_year_greaterthan_lookup(self): start_datetime = datetime(2015, 6, 15, 14, 10) end_datetime = datetime(2016, 6, 15, 14, 10) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) for lookup in ('year', 'iso_year'): with self.subTest(lookup): qs = DTModel.objects.filter(**{'start_datetime__%s__gt' % lookup: 2015}) self.assertEqual(qs.count(), 1) self.assertEqual(str(qs.query).lower().count('extract'), 0) qs = DTModel.objects.filter(**{'start_datetime__%s__gte' % lookup: 2015}) self.assertEqual(qs.count(), 2) self.assertEqual(str(qs.query).lower().count('extract'), 0) qs = DTModel.objects.annotate( start_year=ExtractYear('start_datetime'), ).filter(**{'end_datetime__%s__gte' % lookup: F('start_year')}) self.assertEqual(qs.count(), 1) self.assertGreaterEqual(str(qs.query).lower().count('extract'), 2) def test_extract_year_lessthan_lookup(self): start_datetime = datetime(2015, 6, 15, 14, 10) end_datetime = datetime(2016, 6, 15, 14, 10) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) for lookup in ('year', 'iso_year'): with self.subTest(lookup): qs = DTModel.objects.filter(**{'start_datetime__%s__lt' % lookup: 2016}) self.assertEqual(qs.count(), 1) self.assertEqual(str(qs.query).count('extract'), 0) qs = DTModel.objects.filter(**{'start_datetime__%s__lte' % lookup: 2016}) self.assertEqual(qs.count(), 2) self.assertEqual(str(qs.query).count('extract'), 0) qs = DTModel.objects.annotate( end_year=ExtractYear('end_datetime'), ).filter(**{'start_datetime__%s__lte' % lookup: F('end_year')}) self.assertEqual(qs.count(), 1) 
self.assertGreaterEqual(str(qs.query).lower().count('extract'), 2) def test_extract_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) with self.assertRaisesMessage(ValueError, 'lookup_name must be provided'): Extract('start_datetime') msg = 'Extract input expression must be DateField, DateTimeField, TimeField, or DurationField.' with self.assertRaisesMessage(ValueError, msg): list(DTModel.objects.annotate(extracted=Extract('name', 'hour'))) with self.assertRaisesMessage( ValueError, "Cannot extract time component 'second' from DateField 'start_date'."): list(DTModel.objects.annotate(extracted=Extract('start_date', 'second'))) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=Extract('start_datetime', 'year')).order_by('start_datetime'), [(start_datetime, start_datetime.year), (end_datetime, end_datetime.year)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=Extract('start_datetime', 'quarter')).order_by('start_datetime'), [(start_datetime, 2), (end_datetime, 2)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=Extract('start_datetime', 'month')).order_by('start_datetime'), [(start_datetime, start_datetime.month), (end_datetime, end_datetime.month)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=Extract('start_datetime', 'day')).order_by('start_datetime'), [(start_datetime, start_datetime.day), (end_datetime, end_datetime.day)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=Extract('start_datetime', 'week')).order_by('start_datetime'), [(start_datetime, 25), (end_datetime, 24)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=Extract('start_datetime', 'week_day')).order_by('start_datetime'), [ (start_datetime, (start_datetime.isoweekday() % 7) + 1), (end_datetime, (end_datetime.isoweekday() % 7) + 1) ], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate( extracted=Extract('start_datetime', 'iso_week_day'), ).order_by('start_datetime'), [ (start_datetime, start_datetime.isoweekday()), (end_datetime, end_datetime.isoweekday()), ], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=Extract('start_datetime', 'hour')).order_by('start_datetime'), [(start_datetime, start_datetime.hour), (end_datetime, end_datetime.hour)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=Extract('start_datetime', 'minute')).order_by('start_datetime'), [(start_datetime, start_datetime.minute), (end_datetime, end_datetime.minute)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=Extract('start_datetime', 'second')).order_by('start_datetime'), [(start_datetime, start_datetime.second), (end_datetime, end_datetime.second)], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime__year=Extract('start_datetime', 'year')).count(), 2) 
self.assertEqual(DTModel.objects.filter(start_datetime__hour=Extract('start_datetime', 'hour')).count(), 2) self.assertEqual(DTModel.objects.filter(start_date__month=Extract('start_date', 'month')).count(), 2) self.assertEqual(DTModel.objects.filter(start_time__hour=Extract('start_time', 'hour')).count(), 2) def test_extract_none(self): self.create_model(None, None) for t in (Extract('start_datetime', 'year'), Extract('start_date', 'year'), Extract('start_time', 'hour')): with self.subTest(t): self.assertIsNone(DTModel.objects.annotate(extracted=t).first().extracted) def test_extract_outerref_validation(self): inner_qs = DTModel.objects.filter(name=ExtractMonth(OuterRef('name'))) msg = ( 'Extract input expression must be DateField, DateTimeField, ' 'TimeField, or DurationField.' ) with self.assertRaisesMessage(ValueError, msg): DTModel.objects.annotate(related_name=Subquery(inner_qs.values('name')[:1])) @skipUnlessDBFeature('has_native_duration_field') def test_extract_duration(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=Extract('duration', 'second')).order_by('start_datetime'), [ (start_datetime, (end_datetime - start_datetime).seconds % 60), (end_datetime, (start_datetime - end_datetime).seconds % 60) ], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual( DTModel.objects.annotate( duration_days=Extract('duration', 'day'), ).filter(duration_days__gt=200).count(), 1 ) @skipIfDBFeature('has_native_duration_field') def test_extract_duration_without_native_duration_field(self): msg = 'Extract requires native DurationField database support.' with self.assertRaisesMessage(ValueError, msg): list(DTModel.objects.annotate(extracted=Extract('duration', 'second'))) def test_extract_duration_unsupported_lookups(self): msg = "Cannot extract component '%s' from DurationField 'duration'." 
for lookup in ( 'year', 'iso_year', 'month', 'week', 'week_day', 'iso_week_day', 'quarter', ): with self.subTest(lookup): with self.assertRaisesMessage(ValueError, msg % lookup): DTModel.objects.annotate(extracted=Extract('duration', lookup)) def test_extract_year_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractYear('start_datetime')).order_by('start_datetime'), [(start_datetime, start_datetime.year), (end_datetime, end_datetime.year)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractYear('start_date')).order_by('start_datetime'), [(start_datetime, start_datetime.year), (end_datetime, end_datetime.year)], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime__year=ExtractYear('start_datetime')).count(), 2) def test_extract_iso_year_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractIsoYear('start_datetime')).order_by('start_datetime'), [(start_datetime, start_datetime.year), (end_datetime, end_datetime.year)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractIsoYear('start_date')).order_by('start_datetime'), [(start_datetime, start_datetime.year), (end_datetime, end_datetime.year)], lambda m: (m.start_datetime, m.extracted) ) # Both dates are from the same week year. 
self.assertEqual(DTModel.objects.filter(start_datetime__iso_year=ExtractIsoYear('start_datetime')).count(), 2) def test_extract_iso_year_func_boundaries(self): end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: end_datetime = timezone.make_aware(end_datetime, is_dst=False) week_52_day_2014 = datetime(2014, 12, 27, 13, 0) # Sunday week_1_day_2014_2015 = datetime(2014, 12, 31, 13, 0) # Wednesday week_53_day_2015 = datetime(2015, 12, 31, 13, 0) # Thursday if settings.USE_TZ: week_1_day_2014_2015 = timezone.make_aware(week_1_day_2014_2015, is_dst=False) week_52_day_2014 = timezone.make_aware(week_52_day_2014, is_dst=False) week_53_day_2015 = timezone.make_aware(week_53_day_2015, is_dst=False) days = [week_52_day_2014, week_1_day_2014_2015, week_53_day_2015] obj_1_iso_2014 = self.create_model(week_52_day_2014, end_datetime) obj_1_iso_2015 = self.create_model(week_1_day_2014_2015, end_datetime) obj_2_iso_2015 = self.create_model(week_53_day_2015, end_datetime) qs = DTModel.objects.filter(start_datetime__in=days).annotate( extracted=ExtractIsoYear('start_datetime'), ).order_by('start_datetime') self.assertQuerysetEqual(qs, [ (week_52_day_2014, 2014), (week_1_day_2014_2015, 2015), (week_53_day_2015, 2015), ], lambda m: (m.start_datetime, m.extracted)) qs = DTModel.objects.filter( start_datetime__iso_year=2015, ).order_by('start_datetime') self.assertSequenceEqual(qs, [obj_1_iso_2015, obj_2_iso_2015]) qs = DTModel.objects.filter( start_datetime__iso_year__gt=2014, ).order_by('start_datetime') self.assertSequenceEqual(qs, [obj_1_iso_2015, obj_2_iso_2015]) qs = DTModel.objects.filter( start_datetime__iso_year__lte=2014, ).order_by('start_datetime') self.assertSequenceEqual(qs, [obj_1_iso_2014]) def test_extract_month_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractMonth('start_datetime')).order_by('start_datetime'), [(start_datetime, start_datetime.month), (end_datetime, end_datetime.month)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractMonth('start_date')).order_by('start_datetime'), [(start_datetime, start_datetime.month), (end_datetime, end_datetime.month)], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime__month=ExtractMonth('start_datetime')).count(), 2) def test_extract_day_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractDay('start_datetime')).order_by('start_datetime'), [(start_datetime, start_datetime.day), (end_datetime, end_datetime.day)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractDay('start_date')).order_by('start_datetime'), [(start_datetime, start_datetime.day), (end_datetime, end_datetime.day)], lambda m: 
(m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime__day=ExtractDay('start_datetime')).count(), 2) def test_extract_week_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractWeek('start_datetime')).order_by('start_datetime'), [(start_datetime, 25), (end_datetime, 24)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractWeek('start_date')).order_by('start_datetime'), [(start_datetime, 25), (end_datetime, 24)], lambda m: (m.start_datetime, m.extracted) ) # both dates are from the same week. self.assertEqual(DTModel.objects.filter(start_datetime__week=ExtractWeek('start_datetime')).count(), 2) def test_extract_quarter_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 8, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractQuarter('start_datetime')).order_by('start_datetime'), [(start_datetime, 2), (end_datetime, 3)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractQuarter('start_date')).order_by('start_datetime'), [(start_datetime, 2), (end_datetime, 3)], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime__quarter=ExtractQuarter('start_datetime')).count(), 2) def test_extract_quarter_func_boundaries(self): end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: end_datetime = timezone.make_aware(end_datetime, is_dst=False) last_quarter_2014 = datetime(2014, 12, 31, 13, 0) first_quarter_2015 = datetime(2015, 1, 1, 13, 0) if settings.USE_TZ: last_quarter_2014 = timezone.make_aware(last_quarter_2014, is_dst=False) first_quarter_2015 = timezone.make_aware(first_quarter_2015, is_dst=False) dates = [last_quarter_2014, first_quarter_2015] self.create_model(last_quarter_2014, end_datetime) self.create_model(first_quarter_2015, end_datetime) qs = DTModel.objects.filter(start_datetime__in=dates).annotate( extracted=ExtractQuarter('start_datetime'), ).order_by('start_datetime') self.assertQuerysetEqual(qs, [ (last_quarter_2014, 4), (first_quarter_2015, 1), ], lambda m: (m.start_datetime, m.extracted)) def test_extract_week_func_boundaries(self): end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: end_datetime = timezone.make_aware(end_datetime, is_dst=False) week_52_day_2014 = datetime(2014, 12, 27, 13, 0) # Sunday week_1_day_2014_2015 = datetime(2014, 12, 31, 13, 0) # Wednesday week_53_day_2015 = datetime(2015, 12, 31, 13, 0) # Thursday if settings.USE_TZ: week_1_day_2014_2015 = timezone.make_aware(week_1_day_2014_2015, is_dst=False) week_52_day_2014 = timezone.make_aware(week_52_day_2014, is_dst=False) week_53_day_2015 = timezone.make_aware(week_53_day_2015, is_dst=False) days = [week_52_day_2014, week_1_day_2014_2015, week_53_day_2015] 
self.create_model(week_53_day_2015, end_datetime) self.create_model(week_52_day_2014, end_datetime) self.create_model(week_1_day_2014_2015, end_datetime) qs = DTModel.objects.filter(start_datetime__in=days).annotate( extracted=ExtractWeek('start_datetime'), ).order_by('start_datetime') self.assertQuerysetEqual(qs, [ (week_52_day_2014, 52), (week_1_day_2014_2015, 1), (week_53_day_2015, 53), ], lambda m: (m.start_datetime, m.extracted)) def test_extract_weekday_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractWeekDay('start_datetime')).order_by('start_datetime'), [ (start_datetime, (start_datetime.isoweekday() % 7) + 1), (end_datetime, (end_datetime.isoweekday() % 7) + 1), ], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractWeekDay('start_date')).order_by('start_datetime'), [ (start_datetime, (start_datetime.isoweekday() % 7) + 1), (end_datetime, (end_datetime.isoweekday() % 7) + 1), ], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime__week_day=ExtractWeekDay('start_datetime')).count(), 2) def test_extract_iso_weekday_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate( extracted=ExtractIsoWeekDay('start_datetime'), ).order_by('start_datetime'), [ (start_datetime, start_datetime.isoweekday()), (end_datetime, end_datetime.isoweekday()), ], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate( extracted=ExtractIsoWeekDay('start_date'), ).order_by('start_datetime'), [ (start_datetime, start_datetime.isoweekday()), (end_datetime, end_datetime.isoweekday()), ], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual( DTModel.objects.filter( start_datetime__week_day=ExtractWeekDay('start_datetime'), ).count(), 2, ) def test_extract_hour_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractHour('start_datetime')).order_by('start_datetime'), [(start_datetime, start_datetime.hour), (end_datetime, end_datetime.hour)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractHour('start_time')).order_by('start_datetime'), [(start_datetime, start_datetime.hour), (end_datetime, end_datetime.hour)], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime__hour=ExtractHour('start_datetime')).count(), 2) def test_extract_minute_func(self): 
start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractMinute('start_datetime')).order_by('start_datetime'), [(start_datetime, start_datetime.minute), (end_datetime, end_datetime.minute)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractMinute('start_time')).order_by('start_datetime'), [(start_datetime, start_datetime.minute), (end_datetime, end_datetime.minute)], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime__minute=ExtractMinute('start_datetime')).count(), 2) def test_extract_second_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractSecond('start_datetime')).order_by('start_datetime'), [(start_datetime, start_datetime.second), (end_datetime, end_datetime.second)], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=ExtractSecond('start_time')).order_by('start_datetime'), [(start_datetime, start_datetime.second), (end_datetime, end_datetime.second)], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime__second=ExtractSecond('start_datetime')).count(), 2) def test_trunc_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) msg = 'output_field must be either DateField, TimeField, or DateTimeField' with self.assertRaisesMessage(ValueError, msg): list(DTModel.objects.annotate(truncated=Trunc('start_datetime', 'year', output_field=IntegerField()))) with self.assertRaisesMessage(AssertionError, "'name' isn't a DateField, TimeField, or DateTimeField."): list(DTModel.objects.annotate(truncated=Trunc('name', 'year', output_field=DateTimeField()))) with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"): list(DTModel.objects.annotate(truncated=Trunc('start_date', 'second'))) with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"): list(DTModel.objects.annotate(truncated=Trunc('start_time', 'month'))) with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"): list(DTModel.objects.annotate(truncated=Trunc('start_date', 'month', output_field=DateTimeField()))) with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"): list(DTModel.objects.annotate(truncated=Trunc('start_time', 'second', output_field=DateTimeField()))) def test_datetime_kind(kind): 
self.assertQuerysetEqual( DTModel.objects.annotate( truncated=Trunc('start_datetime', kind, output_field=DateTimeField()) ).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime, kind)), (end_datetime, truncate_to(end_datetime, kind)) ], lambda m: (m.start_datetime, m.truncated) ) def test_date_kind(kind): self.assertQuerysetEqual( DTModel.objects.annotate( truncated=Trunc('start_date', kind, output_field=DateField()) ).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime.date(), kind)), (end_datetime, truncate_to(end_datetime.date(), kind)) ], lambda m: (m.start_datetime, m.truncated) ) def test_time_kind(kind): self.assertQuerysetEqual( DTModel.objects.annotate( truncated=Trunc('start_time', kind, output_field=TimeField()) ).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime.time(), kind)), (end_datetime, truncate_to(end_datetime.time(), kind)) ], lambda m: (m.start_datetime, m.truncated) ) def test_datetime_to_time_kind(kind): self.assertQuerysetEqual( DTModel.objects.annotate( truncated=Trunc('start_datetime', kind, output_field=TimeField()), ).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime.time(), kind)), (end_datetime, truncate_to(end_datetime.time(), kind)), ], lambda m: (m.start_datetime, m.truncated), ) test_date_kind('year') test_date_kind('quarter') test_date_kind('month') test_date_kind('week') test_date_kind('day') test_time_kind('hour') test_time_kind('minute') test_time_kind('second') test_datetime_kind('year') test_datetime_kind('quarter') test_datetime_kind('month') test_datetime_kind('week') test_datetime_kind('day') test_datetime_kind('hour') test_datetime_kind('minute') test_datetime_kind('second') test_datetime_to_time_kind('hour') test_datetime_to_time_kind('minute') test_datetime_to_time_kind('second') qs = DTModel.objects.filter(start_datetime__date=Trunc('start_datetime', 'day', output_field=DateField())) self.assertEqual(qs.count(), 2) def test_trunc_none(self): self.create_model(None, None) for t in (Trunc('start_datetime', 'year'), Trunc('start_date', 'year'), Trunc('start_time', 'hour')): with self.subTest(t): self.assertIsNone(DTModel.objects.annotate(truncated=t).first().truncated) def test_trunc_year_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), 'year') if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncYear('start_datetime')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime, 'year')), (end_datetime, truncate_to(end_datetime, 'year')), ], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncYear('start_date')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime.date(), 'year')), (end_datetime, truncate_to(end_datetime.date(), 'year')), ], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime=TruncYear('start_datetime')).count(), 1) with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncYear('start_time'))) with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to 
DateTimeField"): list(DTModel.objects.annotate(truncated=TruncYear('start_time', output_field=TimeField()))) def test_trunc_quarter_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = truncate_to(datetime(2016, 10, 15, 14, 10, 50, 123), 'quarter') last_quarter_2015 = truncate_to(datetime(2015, 12, 31, 14, 10, 50, 123), 'quarter') first_quarter_2016 = truncate_to(datetime(2016, 1, 1, 14, 10, 50, 123), 'quarter') if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) last_quarter_2015 = timezone.make_aware(last_quarter_2015, is_dst=False) first_quarter_2016 = timezone.make_aware(first_quarter_2016, is_dst=False) self.create_model(start_datetime=start_datetime, end_datetime=end_datetime) self.create_model(start_datetime=end_datetime, end_datetime=start_datetime) self.create_model(start_datetime=last_quarter_2015, end_datetime=end_datetime) self.create_model(start_datetime=first_quarter_2016, end_datetime=end_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncQuarter('start_date')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime.date(), 'quarter')), (last_quarter_2015, truncate_to(last_quarter_2015.date(), 'quarter')), (first_quarter_2016, truncate_to(first_quarter_2016.date(), 'quarter')), (end_datetime, truncate_to(end_datetime.date(), 'quarter')), ], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncQuarter('start_datetime')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime, 'quarter')), (last_quarter_2015, truncate_to(last_quarter_2015, 'quarter')), (first_quarter_2016, truncate_to(first_quarter_2016, 'quarter')), (end_datetime, truncate_to(end_datetime, 'quarter')), ], lambda m: (m.start_datetime, m.extracted) ) with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncQuarter('start_time'))) with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncQuarter('start_time', output_field=TimeField()))) def test_trunc_month_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), 'month') if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncMonth('start_datetime')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime, 'month')), (end_datetime, truncate_to(end_datetime, 'month')), ], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncMonth('start_date')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime.date(), 'month')), (end_datetime, truncate_to(end_datetime.date(), 'month')), ], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime=TruncMonth('start_datetime')).count(), 1) with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncMonth('start_time'))) with self.assertRaisesMessage(ValueError, 
"Cannot truncate TimeField 'start_time' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncMonth('start_time', output_field=TimeField()))) def test_trunc_week_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), 'week') if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncWeek('start_datetime')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime, 'week')), (end_datetime, truncate_to(end_datetime, 'week')), ], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime=TruncWeek('start_datetime')).count(), 1) with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncWeek('start_time'))) with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncWeek('start_time', output_field=TimeField()))) def test_trunc_date_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncDate('start_datetime')).order_by('start_datetime'), [ (start_datetime, start_datetime.date()), (end_datetime, end_datetime.date()), ], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime__date=TruncDate('start_datetime')).count(), 2) with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateField"): list(DTModel.objects.annotate(truncated=TruncDate('start_time'))) with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateField"): list(DTModel.objects.annotate(truncated=TruncDate('start_time', output_field=TimeField()))) def test_trunc_date_none(self): self.create_model(None, None) self.assertIsNone(DTModel.objects.annotate(truncated=TruncDate('start_datetime')).first().truncated) def test_trunc_time_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncTime('start_datetime')).order_by('start_datetime'), [ (start_datetime, start_datetime.time()), (end_datetime, end_datetime.time()), ], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime__time=TruncTime('start_datetime')).count(), 2) with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to TimeField"): list(DTModel.objects.annotate(truncated=TruncTime('start_date'))) with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to TimeField"): 
list(DTModel.objects.annotate(truncated=TruncTime('start_date', output_field=DateField()))) def test_trunc_time_none(self): self.create_model(None, None) self.assertIsNone(DTModel.objects.annotate(truncated=TruncTime('start_datetime')).first().truncated) def test_trunc_time_comparison(self): start_datetime = datetime(2015, 6, 15, 14, 30, 26) # 0 microseconds. end_datetime = datetime(2015, 6, 15, 14, 30, 26, 321) if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.assertIs( DTModel.objects.filter( start_datetime__time=start_datetime.time(), end_datetime__time=end_datetime.time(), ).exists(), True, ) self.assertIs( DTModel.objects.annotate( extracted_start=TruncTime('start_datetime'), extracted_end=TruncTime('end_datetime'), ).filter( extracted_start=start_datetime.time(), extracted_end=end_datetime.time(), ).exists(), True, ) def test_trunc_day_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), 'day') if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncDay('start_datetime')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime, 'day')), (end_datetime, truncate_to(end_datetime, 'day')), ], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime=TruncDay('start_datetime')).count(), 1) with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncDay('start_time'))) with self.assertRaisesMessage(ValueError, "Cannot truncate TimeField 'start_time' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncDay('start_time', output_field=TimeField()))) def test_trunc_hour_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), 'hour') if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncHour('start_datetime')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime, 'hour')), (end_datetime, truncate_to(end_datetime, 'hour')), ], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncHour('start_time')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime.time(), 'hour')), (end_datetime, truncate_to(end_datetime.time(), 'hour')), ], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime=TruncHour('start_datetime')).count(), 1) with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncHour('start_date'))) with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncHour('start_date', output_field=DateField()))) def 
test_trunc_minute_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), 'minute') if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncMinute('start_datetime')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime, 'minute')), (end_datetime, truncate_to(end_datetime, 'minute')), ], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncMinute('start_time')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime.time(), 'minute')), (end_datetime, truncate_to(end_datetime.time(), 'minute')), ], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime=TruncMinute('start_datetime')).count(), 1) with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncMinute('start_date'))) with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncMinute('start_date', output_field=DateField()))) def test_trunc_second_func(self): start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = truncate_to(datetime(2016, 6, 15, 14, 10, 50, 123), 'second') if settings.USE_TZ: start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncSecond('start_datetime')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime, 'second')), (end_datetime, truncate_to(end_datetime, 'second')) ], lambda m: (m.start_datetime, m.extracted) ) self.assertQuerysetEqual( DTModel.objects.annotate(extracted=TruncSecond('start_time')).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime.time(), 'second')), (end_datetime, truncate_to(end_datetime.time(), 'second')) ], lambda m: (m.start_datetime, m.extracted) ) self.assertEqual(DTModel.objects.filter(start_datetime=TruncSecond('start_datetime')).count(), 1) with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncSecond('start_date'))) with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"): list(DTModel.objects.annotate(truncated=TruncSecond('start_date', output_field=DateField()))) def test_trunc_subquery_with_parameters(self): author_1 = Author.objects.create(name='J. R. R. Tolkien') author_2 = Author.objects.create(name='G. R. R. 
Martin') fan_since_1 = datetime(2016, 2, 3, 15, 0, 0) fan_since_2 = datetime(2015, 2, 3, 15, 0, 0) fan_since_3 = datetime(2017, 2, 3, 15, 0, 0) if settings.USE_TZ: fan_since_1 = timezone.make_aware(fan_since_1, is_dst=False) fan_since_2 = timezone.make_aware(fan_since_2, is_dst=False) fan_since_3 = timezone.make_aware(fan_since_3, is_dst=False) Fan.objects.create(author=author_1, name='Tom', fan_since=fan_since_1) Fan.objects.create(author=author_1, name='Emma', fan_since=fan_since_2) Fan.objects.create(author=author_2, name='Isabella', fan_since=fan_since_3) inner = Fan.objects.filter( author=OuterRef('pk'), name__in=('Emma', 'Isabella', 'Tom') ).values('author').annotate(newest_fan=Max('fan_since')).values('newest_fan') outer = Author.objects.annotate( newest_fan_year=TruncYear(Subquery(inner, output_field=DateTimeField())) ) tz = timezone.utc if settings.USE_TZ else None self.assertSequenceEqual( outer.order_by('name').values('name', 'newest_fan_year'), [ {'name': 'G. R. R. Martin', 'newest_fan_year': datetime(2017, 1, 1, 0, 0, tzinfo=tz)}, {'name': 'J. R. R. Tolkien', 'newest_fan_year': datetime(2016, 1, 1, 0, 0, tzinfo=tz)}, ] ) def test_extract_outerref(self): datetime_1 = datetime(2000, 1, 1) datetime_2 = datetime(2001, 3, 5) datetime_3 = datetime(2002, 1, 3) if settings.USE_TZ: datetime_1 = timezone.make_aware(datetime_1, is_dst=False) datetime_2 = timezone.make_aware(datetime_2, is_dst=False) datetime_3 = timezone.make_aware(datetime_3, is_dst=False) obj_1 = self.create_model(datetime_1, datetime_3) obj_2 = self.create_model(datetime_2, datetime_1) obj_3 = self.create_model(datetime_3, datetime_2) inner_qs = DTModel.objects.filter( start_datetime__year=2000, start_datetime__month=ExtractMonth(OuterRef('end_datetime')), ) qs = DTModel.objects.annotate( related_pk=Subquery(inner_qs.values('pk')[:1]), ) self.assertSequenceEqual(qs.order_by('name').values('pk', 'related_pk'), [ {'pk': obj_1.pk, 'related_pk': obj_1.pk}, {'pk': obj_2.pk, 'related_pk': obj_1.pk}, {'pk': obj_3.pk, 'related_pk': None}, ]) @override_settings(USE_TZ=True, TIME_ZONE='UTC') class DateFunctionWithTimeZoneTests(DateFunctionTests): def get_timezones(self, key): for constructor in ZONE_CONSTRUCTORS: yield constructor(key) def test_extract_func_with_timezone(self): start_datetime = datetime(2015, 6, 15, 23, 30, 1, 321) end_datetime = datetime(2015, 6, 16, 13, 11, 27, 123) start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) delta_tzinfo_pos = datetime_timezone(timedelta(hours=5)) delta_tzinfo_neg = datetime_timezone(timedelta(hours=-5, minutes=17)) for melb in self.get_timezones('Australia/Melbourne'): with self.subTest(repr(melb)): qs = DTModel.objects.annotate( day=Extract('start_datetime', 'day'), day_melb=Extract('start_datetime', 'day', tzinfo=melb), week=Extract('start_datetime', 'week', tzinfo=melb), isoyear=ExtractIsoYear('start_datetime', tzinfo=melb), weekday=ExtractWeekDay('start_datetime'), weekday_melb=ExtractWeekDay('start_datetime', tzinfo=melb), isoweekday=ExtractIsoWeekDay('start_datetime'), isoweekday_melb=ExtractIsoWeekDay('start_datetime', tzinfo=melb), quarter=ExtractQuarter('start_datetime', tzinfo=melb), hour=ExtractHour('start_datetime'), hour_melb=ExtractHour('start_datetime', tzinfo=melb), hour_with_delta_pos=ExtractHour('start_datetime', tzinfo=delta_tzinfo_pos), hour_with_delta_neg=ExtractHour('start_datetime', tzinfo=delta_tzinfo_neg), 
minute_with_delta_neg=ExtractMinute('start_datetime', tzinfo=delta_tzinfo_neg), ).order_by('start_datetime') utc_model = qs.get() self.assertEqual(utc_model.day, 15) self.assertEqual(utc_model.day_melb, 16) self.assertEqual(utc_model.week, 25) self.assertEqual(utc_model.isoyear, 2015) self.assertEqual(utc_model.weekday, 2) self.assertEqual(utc_model.weekday_melb, 3) self.assertEqual(utc_model.isoweekday, 1) self.assertEqual(utc_model.isoweekday_melb, 2) self.assertEqual(utc_model.quarter, 2) self.assertEqual(utc_model.hour, 23) self.assertEqual(utc_model.hour_melb, 9) self.assertEqual(utc_model.hour_with_delta_pos, 4) self.assertEqual(utc_model.hour_with_delta_neg, 18) self.assertEqual(utc_model.minute_with_delta_neg, 47) with timezone.override(melb): melb_model = qs.get() self.assertEqual(melb_model.day, 16) self.assertEqual(melb_model.day_melb, 16) self.assertEqual(melb_model.week, 25) self.assertEqual(melb_model.isoyear, 2015) self.assertEqual(melb_model.weekday, 3) self.assertEqual(melb_model.isoweekday, 2) self.assertEqual(melb_model.quarter, 2) self.assertEqual(melb_model.weekday_melb, 3) self.assertEqual(melb_model.isoweekday_melb, 2) self.assertEqual(melb_model.hour, 9) self.assertEqual(melb_model.hour_melb, 9) def test_extract_func_explicit_timezone_priority(self): start_datetime = datetime(2015, 6, 15, 23, 30, 1, 321) end_datetime = datetime(2015, 6, 16, 13, 11, 27, 123) start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) for melb in self.get_timezones('Australia/Melbourne'): with self.subTest(repr(melb)): with timezone.override(melb): model = DTModel.objects.annotate( day_melb=Extract('start_datetime', 'day'), day_utc=Extract('start_datetime', 'day', tzinfo=timezone.utc), ).order_by('start_datetime').get() self.assertEqual(model.day_melb, 16) self.assertEqual(model.day_utc, 15) def test_extract_invalid_field_with_timezone(self): for melb in self.get_timezones('Australia/Melbourne'): with self.subTest(repr(melb)): msg = 'tzinfo can only be used with DateTimeField.' 
with self.assertRaisesMessage(ValueError, msg): DTModel.objects.annotate( day_melb=Extract('start_date', 'day', tzinfo=melb), ).get() with self.assertRaisesMessage(ValueError, msg): DTModel.objects.annotate( hour_melb=Extract('start_time', 'hour', tzinfo=melb), ).get() def test_trunc_timezone_applied_before_truncation(self): start_datetime = datetime(2016, 1, 1, 1, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) for melb, pacific in zip( self.get_timezones('Australia/Melbourne'), self.get_timezones('America/Los_Angeles') ): with self.subTest((repr(melb), repr(pacific))): model = DTModel.objects.annotate( melb_year=TruncYear('start_datetime', tzinfo=melb), pacific_year=TruncYear('start_datetime', tzinfo=pacific), melb_date=TruncDate('start_datetime', tzinfo=melb), pacific_date=TruncDate('start_datetime', tzinfo=pacific), melb_time=TruncTime('start_datetime', tzinfo=melb), pacific_time=TruncTime('start_datetime', tzinfo=pacific), ).order_by('start_datetime').get() melb_start_datetime = start_datetime.astimezone(melb) pacific_start_datetime = start_datetime.astimezone(pacific) self.assertEqual(model.start_datetime, start_datetime) self.assertEqual(model.melb_year, truncate_to(start_datetime, 'year', melb)) self.assertEqual(model.pacific_year, truncate_to(start_datetime, 'year', pacific)) self.assertEqual(model.start_datetime.year, 2016) self.assertEqual(model.melb_year.year, 2016) self.assertEqual(model.pacific_year.year, 2015) self.assertEqual(model.melb_date, melb_start_datetime.date()) self.assertEqual(model.pacific_date, pacific_start_datetime.date()) self.assertEqual(model.melb_time, melb_start_datetime.time()) self.assertEqual(model.pacific_time, pacific_start_datetime.time()) def test_trunc_ambiguous_and_invalid_times(self): sao = pytz.timezone('America/Sao_Paulo') utc = timezone.utc start_datetime = datetime(2016, 10, 16, 13, tzinfo=utc) end_datetime = datetime(2016, 2, 21, 1, tzinfo=utc) self.create_model(start_datetime, end_datetime) with timezone.override(sao): with self.assertRaisesMessage(pytz.NonExistentTimeError, '2016-10-16 00:00:00'): model = DTModel.objects.annotate(truncated_start=TruncDay('start_datetime')).get() with self.assertRaisesMessage(pytz.AmbiguousTimeError, '2016-02-20 23:00:00'): model = DTModel.objects.annotate(truncated_end=TruncHour('end_datetime')).get() model = DTModel.objects.annotate( truncated_start=TruncDay('start_datetime', is_dst=False), truncated_end=TruncHour('end_datetime', is_dst=False), ).get() self.assertEqual(model.truncated_start.dst(), timedelta(0)) self.assertEqual(model.truncated_end.dst(), timedelta(0)) model = DTModel.objects.annotate( truncated_start=TruncDay('start_datetime', is_dst=True), truncated_end=TruncHour('end_datetime', is_dst=True), ).get() self.assertEqual(model.truncated_start.dst(), timedelta(0, 3600)) self.assertEqual(model.truncated_end.dst(), timedelta(0, 3600)) def test_trunc_func_with_timezone(self): """ If the truncated datetime transitions to a different offset (daylight saving) then the returned value will have that new timezone/offset. 
""" start_datetime = datetime(2015, 6, 15, 14, 30, 50, 321) end_datetime = datetime(2016, 6, 15, 14, 10, 50, 123) start_datetime = timezone.make_aware(start_datetime, is_dst=False) end_datetime = timezone.make_aware(end_datetime, is_dst=False) self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) for melb in self.get_timezones('Australia/Melbourne'): with self.subTest(repr(melb)): def test_datetime_kind(kind): self.assertQuerysetEqual( DTModel.objects.annotate( truncated=Trunc( 'start_datetime', kind, output_field=DateTimeField(), tzinfo=melb ) ).order_by('start_datetime'), [ (start_datetime, truncate_to(start_datetime.astimezone(melb), kind, melb)), (end_datetime, truncate_to(end_datetime.astimezone(melb), kind, melb)) ], lambda m: (m.start_datetime, m.truncated) ) def test_datetime_to_date_kind(kind): self.assertQuerysetEqual( DTModel.objects.annotate( truncated=Trunc( 'start_datetime', kind, output_field=DateField(), tzinfo=melb, ), ).order_by('start_datetime'), [ ( start_datetime, truncate_to(start_datetime.astimezone(melb).date(), kind), ), ( end_datetime, truncate_to(end_datetime.astimezone(melb).date(), kind), ), ], lambda m: (m.start_datetime, m.truncated), ) def test_datetime_to_time_kind(kind): self.assertQuerysetEqual( DTModel.objects.annotate( truncated=Trunc( 'start_datetime', kind, output_field=TimeField(), tzinfo=melb, ) ).order_by('start_datetime'), [ ( start_datetime, truncate_to(start_datetime.astimezone(melb).time(), kind), ), ( end_datetime, truncate_to(end_datetime.astimezone(melb).time(), kind), ), ], lambda m: (m.start_datetime, m.truncated), ) test_datetime_to_date_kind('year') test_datetime_to_date_kind('quarter') test_datetime_to_date_kind('month') test_datetime_to_date_kind('week') test_datetime_to_date_kind('day') test_datetime_to_time_kind('hour') test_datetime_to_time_kind('minute') test_datetime_to_time_kind('second') test_datetime_kind('year') test_datetime_kind('quarter') test_datetime_kind('month') test_datetime_kind('week') test_datetime_kind('day') test_datetime_kind('hour') test_datetime_kind('minute') test_datetime_kind('second') qs = DTModel.objects.filter( start_datetime__date=Trunc('start_datetime', 'day', output_field=DateField()) ) self.assertEqual(qs.count(), 2) def test_trunc_invalid_field_with_timezone(self): for melb in self.get_timezones('Australia/Melbourne'): with self.subTest(repr(melb)): msg = 'tzinfo can only be used with DateTimeField.' with self.assertRaisesMessage(ValueError, msg): DTModel.objects.annotate( day_melb=Trunc('start_date', 'day', tzinfo=melb), ).get() with self.assertRaisesMessage(ValueError, msg): DTModel.objects.annotate( hour_melb=Trunc('start_time', 'hour', tzinfo=melb), ).get()
2877effffdf0955575a3026fff789305aa0926876d80d113b8015a42225383c2
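# Hedged, illustrative sketch (assumes a hypothetical ``Article`` model; a
# usage preface, not a definitive API reference): the Model/ModelBase
# machinery defined in this module backs ordinary model definitions and
# instance operations roughly as follows.
#
#     from django.db import models
#
#     class Article(models.Model):
#         title = models.CharField(max_length=100)
#
#     a = Article(title='Hello')   # ModelBase.__new__() built the class; __init__() sets field values
#     a.save()                     # routed through save_base() and _save_table()
#     a.refresh_from_db()          # reloads field values from the database
#     a.delete()                   # collects related objects via Collector and deletes them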
import copy import inspect import warnings from functools import partialmethod from itertools import chain import django from django.apps import apps from django.conf import settings from django.core import checks from django.core.exceptions import ( NON_FIELD_ERRORS, FieldDoesNotExist, FieldError, MultipleObjectsReturned, ObjectDoesNotExist, ValidationError, ) from django.db import ( DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY, DatabaseError, connection, connections, router, transaction, ) from django.db.models import ( NOT_PROVIDED, ExpressionWrapper, IntegerField, Max, Value, ) from django.db.models.constants import LOOKUP_SEP from django.db.models.constraints import CheckConstraint, UniqueConstraint from django.db.models.deletion import CASCADE, Collector from django.db.models.fields.related import ( ForeignObjectRel, OneToOneField, lazy_related_operation, resolve_relation, ) from django.db.models.functions import Coalesce from django.db.models.manager import Manager from django.db.models.options import Options from django.db.models.query import F, Q from django.db.models.signals import ( class_prepared, post_init, post_save, pre_init, pre_save, ) from django.db.models.utils import make_model_tuple from django.utils.encoding import force_str from django.utils.hashable import make_hashable from django.utils.text import capfirst, get_text_list from django.utils.translation import gettext_lazy as _ class Deferred: def __repr__(self): return '<Deferred field>' def __str__(self): return '<Deferred field>' DEFERRED = Deferred() def subclass_exception(name, bases, module, attached_to): """ Create exception subclass. Used by ModelBase below. The exception is created in a way that allows it to be pickled, assuming that the returned exception class will be added as an attribute to the 'attached_to' class. """ return type(name, bases, { '__module__': module, '__qualname__': '%s.%s' % (attached_to.__qualname__, name), }) def _has_contribute_to_class(value): # Only call contribute_to_class() if it's bound. return not inspect.isclass(value) and hasattr(value, 'contribute_to_class') class ModelBase(type): """Metaclass for all models.""" def __new__(cls, name, bases, attrs, **kwargs): super_new = super().__new__ # Also ensure initialization is only performed for subclasses of Model # (excluding Model class itself). parents = [b for b in bases if isinstance(b, ModelBase)] if not parents: return super_new(cls, name, bases, attrs) # Create the class. module = attrs.pop('__module__') new_attrs = {'__module__': module} classcell = attrs.pop('__classcell__', None) if classcell is not None: new_attrs['__classcell__'] = classcell attr_meta = attrs.pop('Meta', None) # Pass all attrs without a (Django-specific) contribute_to_class() # method to type.__new__() so that they're properly initialized # (i.e. __set_name__()). contributable_attrs = {} for obj_name, obj in attrs.items(): if _has_contribute_to_class(obj): contributable_attrs[obj_name] = obj else: new_attrs[obj_name] = obj new_class = super_new(cls, name, bases, new_attrs, **kwargs) abstract = getattr(attr_meta, 'abstract', False) meta = attr_meta or getattr(new_class, 'Meta', None) base_meta = getattr(new_class, '_meta', None) app_label = None # Look for an application configuration to attach the model to. 
app_config = apps.get_containing_app_config(module) if getattr(meta, 'app_label', None) is None: if app_config is None: if not abstract: raise RuntimeError( "Model class %s.%s doesn't declare an explicit " "app_label and isn't in an application in " "INSTALLED_APPS." % (module, name) ) else: app_label = app_config.label new_class.add_to_class('_meta', Options(meta, app_label)) if not abstract: new_class.add_to_class( 'DoesNotExist', subclass_exception( 'DoesNotExist', tuple( x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (ObjectDoesNotExist,), module, attached_to=new_class)) new_class.add_to_class( 'MultipleObjectsReturned', subclass_exception( 'MultipleObjectsReturned', tuple( x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (MultipleObjectsReturned,), module, attached_to=new_class)) if base_meta and not base_meta.abstract: # Non-abstract child classes inherit some attributes from their # non-abstract parent (unless an ABC comes before it in the # method resolution order). if not hasattr(meta, 'ordering'): new_class._meta.ordering = base_meta.ordering if not hasattr(meta, 'get_latest_by'): new_class._meta.get_latest_by = base_meta.get_latest_by is_proxy = new_class._meta.proxy # If the model is a proxy, ensure that the base class # hasn't been swapped out. if is_proxy and base_meta and base_meta.swapped: raise TypeError("%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped)) # Add remaining attributes (those with a contribute_to_class() method) # to the class. for obj_name, obj in contributable_attrs.items(): new_class.add_to_class(obj_name, obj) # All the fields of any type declared on this model new_fields = chain( new_class._meta.local_fields, new_class._meta.local_many_to_many, new_class._meta.private_fields ) field_names = {f.name for f in new_fields} # Basic setup for proxy models. if is_proxy: base = None for parent in [kls for kls in parents if hasattr(kls, '_meta')]: if parent._meta.abstract: if parent._meta.fields: raise TypeError( "Abstract base class containing model fields not " "permitted for proxy model '%s'." % name ) else: continue if base is None: base = parent elif parent._meta.concrete_model is not base._meta.concrete_model: raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name) if base is None: raise TypeError("Proxy model '%s' has no non-abstract model base class." % name) new_class._meta.setup_proxy(base) new_class._meta.concrete_model = base._meta.concrete_model else: new_class._meta.concrete_model = new_class # Collect the parent links for multi-table inheritance. parent_links = {} for base in reversed([new_class] + parents): # Conceptually equivalent to `if base is Model`. if not hasattr(base, '_meta'): continue # Skip concrete parent classes. if base != new_class and not base._meta.abstract: continue # Locate OneToOneField instances. for field in base._meta.local_fields: if isinstance(field, OneToOneField) and field.remote_field.parent_link: related = resolve_relation(new_class, field.remote_field.model) parent_links[make_model_tuple(related)] = field # Track fields inherited from base models. inherited_attributes = set() # Do the appropriate setup for any model parents. for base in new_class.mro(): if base not in parents or not hasattr(base, '_meta'): # Things without _meta aren't functional models, so they're # uninteresting parents. 
inherited_attributes.update(base.__dict__) continue parent_fields = base._meta.local_fields + base._meta.local_many_to_many if not base._meta.abstract: # Check for clashes between locally declared fields and those # on the base classes. for field in parent_fields: if field.name in field_names: raise FieldError( 'Local field %r in class %r clashes with field of ' 'the same name from base class %r.' % ( field.name, name, base.__name__, ) ) else: inherited_attributes.add(field.name) # Concrete classes... base = base._meta.concrete_model base_key = make_model_tuple(base) if base_key in parent_links: field = parent_links[base_key] elif not is_proxy: attr_name = '%s_ptr' % base._meta.model_name field = OneToOneField( base, on_delete=CASCADE, name=attr_name, auto_created=True, parent_link=True, ) if attr_name in field_names: raise FieldError( "Auto-generated field '%s' in class %r for " "parent_link to base class %r clashes with " "declared field of the same name." % ( attr_name, name, base.__name__, ) ) # Only add the ptr field if it's not already present; # e.g. migrations will already have it specified if not hasattr(new_class, attr_name): new_class.add_to_class(attr_name, field) else: field = None new_class._meta.parents[base] = field else: base_parents = base._meta.parents.copy() # Add fields from abstract base class if it wasn't overridden. for field in parent_fields: if (field.name not in field_names and field.name not in new_class.__dict__ and field.name not in inherited_attributes): new_field = copy.deepcopy(field) new_class.add_to_class(field.name, new_field) # Replace parent links defined on this base by the new # field. It will be appropriately resolved if required. if field.one_to_one: for parent, parent_link in base_parents.items(): if field == parent_link: base_parents[parent] = new_field # Pass any non-abstract parent classes onto child. new_class._meta.parents.update(base_parents) # Inherit private fields (like GenericForeignKey) from the parent # class for field in base._meta.private_fields: if field.name in field_names: if not base._meta.abstract: raise FieldError( 'Local field %r in class %r clashes with field of ' 'the same name from base class %r.' % ( field.name, name, base.__name__, ) ) else: field = copy.deepcopy(field) if not base._meta.abstract: field.mti_inherited = True new_class.add_to_class(field.name, field) # Copy indexes so that index names are unique when models extend an # abstract model. new_class._meta.indexes = [copy.deepcopy(idx) for idx in new_class._meta.indexes] if abstract: # Abstract base models can't be instantiated and don't appear in # the list of models for an app. We do the final setup for them a # little differently from normal models. attr_meta.abstract = False new_class.Meta = attr_meta return new_class new_class._prepare() new_class._meta.apps.register_model(new_class._meta.app_label, new_class) return new_class def add_to_class(cls, name, value): if _has_contribute_to_class(value): value.contribute_to_class(cls, name) else: setattr(cls, name, value) def _prepare(cls): """Create some methods once self._meta has been populated.""" opts = cls._meta opts._prepare(cls) if opts.order_with_respect_to: cls.get_next_in_order = partialmethod(cls._get_next_or_previous_in_order, is_next=True) cls.get_previous_in_order = partialmethod(cls._get_next_or_previous_in_order, is_next=False) # Defer creating accessors on the foreign class until it has been # created and registered. 
If remote_field is None, we're ordering # with respect to a GenericForeignKey and don't know what the # foreign class is - we'll add those accessors later in # contribute_to_class(). if opts.order_with_respect_to.remote_field: wrt = opts.order_with_respect_to remote = wrt.remote_field.model lazy_related_operation(make_foreign_order_accessors, cls, remote) # Give the class a docstring -- its definition. if cls.__doc__ is None: cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join(f.name for f in opts.fields)) get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(opts.label_lower) if get_absolute_url_override: setattr(cls, 'get_absolute_url', get_absolute_url_override) if not opts.managers: if any(f.name == 'objects' for f in opts.fields): raise ValueError( "Model %s must specify a custom Manager, because it has a " "field named 'objects'." % cls.__name__ ) manager = Manager() manager.auto_created = True cls.add_to_class('objects', manager) # Set the name of _meta.indexes. This can't be done in # Options.contribute_to_class() because fields haven't been added to # the model at that point. for index in cls._meta.indexes: if not index.name: index.set_name_with_model(cls) class_prepared.send(sender=cls) @property def _base_manager(cls): return cls._meta.base_manager @property def _default_manager(cls): return cls._meta.default_manager class ModelStateFieldsCacheDescriptor: def __get__(self, instance, cls=None): if instance is None: return self res = instance.fields_cache = {} return res class ModelState: """Store model instance state.""" db = None # If true, uniqueness validation checks will consider this a new, unsaved # object. Necessary for correct validation of new instances of objects with # explicit (non-auto) PKs. This impacts validation only; it has no effect # on the actual save. adding = True fields_cache = ModelStateFieldsCacheDescriptor() class Model(metaclass=ModelBase): def __init__(self, *args, **kwargs): # Alias some things as locals to avoid repeat global lookups cls = self.__class__ opts = self._meta _setattr = setattr _DEFERRED = DEFERRED if opts.abstract: raise TypeError('Abstract models cannot be instantiated.') pre_init.send(sender=cls, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. if len(args) > len(opts.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(opts.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): if val is _DEFERRED: continue _setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(opts.fields) for val, field in zip(args, fields_iter): if val is _DEFERRED: continue _setattr(self, field.attname, val) if kwargs.pop(field.name, NOT_PROVIDED) is not NOT_PROVIDED: raise TypeError( f"{cls.__qualname__}() got both positional and " f"keyword arguments for field '{field.name}'." ) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. 
for field in fields_iter: is_related_object = False # Virtual field if field.attname not in kwargs and field.column is None: continue if kwargs: if isinstance(field.remote_field, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. if rel_obj is not _DEFERRED: _setattr(self, field.name, rel_obj) else: if val is not _DEFERRED: _setattr(self, field.attname, val) if kwargs: property_names = opts._property_names for prop in tuple(kwargs): try: # Any remaining kwargs must correspond to properties or # virtual fields. if prop in property_names or opts.get_field(prop): if kwargs[prop] is not _DEFERRED: _setattr(self, prop, kwargs[prop]) del kwargs[prop] except (AttributeError, FieldDoesNotExist): pass for kwarg in kwargs: raise TypeError("%s() got an unexpected keyword argument '%s'" % (cls.__name__, kwarg)) super().__init__() post_init.send(sender=cls, instance=self) @classmethod def from_db(cls, db, field_names, values): if len(values) != len(cls._meta.concrete_fields): values_iter = iter(values) values = [ next(values_iter) if f.attname in field_names else DEFERRED for f in cls._meta.concrete_fields ] new = cls(*values) new._state.adding = False new._state.db = db return new def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def __str__(self): return '%s object (%s)' % (self.__class__.__name__, self.pk) def __eq__(self, other): if not isinstance(other, Model): return NotImplemented if self._meta.concrete_model != other._meta.concrete_model: return False my_pk = self.pk if my_pk is None: return self is other return my_pk == other.pk def __hash__(self): if self.pk is None: raise TypeError("Model instances without primary key value are unhashable") return hash(self.pk) def __reduce__(self): data = self.__getstate__() data[DJANGO_VERSION_PICKLE_KEY] = django.__version__ class_id = self._meta.app_label, self._meta.object_name return model_unpickle, (class_id,), data def __getstate__(self): """Hook to allow choosing the attributes to pickle.""" state = self.__dict__.copy() state['_state'] = copy.copy(state['_state']) state['_state'].fields_cache = state['_state'].fields_cache.copy() return state def __setstate__(self, state): pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY) if pickled_version: if pickled_version != django.__version__: warnings.warn( "Pickled model instance's Django version %s does not " "match the current version %s." 
% (pickled_version, django.__version__), RuntimeWarning, stacklevel=2, ) else: warnings.warn( "Pickled model instance's Django version is not specified.", RuntimeWarning, stacklevel=2, ) self.__dict__.update(state) def _get_pk_val(self, meta=None): meta = meta or self._meta return getattr(self, meta.pk.attname) def _set_pk_val(self, value): for parent_link in self._meta.parents.values(): if parent_link and parent_link != self._meta.pk: setattr(self, parent_link.target_field.attname, value) return setattr(self, self._meta.pk.attname, value) pk = property(_get_pk_val, _set_pk_val) def get_deferred_fields(self): """ Return a set containing names of deferred fields for this instance. """ return { f.attname for f in self._meta.concrete_fields if f.attname not in self.__dict__ } def refresh_from_db(self, using=None, fields=None): """ Reload field values from the database. By default, the reloading happens from the database this instance was loaded from, or by the read router if this instance wasn't loaded from any database. The using parameter will override the default. Fields can be used to specify which fields to reload. The fields should be an iterable of field attnames. If fields is None, then all non-deferred fields are reloaded. When accessing deferred fields of an instance, the deferred loading of the field will call this method. """ if fields is None: self._prefetched_objects_cache = {} else: prefetched_objects_cache = getattr(self, '_prefetched_objects_cache', ()) for field in fields: if field in prefetched_objects_cache: del prefetched_objects_cache[field] fields.remove(field) if not fields: return if any(LOOKUP_SEP in f for f in fields): raise ValueError( 'Found "%s" in fields argument. Relations and transforms ' 'are not allowed in fields.' % LOOKUP_SEP) hints = {'instance': self} db_instance_qs = self.__class__._base_manager.db_manager(using, hints=hints).filter(pk=self.pk) # Use provided fields, if not set then reload all non-deferred fields. deferred_fields = self.get_deferred_fields() if fields is not None: fields = list(fields) db_instance_qs = db_instance_qs.only(*fields) elif deferred_fields: fields = [f.attname for f in self._meta.concrete_fields if f.attname not in deferred_fields] db_instance_qs = db_instance_qs.only(*fields) db_instance = db_instance_qs.get() non_loaded_fields = db_instance.get_deferred_fields() for field in self._meta.concrete_fields: if field.attname in non_loaded_fields: # This field wasn't refreshed - skip ahead. continue setattr(self, field.attname, getattr(db_instance, field.attname)) # Clear cached foreign keys. if field.is_relation and field.is_cached(self): field.delete_cached_value(self) # Clear cached relations. for field in self._meta.related_objects: if field.is_cached(self): field.delete_cached_value(self) self._state.db = db_instance._state.db def serializable_value(self, field_name): """ Return the value of the field name for this instance. If the field is a foreign key, return the id value instead of the object. If there's no Field object with this name on the model, return the model attribute's value. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field(field_name) except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname) def save(self, force_insert=False, force_update=False, using=None, update_fields=None): """ Save the current instance. 
Override this in a subclass if you want to control the saving process. The 'force_insert' and 'force_update' parameters can be used to insist that the "save" must be an SQL insert or update (or equivalent for non-SQL backends), respectively. Normally, they should not be set. """ self._prepare_related_fields_for_save(operation_name='save') using = using or router.db_for_write(self.__class__, instance=self) if force_insert and (force_update or update_fields): raise ValueError("Cannot force both insert and updating in model saving.") deferred_fields = self.get_deferred_fields() if update_fields is not None: # If update_fields is empty, skip the save. We do also check for # no-op saves later on for inheritance cases. This bailout is # still needed for skipping signal sending. if not update_fields: return update_fields = frozenset(update_fields) field_names = set() for field in self._meta.concrete_fields: if not field.primary_key: field_names.add(field.name) if field.name != field.attname: field_names.add(field.attname) non_model_fields = update_fields.difference(field_names) if non_model_fields: raise ValueError( 'The following fields do not exist in this model, are m2m ' 'fields, or are non-concrete fields: %s' % ', '.join(non_model_fields) ) # If saving to the same database, and this model is deferred, then # automatically do an "update_fields" save on the loaded fields. elif not force_insert and deferred_fields and using == self._state.db: field_names = set() for field in self._meta.concrete_fields: if not field.primary_key and not hasattr(field, 'through'): field_names.add(field.attname) loaded_fields = field_names.difference(deferred_fields) if loaded_fields: update_fields = frozenset(loaded_fields) self.save_base(using=using, force_insert=force_insert, force_update=force_update, update_fields=update_fields) save.alters_data = True def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handle the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. """ using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or update_fields cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: pre_save.send( sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields, ) # A transaction isn't needed if one query is issued. if meta.parents: context_manager = transaction.atomic(using=using, savepoint=False) else: context_manager = transaction.mark_for_rollback_on_error(using=using) with context_manager: parent_inserted = False if not raw: parent_inserted = self._save_parents(cls, using, update_fields) updated = self._save_table( raw, cls, force_insert or parent_inserted, force_update, using, update_fields, ) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. 
self._state.adding = False # Signal that the save is complete if not meta.auto_created: post_save.send( sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using, ) save_base.alters_data = True def _save_parents(self, cls, using, update_fields): """Save all the parents of cls using values from self.""" meta = cls._meta inserted = False for parent, field in meta.parents.items(): # Make sure the link fields are synced between parent and self. if (field and getattr(self, parent._meta.pk.attname) is None and getattr(self, field.attname) is not None): setattr(self, parent._meta.pk.attname, getattr(self, field.attname)) parent_inserted = self._save_parents(cls=parent, using=using, update_fields=update_fields) updated = self._save_table( cls=parent, using=using, update_fields=update_fields, force_insert=parent_inserted, ) if not updated: inserted = True # Set the parent's PK value to self. if field: setattr(self, field.attname, self._get_pk_val(parent._meta)) # Since we didn't have an instance of the parent handy set # attname directly, bypassing the descriptor. Invalidate # the related object cache, in case it's been accidentally # populated. A fresh instance will be re-built from the # database if necessary. if field.is_cached(self): field.delete_cached_value(self) return inserted def _save_table(self, raw=False, cls=None, force_insert=False, force_update=False, using=None, update_fields=None): """ Do the heavy-lifting involved in saving. Update or insert the data for a single table. """ meta = cls._meta non_pks = [f for f in meta.local_concrete_fields if not f.primary_key] if update_fields: non_pks = [f for f in non_pks if f.name in update_fields or f.attname in update_fields] pk_val = self._get_pk_val(meta) if pk_val is None: pk_val = meta.pk.get_pk_value_on_save(self) setattr(self, meta.pk.attname, pk_val) pk_set = pk_val is not None if not pk_set and (force_update or update_fields): raise ValueError("Cannot force an update in save() with no primary key.") updated = False # Skip an UPDATE when adding an instance and primary key has a default. if ( not raw and not force_insert and self._state.adding and meta.pk.default and meta.pk.default is not NOT_PROVIDED ): force_insert = True # If possible, try an UPDATE. If that doesn't update anything, do an INSERT. 
if pk_set and not force_insert: base_qs = cls._base_manager.using(using) values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False))) for f in non_pks] forced_update = update_fields or force_update updated = self._do_update(base_qs, using, pk_val, values, update_fields, forced_update) if force_update and not updated: raise DatabaseError("Forced update did not affect any rows.") if update_fields and not updated: raise DatabaseError("Save with update_fields did not affect any rows.") if not updated: if meta.order_with_respect_to: # If this is a model with an order_with_respect_to # autopopulate the _order field field = meta.order_with_respect_to filter_args = field.get_filter_kwargs_for_object(self) self._order = cls._base_manager.using(using).filter(**filter_args).aggregate( _order__max=Coalesce( ExpressionWrapper(Max('_order') + Value(1), output_field=IntegerField()), Value(0), ), )['_order__max'] fields = meta.local_concrete_fields if not pk_set: fields = [f for f in fields if f is not meta.auto_field] returning_fields = meta.db_returning_fields results = self._do_insert(cls._base_manager, using, fields, returning_fields, raw) if results: for value, field in zip(results[0], returning_fields): setattr(self, field.attname, value) return updated def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update): """ Try to update the model. Return True if the model was updated (if an update query was done and a matching row was found in the DB). """ filtered = base_qs.filter(pk=pk_val) if not values: # We can end up here when saving a model in inheritance chain where # update_fields doesn't target any field in current model. In that # case we just say the update succeeded. Another case ending up here # is a model with just PK - in that case check that the PK still # exists. return update_fields is not None or filtered.exists() if self._meta.select_on_save and not forced_update: return ( filtered.exists() and # It may happen that the object is deleted from the DB right after # this check, causing the subsequent UPDATE to return zero matching # rows. The same result can occur in some rare cases when the # database returns zero despite the UPDATE being executed # successfully (a row is matched and updated). In order to # distinguish these two cases, the object's existence in the # database is again checked for if the UPDATE query returns 0. (filtered._update(values) > 0 or filtered.exists()) ) return filtered._update(values) > 0 def _do_insert(self, manager, using, fields, returning_fields, raw): """ Do an INSERT. If returning_fields is defined then this method should return the newly created data for the model. """ return manager._insert( [self], fields=fields, returning_fields=returning_fields, using=using, raw=raw, ) def _prepare_related_fields_for_save(self, operation_name): # Ensure that a model instance without a PK hasn't been assigned to # a ForeignKey or OneToOneField on this model. If the field is # nullable, allowing the save would result in silent data loss. for field in self._meta.concrete_fields: # If the related field isn't cached, then an instance hasn't been # assigned and there's no need to worry about this check. 
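            # (field.is_cached() is only True once a related instance has been
            # assigned or loaded, so untouched relations are skipped cheaply.)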
if field.is_relation and field.is_cached(self): obj = getattr(self, field.name, None) if not obj: continue # A pk may have been assigned manually to a model instance not # saved to the database (or auto-generated in a case like # UUIDField), but we allow the save to proceed and rely on the # database to raise an IntegrityError if applicable. If # constraints aren't supported by the database, there's the # unavoidable risk of data corruption. if obj.pk is None: # Remove the object from a related instance cache. if not field.remote_field.multiple: field.remote_field.delete_cached_value(obj) raise ValueError( "%s() prohibited to prevent data loss due to unsaved " "related object '%s'." % (operation_name, field.name) ) elif getattr(self, field.attname) in field.empty_values: # Use pk from related object if it has been saved after # an assignment. setattr(self, field.attname, obj.pk) # If the relationship's pk/to_field was changed, clear the # cached relationship. if getattr(obj, field.target_field.attname) != getattr(self, field.attname): field.delete_cached_value(self) def delete(self, using=None, keep_parents=False): using = using or router.db_for_write(self.__class__, instance=self) assert self.pk is not None, ( "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname) ) collector = Collector(using=using) collector.collect([self], keep_parents=keep_parents) return collector.delete() delete.alters_data = True def _get_FIELD_display(self, field): value = getattr(self, field.attname) choices_dict = dict(make_hashable(field.flatchoices)) # force_str() to coerce lazy strings. return force_str(choices_dict.get(make_hashable(value), value), strings_only=True) def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs): if not self.pk: raise ValueError("get_next/get_previous cannot be used on unsaved objects.") op = 'gt' if is_next else 'lt' order = '' if is_next else '-' param = getattr(self, field.attname) q = Q(**{'%s__%s' % (field.name, op): param}) q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk}) qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by( '%s%s' % (order, field.name), '%spk' % order ) try: return qs[0] except IndexError: raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name) def _get_next_or_previous_in_order(self, is_next): cachename = "__%s_order_cache" % is_next if not hasattr(self, cachename): op = 'gt' if is_next else 'lt' order = '_order' if is_next else '-_order' order_field = self._meta.order_with_respect_to filter_args = order_field.get_filter_kwargs_for_object(self) obj = self.__class__._default_manager.filter(**filter_args).filter(**{ '_order__%s' % op: self.__class__._default_manager.values('_order').filter(**{ self._meta.pk.name: self.pk }) }).order_by(order)[:1].get() setattr(self, cachename, obj) return getattr(self, cachename) def prepare_database_save(self, field): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return getattr(self, field.remote_field.get_related_field().attname) def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. 
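
        A minimal sketch of an override (field names are hypothetical):

            def clean(self):
                if self.start_date and self.end_date and self.start_date > self.end_date:
                    raise ValidationError('start_date must precede end_date.')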
""" pass def validate_unique(self, exclude=None): """ Check unique constraints on the model and raise ValidationError if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors) def _get_unique_checks(self, exclude=None): """ Return a list of checks to perform. Since validate_unique() could be called from a ModelForm, some fields may have been excluded; we can't perform a unique check on a model that is missing fields involved in that check. Fields that did not validate should also be excluded, but they need to be passed in via the exclude argument. """ if exclude is None: exclude = [] unique_checks = [] unique_togethers = [(self.__class__, self._meta.unique_together)] constraints = [(self.__class__, self._meta.total_unique_constraints)] for parent_class in self._meta.get_parent_list(): if parent_class._meta.unique_together: unique_togethers.append((parent_class, parent_class._meta.unique_together)) if parent_class._meta.total_unique_constraints: constraints.append( (parent_class, parent_class._meta.total_unique_constraints) ) for model_class, unique_together in unique_togethers: for check in unique_together: if not any(name in exclude for name in check): # Add the check if the field isn't excluded. unique_checks.append((model_class, tuple(check))) for model_class, model_constraints in constraints: for constraint in model_constraints: if not any(name in exclude for name in constraint.fields): unique_checks.append((model_class, constraint.fields)) # These are checks for the unique_for_<date/year/month>. date_checks = [] # Gather a list of checks for fields declared as unique and add them to # the list of checks. fields_with_class = [(self.__class__, self._meta.local_fields)] for parent_class in self._meta.get_parent_list(): fields_with_class.append((parent_class, parent_class._meta.local_fields)) for model_class, fields in fields_with_class: for f in fields: name = f.name if name in exclude: continue if f.unique: unique_checks.append((model_class, (name,))) if f.unique_for_date and f.unique_for_date not in exclude: date_checks.append((model_class, 'date', name, f.unique_for_date)) if f.unique_for_year and f.unique_for_year not in exclude: date_checks.append((model_class, 'year', name, f.unique_for_year)) if f.unique_for_month and f.unique_for_month not in exclude: date_checks.append((model_class, 'month', name, f.unique_for_month)) return unique_checks, date_checks def _perform_unique_checks(self, unique_checks): errors = {} for model_class, unique_check in unique_checks: # Try to look up an existing object with the same values as this # object's values for all the unique field. lookup_kwargs = {} for field_name in unique_check: f = self._meta.get_field(field_name) lookup_value = getattr(self, f.attname) # TODO: Handle multiple backends with different feature flags. 
if (lookup_value is None or (lookup_value == '' and connection.features.interprets_empty_strings_as_nulls)): # no value, skip the lookup continue if f.primary_key and not self._state.adding: # no need to check for unique primary key when editing continue lookup_kwargs[str(field_name)] = lookup_value # some fields were skipped, no reason to do the check if len(unique_check) != len(lookup_kwargs): continue qs = model_class._default_manager.filter(**lookup_kwargs) # Exclude the current object from the query if we are editing an # instance (as opposed to creating a new one) # Note that we need to use the pk as defined by model_class, not # self.pk. These can be different fields because model inheritance # allows single model to have effectively multiple primary keys. # Refs #17615. model_class_pk = self._get_pk_val(model_class._meta) if not self._state.adding and model_class_pk is not None: qs = qs.exclude(pk=model_class_pk) if qs.exists(): if len(unique_check) == 1: key = unique_check[0] else: key = NON_FIELD_ERRORS errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check)) return errors def _perform_date_checks(self, date_checks): errors = {} for model_class, lookup_type, field, unique_for in date_checks: lookup_kwargs = {} # there's a ticket to add a date lookup, we can remove this special # case if that makes it's way in date = getattr(self, unique_for) if date is None: continue if lookup_type == 'date': lookup_kwargs['%s__day' % unique_for] = date.day lookup_kwargs['%s__month' % unique_for] = date.month lookup_kwargs['%s__year' % unique_for] = date.year else: lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type) lookup_kwargs[field] = getattr(self, field) qs = model_class._default_manager.filter(**lookup_kwargs) # Exclude the current object from the query if we are editing an # instance (as opposed to creating a new one) if not self._state.adding and self.pk is not None: qs = qs.exclude(pk=self.pk) if qs.exists(): errors.setdefault(field, []).append( self.date_error_message(lookup_type, field, unique_for) ) return errors def date_error_message(self, lookup_type, field_name, unique_for): opts = self._meta field = opts.get_field(field_name) return ValidationError( message=field.error_messages['unique_for_date'], code='unique_for_date', params={ 'model': self, 'model_name': capfirst(opts.verbose_name), 'lookup_type': lookup_type, 'field': field_name, 'field_label': capfirst(field.verbose_name), 'date_field': unique_for, 'date_field_label': capfirst(opts.get_field(unique_for).verbose_name), } ) def unique_error_message(self, model_class, unique_check): opts = model_class._meta params = { 'model': self, 'model_class': model_class, 'model_name': capfirst(opts.verbose_name), 'unique_check': unique_check, } # A unique field if len(unique_check) == 1: field = opts.get_field(unique_check[0]) params['field_label'] = capfirst(field.verbose_name) return ValidationError( message=field.error_messages['unique'], code='unique', params=params, ) # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] params['field_labels'] = get_text_list(field_labels, _('and')) return ValidationError( message=_("%(model_name)s with this %(field_labels)s already exists."), code='unique_together', params=params, ) def full_clean(self, exclude=None, validate_unique=True): """ Call clean_fields(), clean(), and validate_unique() on the model. Raise a ValidationError for any errors that occur. 
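
        A typical call site (sketch with a hypothetical model):

            article = Article(title='...', pub_date=None)
            article.full_clean()  # raises ValidationError on invalid data
            article.save()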
""" errors = {} if exclude is None: exclude = [] else: exclude = list(exclude) try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. if validate_unique: for name in errors: if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors) def clean_fields(self, exclude=None): """ Clean all fields and raise a ValidationError containing a dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors) @classmethod def check(cls, **kwargs): errors = [*cls._check_swappable(), *cls._check_model(), *cls._check_managers(**kwargs)] if not cls._meta.swapped: databases = kwargs.get('databases') or [] errors += [ *cls._check_fields(**kwargs), *cls._check_m2m_through_same_relationship(), *cls._check_long_column_names(databases), ] clash_errors = ( *cls._check_id_field(), *cls._check_field_name_clashes(), *cls._check_model_name_db_lookup_clashes(), *cls._check_property_name_related_field_accessor_clashes(), *cls._check_single_primary_key(), ) errors.extend(clash_errors) # If there are field name clashes, hide consequent column name # clashes. if not clash_errors: errors.extend(cls._check_column_name_clashes()) errors += [ *cls._check_index_together(), *cls._check_unique_together(), *cls._check_indexes(databases), *cls._check_ordering(), *cls._check_constraints(databases), *cls._check_default_pk(), ] return errors @classmethod def _check_default_pk(cls): if ( cls._meta.pk.auto_created and # Inherited PKs are checked in parents models. not ( isinstance(cls._meta.pk, OneToOneField) and cls._meta.pk.remote_field.parent_link ) and not settings.is_overridden('DEFAULT_AUTO_FIELD') and not cls._meta.app_config._is_default_auto_field_overridden ): return [ checks.Warning( f"Auto-created primary key used when not defining a " f"primary key type, by default " f"'{settings.DEFAULT_AUTO_FIELD}'.", hint=( f"Configure the DEFAULT_AUTO_FIELD setting or the " f"{cls._meta.app_config.__class__.__qualname__}." f"default_auto_field attribute to point to a subclass " f"of AutoField, e.g. 'django.db.models.BigAutoField'." ), obj=cls, id='models.W042', ), ] return [] @classmethod def _check_swappable(cls): """Check if the swapped model exists.""" errors = [] if cls._meta.swapped: try: apps.get_model(cls._meta.swapped) except ValueError: errors.append( checks.Error( "'%s' is not of the form 'app_label.app_name'." % cls._meta.swappable, id='models.E001', ) ) except LookupError: app_label, model_name = cls._meta.swapped.split('.') errors.append( checks.Error( "'%s' references '%s.%s', which has not been " "installed, or is abstract." 
% ( cls._meta.swappable, app_label, model_name ), id='models.E002', ) ) return errors @classmethod def _check_model(cls): errors = [] if cls._meta.proxy: if cls._meta.local_fields or cls._meta.local_many_to_many: errors.append( checks.Error( "Proxy model '%s' contains model fields." % cls.__name__, id='models.E017', ) ) return errors @classmethod def _check_managers(cls, **kwargs): """Perform all manager checks.""" errors = [] for manager in cls._meta.managers: errors.extend(manager.check(**kwargs)) return errors @classmethod def _check_fields(cls, **kwargs): """Perform all field checks.""" errors = [] for field in cls._meta.local_fields: errors.extend(field.check(**kwargs)) for field in cls._meta.local_many_to_many: errors.extend(field.check(from_model=cls, **kwargs)) return errors @classmethod def _check_m2m_through_same_relationship(cls): """ Check if no relationship model is used by more than one m2m field. """ errors = [] seen_intermediary_signatures = [] fields = cls._meta.local_many_to_many # Skip when the target model wasn't found. fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase)) # Skip when the relationship model wasn't found. fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase)) for f in fields: signature = (f.remote_field.model, cls, f.remote_field.through, f.remote_field.through_fields) if signature in seen_intermediary_signatures: errors.append( checks.Error( "The model has two identical many-to-many relations " "through the intermediate model '%s'." % f.remote_field.through._meta.label, obj=cls, id='models.E003', ) ) else: seen_intermediary_signatures.append(signature) return errors @classmethod def _check_id_field(cls): """Check if `id` field is a primary key.""" fields = [f for f in cls._meta.local_fields if f.name == 'id' and f != cls._meta.pk] # fields is empty or consists of the invalid "id" field if fields and not fields[0].primary_key and cls._meta.pk.name == 'id': return [ checks.Error( "'id' can only be used as a field name if the field also " "sets 'primary_key=True'.", obj=cls, id='models.E004', ) ] else: return [] @classmethod def _check_field_name_clashes(cls): """Forbid field shadowing in multi-table inheritance.""" errors = [] used_fields = {} # name or attname -> field # Check that multi-inheritance doesn't cause field name shadowing. for parent in cls._meta.get_parent_list(): for f in parent._meta.local_fields: clash = used_fields.get(f.name) or used_fields.get(f.attname) or None if clash: errors.append( checks.Error( "The field '%s' from parent model " "'%s' clashes with the field '%s' " "from parent model '%s'." % ( clash.name, clash.model._meta, f.name, f.model._meta ), obj=cls, id='models.E005', ) ) used_fields[f.name] = f used_fields[f.attname] = f # Check that fields defined in the model don't clash with fields from # parents, including auto-generated fields like multi-table inheritance # child accessors. for parent in cls._meta.get_parent_list(): for f in parent._meta.get_fields(): if f not in used_fields: used_fields[f.name] = f for f in cls._meta.local_fields: clash = used_fields.get(f.name) or used_fields.get(f.attname) or None # Note that we may detect clash between user-defined non-unique # field "id" and automatically added unique field "id", both # defined at the same model. This special case is considered in # _check_id_field and here we ignore it. 
id_conflict = f.name == "id" and clash and clash.name == "id" and clash.model == cls if clash and not id_conflict: errors.append( checks.Error( "The field '%s' clashes with the field '%s' " "from model '%s'." % ( f.name, clash.name, clash.model._meta ), obj=f, id='models.E006', ) ) used_fields[f.name] = f used_fields[f.attname] = f return errors @classmethod def _check_column_name_clashes(cls): # Store a list of column names which have already been used by other fields. used_column_names = [] errors = [] for f in cls._meta.local_fields: _, column_name = f.get_attname_column() # Ensure the column name is not already in use. if column_name and column_name in used_column_names: errors.append( checks.Error( "Field '%s' has column name '%s' that is used by " "another field." % (f.name, column_name), hint="Specify a 'db_column' for the field.", obj=cls, id='models.E007' ) ) else: used_column_names.append(column_name) return errors @classmethod def _check_model_name_db_lookup_clashes(cls): errors = [] model_name = cls.__name__ if model_name.startswith('_') or model_name.endswith('_'): errors.append( checks.Error( "The model name '%s' cannot start or end with an underscore " "as it collides with the query lookup syntax." % model_name, obj=cls, id='models.E023' ) ) elif LOOKUP_SEP in model_name: errors.append( checks.Error( "The model name '%s' cannot contain double underscores as " "it collides with the query lookup syntax." % model_name, obj=cls, id='models.E024' ) ) return errors @classmethod def _check_property_name_related_field_accessor_clashes(cls): errors = [] property_names = cls._meta._property_names related_field_accessors = ( f.get_attname() for f in cls._meta._get_fields(reverse=False) if f.is_relation and f.related_model is not None ) for accessor in related_field_accessors: if accessor in property_names: errors.append( checks.Error( "The property '%s' clashes with a related field " "accessor." 
% accessor, obj=cls, id='models.E025', ) ) return errors @classmethod def _check_single_primary_key(cls): errors = [] if sum(1 for f in cls._meta.local_fields if f.primary_key) > 1: errors.append( checks.Error( "The model cannot have more than one field with " "'primary_key=True'.", obj=cls, id='models.E026', ) ) return errors @classmethod def _check_index_together(cls): """Check the value of "index_together" option.""" if not isinstance(cls._meta.index_together, (tuple, list)): return [ checks.Error( "'index_together' must be a list or tuple.", obj=cls, id='models.E008', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together): return [ checks.Error( "All 'index_together' elements must be lists or tuples.", obj=cls, id='models.E009', ) ] else: errors = [] for fields in cls._meta.index_together: errors.extend(cls._check_local_fields(fields, "index_together")) return errors @classmethod def _check_unique_together(cls): """Check the value of "unique_together" option.""" if not isinstance(cls._meta.unique_together, (tuple, list)): return [ checks.Error( "'unique_together' must be a list or tuple.", obj=cls, id='models.E010', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.unique_together): return [ checks.Error( "All 'unique_together' elements must be lists or tuples.", obj=cls, id='models.E011', ) ] else: errors = [] for fields in cls._meta.unique_together: errors.extend(cls._check_local_fields(fields, "unique_together")) return errors @classmethod def _check_indexes(cls, databases): """Check fields, names, and conditions of indexes.""" errors = [] references = set() for index in cls._meta.indexes: # Index name can't start with an underscore or a number, restricted # for cross-database compatibility with Oracle. if index.name[0] == '_' or index.name[0].isdigit(): errors.append( checks.Error( "The index name '%s' cannot start with an underscore " "or a number." % index.name, obj=cls, id='models.E033', ), ) if len(index.name) > index.max_name_length: errors.append( checks.Error( "The index name '%s' cannot be longer than %d " "characters." % (index.name, index.max_name_length), obj=cls, id='models.E034', ), ) if index.contains_expressions: for expression in index.expressions: references.update( ref[0] for ref in cls._get_expr_references(expression) ) for db in databases: if not router.allow_migrate_model(db, cls): continue connection = connections[db] if not ( connection.features.supports_partial_indexes or 'supports_partial_indexes' in cls._meta.required_db_features ) and any(index.condition is not None for index in cls._meta.indexes): errors.append( checks.Warning( '%s does not support indexes with conditions.' % connection.display_name, hint=( "Conditions will be ignored. Silence this warning " "if you don't care about it." ), obj=cls, id='models.W037', ) ) if not ( connection.features.supports_covering_indexes or 'supports_covering_indexes' in cls._meta.required_db_features ) and any(index.include for index in cls._meta.indexes): errors.append( checks.Warning( '%s does not support indexes with non-key columns.' % connection.display_name, hint=( "Non-key columns will be ignored. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W040', ) ) if not ( connection.features.supports_expression_indexes or 'supports_expression_indexes' in cls._meta.required_db_features ) and any(index.contains_expressions for index in cls._meta.indexes): errors.append( checks.Warning( '%s does not support indexes on expressions.' 
% connection.display_name, hint=( "An index won't be created. Silence this warning " "if you don't care about it." ), obj=cls, id='models.W043', ) ) fields = [field for index in cls._meta.indexes for field, _ in index.fields_orders] fields += [include for index in cls._meta.indexes for include in index.include] fields += references errors.extend(cls._check_local_fields(fields, 'indexes')) return errors @classmethod def _check_local_fields(cls, fields, option): from django.db import models # In order to avoid hitting the relation tree prematurely, we use our # own fields_map instead of using get_field() forward_fields_map = {} for field in cls._meta._get_fields(reverse=False): forward_fields_map[field.name] = field if hasattr(field, 'attname'): forward_fields_map[field.attname] = field errors = [] for field_name in fields: try: field = forward_fields_map[field_name] except KeyError: errors.append( checks.Error( "'%s' refers to the nonexistent field '%s'." % ( option, field_name, ), obj=cls, id='models.E012', ) ) else: if isinstance(field.remote_field, models.ManyToManyRel): errors.append( checks.Error( "'%s' refers to a ManyToManyField '%s', but " "ManyToManyFields are not permitted in '%s'." % ( option, field_name, option, ), obj=cls, id='models.E013', ) ) elif field not in cls._meta.local_fields: errors.append( checks.Error( "'%s' refers to field '%s' which is not local to model '%s'." % (option, field_name, cls._meta.object_name), hint="This issue may be caused by multi-table inheritance.", obj=cls, id='models.E016', ) ) return errors @classmethod def _check_ordering(cls): """ Check "ordering" option -- is it a list of strings and do all fields exist? """ if cls._meta._ordering_clash: return [ checks.Error( "'ordering' and 'order_with_respect_to' cannot be used together.", obj=cls, id='models.E021', ), ] if cls._meta.order_with_respect_to or not cls._meta.ordering: return [] if not isinstance(cls._meta.ordering, (list, tuple)): return [ checks.Error( "'ordering' must be a tuple or list (even if you want to order by only one field).", obj=cls, id='models.E014', ) ] errors = [] fields = cls._meta.ordering # Skip expressions and '?' fields. fields = (f for f in fields if isinstance(f, str) and f != '?') # Convert "-field" to "field". fields = ((f[1:] if f.startswith('-') else f) for f in fields) # Separate related fields and non-related fields. _fields = [] related_fields = [] for f in fields: if LOOKUP_SEP in f: related_fields.append(f) else: _fields.append(f) fields = _fields # Check related fields. for field in related_fields: _cls = cls fld = None for part in field.split(LOOKUP_SEP): try: # pk is an alias that won't be found by opts.get_field. if part == 'pk': fld = _cls._meta.pk else: fld = _cls._meta.get_field(part) if fld.is_relation: _cls = fld.get_path_info()[-1].to_opts.model else: _cls = None except (FieldDoesNotExist, AttributeError): if fld is None or ( fld.get_transform(part) is None and fld.get_lookup(part) is None ): errors.append( checks.Error( "'ordering' refers to the nonexistent field, " "related field, or lookup '%s'." % field, obj=cls, id='models.E015', ) ) # Skip ordering on pk. This is always a valid order_by field # but is an alias and therefore won't be found by opts.get_field. fields = {f for f in fields if f != 'pk'} # Check for invalid or nonexistent fields in ordering. invalid_fields = [] # Any field name that is not present in field_names does not exist. # Also, ordering by m2m fields is not allowed. 
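        # valid_fields below covers concrete field names/attnames and reverse
        # relation query names; anything left over is reported as models.E015.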
opts = cls._meta valid_fields = set(chain.from_iterable( (f.name, f.attname) if not (f.auto_created and not f.concrete) else (f.field.related_query_name(),) for f in chain(opts.fields, opts.related_objects) )) invalid_fields.extend(fields - valid_fields) for invalid_field in invalid_fields: errors.append( checks.Error( "'ordering' refers to the nonexistent field, related " "field, or lookup '%s'." % invalid_field, obj=cls, id='models.E015', ) ) return errors @classmethod def _check_long_column_names(cls, databases): """ Check that any auto-generated column names are shorter than the limits for each database in which the model will be created. """ if not databases: return [] errors = [] allowed_len = None db_alias = None # Find the minimum max allowed length among all specified db_aliases. for db in databases: # skip databases where the model won't be created if not router.allow_migrate_model(db, cls): continue connection = connections[db] max_name_length = connection.ops.max_name_length() if max_name_length is None or connection.features.truncates_names: continue else: if allowed_len is None: allowed_len = max_name_length db_alias = db elif max_name_length < allowed_len: allowed_len = max_name_length db_alias = db if allowed_len is None: return errors for f in cls._meta.local_fields: _, column_name = f.get_attname_column() # Check if auto-generated name for the field is too long # for the database. if f.db_column is None and column_name is not None and len(column_name) > allowed_len: errors.append( checks.Error( 'Autogenerated column name too long for field "%s". ' 'Maximum length is "%s" for database "%s".' % (column_name, allowed_len, db_alias), hint="Set the column name manually using 'db_column'.", obj=cls, id='models.E018', ) ) for f in cls._meta.local_many_to_many: # Skip nonexistent models. if isinstance(f.remote_field.through, str): continue # Check if auto-generated name for the M2M field is too long # for the database. for m2m in f.remote_field.through._meta.local_fields: _, rel_name = m2m.get_attname_column() if m2m.db_column is None and rel_name is not None and len(rel_name) > allowed_len: errors.append( checks.Error( 'Autogenerated column name too long for M2M field ' '"%s". Maximum length is "%s" for database "%s".' % (rel_name, allowed_len, db_alias), hint=( "Use 'through' to create a separate model for " "M2M and then set column_name using 'db_column'." ), obj=cls, id='models.E019', ) ) return errors @classmethod def _get_expr_references(cls, expr): if isinstance(expr, Q): for child in expr.children: if isinstance(child, tuple): lookup, value = child yield tuple(lookup.split(LOOKUP_SEP)) yield from cls._get_expr_references(value) else: yield from cls._get_expr_references(child) elif isinstance(expr, F): yield tuple(expr.name.split(LOOKUP_SEP)) elif hasattr(expr, 'get_source_expressions'): for src_expr in expr.get_source_expressions(): yield from cls._get_expr_references(src_expr) @classmethod def _check_constraints(cls, databases): errors = [] for db in databases: if not router.allow_migrate_model(db, cls): continue connection = connections[db] if not ( connection.features.supports_table_check_constraints or 'supports_table_check_constraints' in cls._meta.required_db_features ) and any( isinstance(constraint, CheckConstraint) for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support check constraints.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." 
), obj=cls, id='models.W027', ) ) if not ( connection.features.supports_partial_indexes or 'supports_partial_indexes' in cls._meta.required_db_features ) and any( isinstance(constraint, UniqueConstraint) and constraint.condition is not None for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support unique constraints with ' 'conditions.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W036', ) ) if not ( connection.features.supports_deferrable_unique_constraints or 'supports_deferrable_unique_constraints' in cls._meta.required_db_features ) and any( isinstance(constraint, UniqueConstraint) and constraint.deferrable is not None for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support deferrable unique constraints.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W038', ) ) if not ( connection.features.supports_covering_indexes or 'supports_covering_indexes' in cls._meta.required_db_features ) and any( isinstance(constraint, UniqueConstraint) and constraint.include for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support unique constraints with non-key ' 'columns.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W039', ) ) if not ( connection.features.supports_expression_indexes or 'supports_expression_indexes' in cls._meta.required_db_features ) and any( isinstance(constraint, UniqueConstraint) and constraint.contains_expressions for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support unique constraints on ' 'expressions.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W044', ) ) fields = set(chain.from_iterable( (*constraint.fields, *constraint.include) for constraint in cls._meta.constraints if isinstance(constraint, UniqueConstraint) )) references = set() for constraint in cls._meta.constraints: if isinstance(constraint, UniqueConstraint): if ( connection.features.supports_partial_indexes or 'supports_partial_indexes' not in cls._meta.required_db_features ) and isinstance(constraint.condition, Q): references.update(cls._get_expr_references(constraint.condition)) if ( connection.features.supports_expression_indexes or 'supports_expression_indexes' not in cls._meta.required_db_features ) and constraint.contains_expressions: for expression in constraint.expressions: references.update(cls._get_expr_references(expression)) elif isinstance(constraint, CheckConstraint): if ( connection.features.supports_table_check_constraints or 'supports_table_check_constraints' not in cls._meta.required_db_features ) and isinstance(constraint.check, Q): references.update(cls._get_expr_references(constraint.check)) for field_name, *lookups in references: # pk is an alias that won't be found by opts.get_field. if field_name != 'pk': fields.add(field_name) if not lookups: # If it has no lookups it cannot result in a JOIN. continue try: if field_name == 'pk': field = cls._meta.pk else: field = cls._meta.get_field(field_name) if not field.is_relation or field.many_to_many or field.one_to_many: continue except FieldDoesNotExist: continue # JOIN must happen at the first lookup. 
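                # If the relational field defines no transform or lookup named
                # after the first lookup, the reference spans a JOIN (e.g.
                # 'fk__name'), which constraints cannot express (models.E041).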
first_lookup = lookups[0] if ( hasattr(field, 'get_transform') and hasattr(field, 'get_lookup') and field.get_transform(first_lookup) is None and field.get_lookup(first_lookup) is None ): errors.append( checks.Error( "'constraints' refers to the joined field '%s'." % LOOKUP_SEP.join([field_name] + lookups), obj=cls, id='models.E041', ) ) errors.extend(cls._check_local_fields(fields, 'constraints')) return errors ############################################ # HELPER FUNCTIONS (CURRIED MODEL METHODS) # ############################################ # ORDERING METHODS ######################### def method_set_order(self, ordered_obj, id_list, using=None): if using is None: using = DEFAULT_DB_ALIAS order_wrt = ordered_obj._meta.order_with_respect_to filter_args = order_wrt.get_forward_related_filter(self) ordered_obj.objects.db_manager(using).filter(**filter_args).bulk_update([ ordered_obj(pk=pk, _order=order) for order, pk in enumerate(id_list) ], ['_order']) def method_get_order(self, ordered_obj): order_wrt = ordered_obj._meta.order_with_respect_to filter_args = order_wrt.get_forward_related_filter(self) pk_name = ordered_obj._meta.pk.name return ordered_obj.objects.filter(**filter_args).values_list(pk_name, flat=True) def make_foreign_order_accessors(model, related_model): setattr( related_model, 'get_%s_order' % model.__name__.lower(), partialmethod(method_get_order, model) ) setattr( related_model, 'set_%s_order' % model.__name__.lower(), partialmethod(method_set_order, model) ) ######## # MISC # ######## def model_unpickle(model_id): """Used to unpickle Model subclasses with deferred fields.""" if isinstance(model_id, tuple): model = apps.get_model(*model_id) else: # Backwards compat - the model was cached directly in earlier versions. model = model_id return model.__new__(model) model_unpickle.__safe_for_unpickle__ = True
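

# A minimal usage sketch of the ordering accessors installed by
# make_foreign_order_accessors() above (model names are hypothetical; assume
# Answer declares Meta.order_with_respect_to = 'question'):
#
#     question.get_answer_order()           # -> answer pks in _order
#     question.set_answer_order([3, 1, 2])  # bulk-updates Answer._order
#
# get_answer_order() returns the related primary keys ordered by _order, and
# set_answer_order() rewrites _order so rows follow the given pk sequence.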
import copy import datetime import functools import inspect from decimal import Decimal from uuid import UUID from django.core.exceptions import EmptyResultSet, FieldError from django.db import DatabaseError, NotSupportedError, connection from django.db.models import fields from django.db.models.constants import LOOKUP_SEP from django.db.models.query_utils import Q from django.utils.deconstruct import deconstructible from django.utils.functional import cached_property from django.utils.hashable import make_hashable class SQLiteNumericMixin: """ Some expressions with output_field=DecimalField() must be cast to numeric to be properly filtered. """ def as_sqlite(self, compiler, connection, **extra_context): sql, params = self.as_sql(compiler, connection, **extra_context) try: if self.output_field.get_internal_type() == 'DecimalField': sql = 'CAST(%s AS NUMERIC)' % sql except FieldError: pass return sql, params class Combinable: """ Provide the ability to combine one or two objects with some connector. For example F('foo') + F('bar'). """ # Arithmetic connectors ADD = '+' SUB = '-' MUL = '*' DIV = '/' POW = '^' # The following is a quoted % operator - it is quoted because it can be # used in strings that also have parameter substitution. MOD = '%%' # Bitwise operators - note that these are generated by .bitand() # and .bitor(), the '&' and '|' are reserved for boolean operator # usage. BITAND = '&' BITOR = '|' BITLEFTSHIFT = '<<' BITRIGHTSHIFT = '>>' BITXOR = '#' def _combine(self, other, connector, reversed): if not hasattr(other, 'resolve_expression'): # everything must be resolvable to an expression other = Value(other) if reversed: return CombinedExpression(other, connector, self) return CombinedExpression(self, connector, other) ############# # OPERATORS # ############# def __neg__(self): return self._combine(-1, self.MUL, False) def __add__(self, other): return self._combine(other, self.ADD, False) def __sub__(self, other): return self._combine(other, self.SUB, False) def __mul__(self, other): return self._combine(other, self.MUL, False) def __truediv__(self, other): return self._combine(other, self.DIV, False) def __mod__(self, other): return self._combine(other, self.MOD, False) def __pow__(self, other): return self._combine(other, self.POW, False) def __and__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) & Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) def bitand(self, other): return self._combine(other, self.BITAND, False) def bitleftshift(self, other): return self._combine(other, self.BITLEFTSHIFT, False) def bitrightshift(self, other): return self._combine(other, self.BITRIGHTSHIFT, False) def bitxor(self, other): return self._combine(other, self.BITXOR, False) def __or__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) | Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." 
) def bitor(self, other): return self._combine(other, self.BITOR, False) def __radd__(self, other): return self._combine(other, self.ADD, True) def __rsub__(self, other): return self._combine(other, self.SUB, True) def __rmul__(self, other): return self._combine(other, self.MUL, True) def __rtruediv__(self, other): return self._combine(other, self.DIV, True) def __rmod__(self, other): return self._combine(other, self.MOD, True) def __rpow__(self, other): return self._combine(other, self.POW, True) def __rand__(self, other): raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) def __ror__(self, other): raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) @deconstructible class BaseExpression: """Base class for all query expressions.""" # aggregate specific fields is_summary = False _output_field_resolved_to_none = False # Can the expression be used in a WHERE clause? filterable = True # Can the expression can be used as a source expression in Window? window_compatible = False def __init__(self, output_field=None): if output_field is not None: self.output_field = output_field def __getstate__(self): state = self.__dict__.copy() state.pop('convert_value', None) return state def get_db_converters(self, connection): return ( [] if self.convert_value is self._convert_value_noop else [self.convert_value] ) + self.output_field.get_db_converters(connection) def get_source_expressions(self): return [] def set_source_expressions(self, exprs): assert not exprs def _parse_expressions(self, *expressions): return [ arg if hasattr(arg, 'resolve_expression') else ( F(arg) if isinstance(arg, str) else Value(arg) ) for arg in expressions ] def as_sql(self, compiler, connection): """ Responsible for returning a (sql, [params]) tuple to be included in the current query. Different backends can provide their own implementation, by providing an `as_{vendor}` method and patching the Expression: ``` def override_as_sql(self, compiler, connection): # custom logic return super().as_sql(compiler, connection) setattr(Expression, 'as_' + connection.vendor, override_as_sql) ``` Arguments: * compiler: the query compiler responsible for generating the query. Must have a compile method, returning a (sql, [params]) tuple. Calling compiler(value) will return a quoted `value`. * connection: the database connection used for the current query. Return: (sql, params) Where `sql` is a string containing ordered sql parameters to be replaced with the elements of the list `params`. """ raise NotImplementedError("Subclasses must implement as_sql()") @cached_property def contains_aggregate(self): return any(expr and expr.contains_aggregate for expr in self.get_source_expressions()) @cached_property def contains_over_clause(self): return any(expr and expr.contains_over_clause for expr in self.get_source_expressions()) @cached_property def contains_column_references(self): return any(expr and expr.contains_column_references for expr in self.get_source_expressions()) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): """ Provide the chance to do any preprocessing or validation before being added to the query. 
Arguments: * query: the backend query implementation * allow_joins: boolean allowing or denying use of joins in this query * reuse: a set of reusable joins for multijoins * summarize: a terminal aggregate clause * for_save: whether this expression about to be used in a save or update Return: an Expression to be added to the query. """ c = self.copy() c.is_summary = summarize c.set_source_expressions([ expr.resolve_expression(query, allow_joins, reuse, summarize) if expr else None for expr in c.get_source_expressions() ]) return c @property def conditional(self): return isinstance(self.output_field, fields.BooleanField) @property def field(self): return self.output_field @cached_property def output_field(self): """Return the output type of this expressions.""" output_field = self._resolve_output_field() if output_field is None: self._output_field_resolved_to_none = True raise FieldError('Cannot resolve expression type, unknown output_field') return output_field @cached_property def _output_field_or_none(self): """ Return the output field of this expression, or None if _resolve_output_field() didn't return an output type. """ try: return self.output_field except FieldError: if not self._output_field_resolved_to_none: raise def _resolve_output_field(self): """ Attempt to infer the output type of the expression. If the output fields of all source fields match then, simply infer the same type here. This isn't always correct, but it makes sense most of the time. Consider the difference between `2 + 2` and `2 / 3`. Inferring the type here is a convenience for the common case. The user should supply their own output_field with more complex computations. If a source's output field resolves to None, exclude it from this check. If all sources are None, then an error is raised higher up the stack in the output_field property. """ sources_iter = (source for source in self.get_source_fields() if source is not None) for output_field in sources_iter: for source in sources_iter: if not isinstance(output_field, source.__class__): raise FieldError( 'Expression contains mixed types: %s, %s. You must ' 'set output_field.' % ( output_field.__class__.__name__, source.__class__.__name__, ) ) return output_field @staticmethod def _convert_value_noop(value, expression, connection): return value @cached_property def convert_value(self): """ Expressions provide their own converters because users have the option of manually specifying the output_field which may be a different type from the one the database returns. 
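
        For instance (a sketch), an expression declared with
        output_field=DecimalField() hands rows back as Decimal even when the
        backend's cursor yields them as floats or strings.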
""" field = self.output_field internal_type = field.get_internal_type() if internal_type == 'FloatField': return lambda value, expression, connection: None if value is None else float(value) elif internal_type.endswith('IntegerField'): return lambda value, expression, connection: None if value is None else int(value) elif internal_type == 'DecimalField': return lambda value, expression, connection: None if value is None else Decimal(value) return self._convert_value_noop def get_lookup(self, lookup): return self.output_field.get_lookup(lookup) def get_transform(self, name): return self.output_field.get_transform(name) def relabeled_clone(self, change_map): clone = self.copy() clone.set_source_expressions([ e.relabeled_clone(change_map) if e is not None else None for e in self.get_source_expressions() ]) return clone def copy(self): return copy.copy(self) def get_group_by_cols(self, alias=None): if not self.contains_aggregate: return [self] cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def get_source_fields(self): """Return the underlying field types used by this aggregate.""" return [e._output_field_or_none for e in self.get_source_expressions()] def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def reverse_ordering(self): return self def flatten(self): """ Recursively yield this expression and all subexpressions, in depth-first order. """ yield self for expr in self.get_source_expressions(): if expr: if hasattr(expr, 'flatten'): yield from expr.flatten() else: yield expr def select_format(self, compiler, sql, params): """ Custom format for select clauses. For example, EXISTS expressions need to be wrapped in CASE WHEN on Oracle. 
""" if hasattr(self.output_field, 'select_format'): return self.output_field.select_format(compiler, sql, params) return sql, params @cached_property def identity(self): constructor_signature = inspect.signature(self.__init__) args, kwargs = self._constructor_args signature = constructor_signature.bind_partial(*args, **kwargs) signature.apply_defaults() arguments = signature.arguments.items() identity = [self.__class__] for arg, value in arguments: if isinstance(value, fields.Field): if value.name and value.model: value = (value.model._meta.label, value.name) else: value = type(value) else: value = make_hashable(value) identity.append((arg, value)) return tuple(identity) def __eq__(self, other): if not isinstance(other, BaseExpression): return NotImplemented return other.identity == self.identity def __hash__(self): return hash(self.identity) class Expression(BaseExpression, Combinable): """An expression that can be combined with other expressions.""" pass _connector_combinators = { connector: [ (fields.IntegerField, fields.IntegerField, fields.IntegerField), (fields.IntegerField, fields.DecimalField, fields.DecimalField), (fields.DecimalField, fields.IntegerField, fields.DecimalField), (fields.IntegerField, fields.FloatField, fields.FloatField), (fields.FloatField, fields.IntegerField, fields.FloatField), ] for connector in (Combinable.ADD, Combinable.SUB, Combinable.MUL, Combinable.DIV) } @functools.lru_cache(maxsize=128) def _resolve_combined_type(connector, lhs_type, rhs_type): combinators = _connector_combinators.get(connector, ()) for combinator_lhs_type, combinator_rhs_type, combined_type in combinators: if issubclass(lhs_type, combinator_lhs_type) and issubclass(rhs_type, combinator_rhs_type): return combined_type class CombinedExpression(SQLiteNumericMixin, Expression): def __init__(self, lhs, connector, rhs, output_field=None): super().__init__(output_field=output_field) self.connector = connector self.lhs = lhs self.rhs = rhs def __repr__(self): return "<{}: {}>".format(self.__class__.__name__, self) def __str__(self): return "{} {} {}".format(self.lhs, self.connector, self.rhs) def get_source_expressions(self): return [self.lhs, self.rhs] def set_source_expressions(self, exprs): self.lhs, self.rhs = exprs def _resolve_output_field(self): try: return super()._resolve_output_field() except FieldError: combined_type = _resolve_combined_type( self.connector, type(self.lhs.output_field), type(self.rhs.output_field), ) if combined_type is None: raise return combined_type() def as_sql(self, compiler, connection): expressions = [] expression_params = [] sql, params = compiler.compile(self.lhs) expressions.append(sql) expression_params.extend(params) sql, params = compiler.compile(self.rhs) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_expression(self.connector, expressions) return expression_wrapper % sql, expression_params def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): lhs = self.lhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) rhs = self.rhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) if not isinstance(self, (DurationExpression, TemporalSubtraction)): try: lhs_type = lhs.output_field.get_internal_type() except (AttributeError, FieldError): lhs_type = None try: rhs_type = rhs.output_field.get_internal_type() except (AttributeError, FieldError): rhs_type = None if 'DurationField' in {lhs_type, 
rhs_type} and lhs_type != rhs_type: return DurationExpression(self.lhs, self.connector, self.rhs).resolve_expression( query, allow_joins, reuse, summarize, for_save, ) datetime_fields = {'DateField', 'DateTimeField', 'TimeField'} if self.connector == self.SUB and lhs_type in datetime_fields and lhs_type == rhs_type: return TemporalSubtraction(self.lhs, self.rhs).resolve_expression( query, allow_joins, reuse, summarize, for_save, ) c = self.copy() c.is_summary = summarize c.lhs = lhs c.rhs = rhs return c class DurationExpression(CombinedExpression): def compile(self, side, compiler, connection): try: output = side.output_field except FieldError: pass else: if output.get_internal_type() == 'DurationField': sql, params = compiler.compile(side) return connection.ops.format_for_duration_arithmetic(sql), params return compiler.compile(side) def as_sql(self, compiler, connection): if connection.features.has_native_duration_field: return super().as_sql(compiler, connection) connection.ops.check_expression_support(self) expressions = [] expression_params = [] sql, params = self.compile(self.lhs, compiler, connection) expressions.append(sql) expression_params.extend(params) sql, params = self.compile(self.rhs, compiler, connection) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_duration_expression(self.connector, expressions) return expression_wrapper % sql, expression_params def as_sqlite(self, compiler, connection, **extra_context): sql, params = self.as_sql(compiler, connection, **extra_context) if self.connector in {Combinable.MUL, Combinable.DIV}: try: lhs_type = self.lhs.output_field.get_internal_type() rhs_type = self.rhs.output_field.get_internal_type() except (AttributeError, FieldError): pass else: allowed_fields = { 'DecimalField', 'DurationField', 'FloatField', 'IntegerField', } if lhs_type not in allowed_fields or rhs_type not in allowed_fields: raise DatabaseError( f'Invalid arguments for operator {self.connector}.' ) return sql, params class TemporalSubtraction(CombinedExpression): output_field = fields.DurationField() def __init__(self, lhs, rhs): super().__init__(lhs, self.SUB, rhs) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) lhs = compiler.compile(self.lhs) rhs = compiler.compile(self.rhs) return connection.ops.subtract_temporals(self.lhs.output_field.get_internal_type(), lhs, rhs) @deconstructible class F(Combinable): """An object capable of resolving references to existing query objects.""" def __init__(self, name): """ Arguments: * name: the name of the field this expression references """ self.name = name def __repr__(self): return "{}({})".format(self.__class__.__name__, self.name) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): return query.resolve_ref(self.name, allow_joins, reuse, summarize) def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def __eq__(self, other): return self.__class__ == other.__class__ and self.name == other.name def __hash__(self): return hash(self.name) class ResolvedOuterRef(F): """ An object that contains a reference to an outer query. In this case, the reference to the outer query has been resolved because the inner query has been used as a subquery. 
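
    A sketch of how such a reference typically arises (model names are
    hypothetical):

        Post.objects.annotate(
            newest_comment=Subquery(
                Comment.objects.filter(post=OuterRef('pk'))
                .order_by('-created').values('created')[:1]
            )
        )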
""" contains_aggregate = False def as_sql(self, *args, **kwargs): raise ValueError( 'This queryset contains a reference to an outer query and may ' 'only be used in a subquery.' ) def resolve_expression(self, *args, **kwargs): col = super().resolve_expression(*args, **kwargs) # FIXME: Rename possibly_multivalued to multivalued and fix detection # for non-multivalued JOINs (e.g. foreign key fields). This should take # into account only many-to-many and one-to-many relationships. col.possibly_multivalued = LOOKUP_SEP in self.name return col def relabeled_clone(self, relabels): return self def get_group_by_cols(self, alias=None): return [] class OuterRef(F): contains_aggregate = False def resolve_expression(self, *args, **kwargs): if isinstance(self.name, self.__class__): return self.name return ResolvedOuterRef(self.name) def relabeled_clone(self, relabels): return self class Func(SQLiteNumericMixin, Expression): """An SQL function call.""" function = None template = '%(function)s(%(expressions)s)' arg_joiner = ', ' arity = None # The number of arguments the function accepts. def __init__(self, *expressions, output_field=None, **extra): if self.arity is not None and len(expressions) != self.arity: raise TypeError( "'%s' takes exactly %s %s (%s given)" % ( self.__class__.__name__, self.arity, "argument" if self.arity == 1 else "arguments", len(expressions), ) ) super().__init__(output_field=output_field) self.source_expressions = self._parse_expressions(*expressions) self.extra = extra def __repr__(self): args = self.arg_joiner.join(str(arg) for arg in self.source_expressions) extra = {**self.extra, **self._get_repr_options()} if extra: extra = ', '.join(str(key) + '=' + str(val) for key, val in sorted(extra.items())) return "{}({}, {})".format(self.__class__.__name__, args, extra) return "{}({})".format(self.__class__.__name__, args) def _get_repr_options(self): """Return a dict of extra __init__() options to include in the repr.""" return {} def get_source_expressions(self): return self.source_expressions def set_source_expressions(self, exprs): self.source_expressions = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, arg in enumerate(c.source_expressions): c.source_expressions[pos] = arg.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, function=None, template=None, arg_joiner=None, **extra_context): connection.ops.check_expression_support(self) sql_parts = [] params = [] for arg in self.source_expressions: arg_sql, arg_params = compiler.compile(arg) sql_parts.append(arg_sql) params.extend(arg_params) data = {**self.extra, **extra_context} # Use the first supplied value in this order: the parameter to this # method, a value supplied in __init__()'s **extra (the value in # `data`), or the value defined on the class. 
if function is not None: data['function'] = function else: data.setdefault('function', self.function) template = template or data.get('template', self.template) arg_joiner = arg_joiner or data.get('arg_joiner', self.arg_joiner) data['expressions'] = data['field'] = arg_joiner.join(sql_parts) return template % data, params def copy(self): copy = super().copy() copy.source_expressions = self.source_expressions[:] copy.extra = self.extra.copy() return copy class Value(SQLiteNumericMixin, Expression): """Represent a wrapped value as a node within an expression.""" # Provide a default value for `for_save` in order to allow unresolved # instances to be compiled until a decision is taken in #25425. for_save = False def __init__(self, value, output_field=None): """ Arguments: * value: the value this expression represents. The value will be added into the sql parameter list and properly quoted. * output_field: an instance of the model field type that this expression will return, such as IntegerField() or CharField(). """ super().__init__(output_field=output_field) self.value = value def __repr__(self): return "{}({})".format(self.__class__.__name__, self.value) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) val = self.value output_field = self._output_field_or_none if output_field is not None: if self.for_save: val = output_field.get_db_prep_save(val, connection=connection) else: val = output_field.get_db_prep_value(val, connection=connection) if hasattr(output_field, 'get_placeholder'): return output_field.get_placeholder(val, compiler, connection), [val] if val is None: # cx_Oracle does not always convert None to the appropriate # NULL type (like in case expressions using numbers), so we # use a literal SQL NULL return 'NULL', [] return '%s', [val] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = super().resolve_expression(query, allow_joins, reuse, summarize, for_save) c.for_save = for_save return c def get_group_by_cols(self, alias=None): return [] def _resolve_output_field(self): if isinstance(self.value, str): return fields.CharField() if isinstance(self.value, bool): return fields.BooleanField() if isinstance(self.value, int): return fields.IntegerField() if isinstance(self.value, float): return fields.FloatField() if isinstance(self.value, datetime.datetime): return fields.DateTimeField() if isinstance(self.value, datetime.date): return fields.DateField() if isinstance(self.value, datetime.time): return fields.TimeField() if isinstance(self.value, datetime.timedelta): return fields.DurationField() if isinstance(self.value, Decimal): return fields.DecimalField() if isinstance(self.value, bytes): return fields.BinaryField() if isinstance(self.value, UUID): return fields.UUIDField() class RawSQL(Expression): def __init__(self, sql, params, output_field=None): if output_field is None: output_field = fields.Field() self.sql, self.params = sql, params super().__init__(output_field=output_field) def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.sql, self.params) def as_sql(self, compiler, connection): return '(%s)' % self.sql, self.params def get_group_by_cols(self, alias=None): return [self] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # Resolve parents fields used in raw SQL. 
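# Matching a parent's column name in the raw SQL and resolving the corresponding
# field reference forces the JOIN to that parent table (multi-table inheritance),
# which the handwritten SQL implicitly relies on.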
for parent in query.model._meta.get_parent_list(): for parent_field in parent._meta.local_fields: _, column_name = parent_field.get_attname_column() if column_name.lower() in self.sql.lower(): query.resolve_ref(parent_field.name, allow_joins, reuse, summarize) break return super().resolve_expression(query, allow_joins, reuse, summarize, for_save) class Star(Expression): def __repr__(self): return "'*'" def as_sql(self, compiler, connection): return '*', [] class Col(Expression): contains_column_references = True possibly_multivalued = False def __init__(self, alias, target, output_field=None): if output_field is None: output_field = target super().__init__(output_field=output_field) self.alias, self.target = alias, target def __repr__(self): alias, target = self.alias, self.target identifiers = (alias, str(target)) if alias else (str(target),) return '{}({})'.format(self.__class__.__name__, ', '.join(identifiers)) def as_sql(self, compiler, connection): alias, column = self.alias, self.target.column identifiers = (alias, column) if alias else (column,) sql = '.'.join(map(compiler.quote_name_unless_alias, identifiers)) return sql, [] def relabeled_clone(self, relabels): if self.alias is None: return self return self.__class__(relabels.get(self.alias, self.alias), self.target, self.output_field) def get_group_by_cols(self, alias=None): return [self] def get_db_converters(self, connection): if self.target == self.output_field: return self.output_field.get_db_converters(connection) return (self.output_field.get_db_converters(connection) + self.target.get_db_converters(connection)) class Ref(Expression): """ Reference to column alias of the query. For example, Ref('sum_cost') in qs.annotate(sum_cost=Sum('cost')) query. """ def __init__(self, refs, source): super().__init__() self.refs, self.source = refs, source def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.refs, self.source) def get_source_expressions(self): return [self.source] def set_source_expressions(self, exprs): self.source, = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # The sub-expression `source` has already been resolved, as this is # just a reference to the name of `source`. return self def relabeled_clone(self, relabels): return self def as_sql(self, compiler, connection): return connection.ops.quote_name(self.refs), [] def get_group_by_cols(self, alias=None): return [self] class ExpressionList(Func): """ An expression containing multiple expressions. Can be used to provide a list of expressions as an argument to another expression, like an ordering clause. """ template = '%(expressions)s' def __init__(self, *expressions, **extra): if not expressions: raise ValueError('%s requires at least one expression.' % self.__class__.__name__) super().__init__(*expressions, **extra) def __str__(self): return self.arg_joiner.join(str(arg) for arg in self.source_expressions) def as_sqlite(self, compiler, connection, **extra_context): # Casting to numeric is unnecessary. return self.as_sql(compiler, connection, **extra_context) class ExpressionWrapper(Expression): """ An expression that can wrap another expression so that it can provide extra context to the inner expression, such as the output_field. 
""" def __init__(self, expression, output_field): super().__init__(output_field=output_field) self.expression = expression def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def get_group_by_cols(self, alias=None): if isinstance(self.expression, Expression): expression = self.expression.copy() expression.output_field = self.output_field return expression.get_group_by_cols(alias=alias) # For non-expressions e.g. an SQL WHERE clause, the entire # `expression` must be included in the GROUP BY clause. return super().get_group_by_cols() def as_sql(self, compiler, connection): return compiler.compile(self.expression) def __repr__(self): return "{}({})".format(self.__class__.__name__, self.expression) class When(Expression): template = 'WHEN %(condition)s THEN %(result)s' # This isn't a complete conditional expression, must be used in Case(). conditional = False def __init__(self, condition=None, then=None, **lookups): if lookups: if condition is None: condition, lookups = Q(**lookups), None elif getattr(condition, 'conditional', False): condition, lookups = Q(condition, **lookups), None if condition is None or not getattr(condition, 'conditional', False) or lookups: raise TypeError( 'When() supports a Q object, a boolean expression, or lookups ' 'as a condition.' ) if isinstance(condition, Q) and not condition: raise ValueError("An empty Q() can't be used as a When() condition.") super().__init__(output_field=None) self.condition = condition self.result = self._parse_expressions(then)[0] def __str__(self): return "WHEN %r THEN %r" % (self.condition, self.result) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return [self.condition, self.result] def set_source_expressions(self, exprs): self.condition, self.result = exprs def get_source_fields(self): # We're only interested in the fields of the result expressions. return [self.result._output_field_or_none] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize if hasattr(c.condition, 'resolve_expression'): c.condition = c.condition.resolve_expression(query, allow_joins, reuse, summarize, False) c.result = c.result.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, template=None, **extra_context): connection.ops.check_expression_support(self) template_params = extra_context sql_params = [] condition_sql, condition_params = compiler.compile(self.condition) template_params['condition'] = condition_sql sql_params.extend(condition_params) result_sql, result_params = compiler.compile(self.result) template_params['result'] = result_sql sql_params.extend(result_params) template = template or self.template return template % template_params, sql_params def get_group_by_cols(self, alias=None): # This is not a complete expression and cannot be used in GROUP BY. 
cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols class Case(Expression): """ An SQL searched CASE expression: CASE WHEN n > 0 THEN 'positive' WHEN n < 0 THEN 'negative' ELSE 'zero' END """ template = 'CASE %(cases)s ELSE %(default)s END' case_joiner = ' ' def __init__(self, *cases, default=None, output_field=None, **extra): if not all(isinstance(case, When) for case in cases): raise TypeError("Positional arguments must all be When objects.") super().__init__(output_field) self.cases = list(cases) self.default = self._parse_expressions(default)[0] self.extra = extra def __str__(self): return "CASE %s, ELSE %r" % (', '.join(str(c) for c in self.cases), self.default) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return self.cases + [self.default] def set_source_expressions(self, exprs): *self.cases, self.default = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, case in enumerate(c.cases): c.cases[pos] = case.resolve_expression(query, allow_joins, reuse, summarize, for_save) c.default = c.default.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def copy(self): c = super().copy() c.cases = c.cases[:] return c def as_sql(self, compiler, connection, template=None, case_joiner=None, **extra_context): connection.ops.check_expression_support(self) if not self.cases: return compiler.compile(self.default) template_params = {**self.extra, **extra_context} case_parts = [] sql_params = [] for case in self.cases: try: case_sql, case_params = compiler.compile(case) except EmptyResultSet: continue case_parts.append(case_sql) sql_params.extend(case_params) default_sql, default_params = compiler.compile(self.default) if not case_parts: return default_sql, default_params case_joiner = case_joiner or self.case_joiner template_params['cases'] = case_joiner.join(case_parts) template_params['default'] = default_sql sql_params.extend(default_params) template = template or template_params.get('template', self.template) sql = template % template_params if self._output_field_or_none is not None: sql = connection.ops.unification_cast_sql(self.output_field) % sql return sql, sql_params def get_group_by_cols(self, alias=None): if not self.cases: return self.default.get_group_by_cols(alias) return super().get_group_by_cols(alias) class Subquery(Expression): """ An explicit subquery. It may contain OuterRef() references to the outer query which will be resolved when it is applied to that query. """ template = '(%(subquery)s)' contains_aggregate = False def __init__(self, queryset, output_field=None, **extra): # Allow the usage of both QuerySet and sql.Query objects. 
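# A QuerySet exposes its underlying sql.Query as `.query`; a bare Query has no such
# attribute, so getattr() falls back to the object itself.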
self.query = getattr(queryset, 'query', queryset) self.extra = extra super().__init__(output_field) def __getstate__(self): state = super().__getstate__() args, kwargs = state['_constructor_args'] if args: args = (self.query, *args[1:]) else: kwargs['queryset'] = self.query state['_constructor_args'] = args, kwargs return state def get_source_expressions(self): return [self.query] def set_source_expressions(self, exprs): self.query = exprs[0] def _resolve_output_field(self): return self.query.output_field def copy(self): clone = super().copy() clone.query = clone.query.clone() return clone @property def external_aliases(self): return self.query.external_aliases def get_external_cols(self): return self.query.get_external_cols() def as_sql(self, compiler, connection, template=None, query=None, **extra_context): connection.ops.check_expression_support(self) template_params = {**self.extra, **extra_context} query = query or self.query subquery_sql, sql_params = query.as_sql(compiler, connection) template_params['subquery'] = subquery_sql[1:-1] template = template or template_params.get('template', self.template) sql = template % template_params return sql, sql_params def get_group_by_cols(self, alias=None): if alias: return [Ref(alias, self)] external_cols = self.get_external_cols() if any(col.possibly_multivalued for col in external_cols): return [self] return external_cols class Exists(Subquery): template = 'EXISTS(%(subquery)s)' output_field = fields.BooleanField() def __init__(self, queryset, negated=False, **kwargs): self.negated = negated super().__init__(queryset, **kwargs) def __invert__(self): clone = self.copy() clone.negated = not self.negated return clone def as_sql(self, compiler, connection, template=None, **extra_context): query = self.query.exists(using=connection.alias) sql, params = super().as_sql( compiler, connection, template=template, query=query, **extra_context, ) if self.negated: sql = 'NOT {}'.format(sql) return sql, params def select_format(self, compiler, sql, params): # Wrap EXISTS() with a CASE WHEN expression if a database backend # (e.g. Oracle) doesn't support boolean expression in SELECT or GROUP # BY list. 
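# On such backends the expression then compiles to
# CASE WHEN EXISTS(SELECT ...) THEN 1 ELSE 0 END, which is valid in a SELECT list
# and in GROUP BY.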
if not compiler.connection.features.supports_boolean_expr_in_select_clause: sql = 'CASE WHEN {} THEN 1 ELSE 0 END'.format(sql) return sql, params class OrderBy(BaseExpression): template = '%(expression)s %(ordering)s' conditional = False def __init__(self, expression, descending=False, nulls_first=False, nulls_last=False): if nulls_first and nulls_last: raise ValueError('nulls_first and nulls_last are mutually exclusive') self.nulls_first = nulls_first self.nulls_last = nulls_last self.descending = descending if not hasattr(expression, 'resolve_expression'): raise ValueError('expression must be an expression type') self.expression = expression def __repr__(self): return "{}({}, descending={})".format( self.__class__.__name__, self.expression, self.descending) def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def as_sql(self, compiler, connection, template=None, **extra_context): template = template or self.template if connection.features.supports_order_by_nulls_modifier: if self.nulls_last: template = '%s NULLS LAST' % template elif self.nulls_first: template = '%s NULLS FIRST' % template else: if self.nulls_last and not ( self.descending and connection.features.order_by_nulls_first ): template = '%%(expression)s IS NULL, %s' % template elif self.nulls_first and not ( not self.descending and connection.features.order_by_nulls_first ): template = '%%(expression)s IS NOT NULL, %s' % template connection.ops.check_expression_support(self) expression_sql, params = compiler.compile(self.expression) placeholders = { 'expression': expression_sql, 'ordering': 'DESC' if self.descending else 'ASC', **extra_context, } params *= template.count('%(expression)s') return (template % placeholders).rstrip(), params def as_oracle(self, compiler, connection): # Oracle doesn't allow ORDER BY EXISTS() unless it's wrapped in # a CASE WHEN. if isinstance(self.expression, Exists): copy = self.copy() copy.expression = Case( When(self.expression, then=True), default=False, ) return copy.as_sql(compiler, connection) return self.as_sql(compiler, connection) def get_group_by_cols(self, alias=None): cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def reverse_ordering(self): self.descending = not self.descending if self.nulls_first or self.nulls_last: self.nulls_first = not self.nulls_first self.nulls_last = not self.nulls_last return self def asc(self): self.descending = False def desc(self): self.descending = True class Window(SQLiteNumericMixin, Expression): template = '%(expression)s OVER (%(window)s)' # Although the main expression may either be an aggregate or an # expression with an aggregate function, the GROUP BY that will # be introduced in the query as a result is not desired. contains_aggregate = False contains_over_clause = True filterable = False def __init__(self, expression, partition_by=None, order_by=None, frame=None, output_field=None): self.partition_by = partition_by self.order_by = order_by self.frame = frame if not getattr(expression, 'window_compatible', False): raise ValueError( "Expression '%s' isn't compatible with OVER clauses." 
% expression.__class__.__name__ ) if self.partition_by is not None: if not isinstance(self.partition_by, (tuple, list)): self.partition_by = (self.partition_by,) self.partition_by = ExpressionList(*self.partition_by) if self.order_by is not None: if isinstance(self.order_by, (list, tuple)): self.order_by = ExpressionList(*self.order_by) elif not isinstance(self.order_by, BaseExpression): raise ValueError( 'order_by must be either an Expression or a sequence of ' 'expressions.' ) super().__init__(output_field=output_field) self.source_expression = self._parse_expressions(expression)[0] def _resolve_output_field(self): return self.source_expression.output_field def get_source_expressions(self): return [self.source_expression, self.partition_by, self.order_by, self.frame] def set_source_expressions(self, exprs): self.source_expression, self.partition_by, self.order_by, self.frame = exprs def as_sql(self, compiler, connection, template=None): connection.ops.check_expression_support(self) if not connection.features.supports_over_clause: raise NotSupportedError('This backend does not support window expressions.') expr_sql, params = compiler.compile(self.source_expression) window_sql, window_params = [], [] if self.partition_by is not None: sql_expr, sql_params = self.partition_by.as_sql( compiler=compiler, connection=connection, template='PARTITION BY %(expressions)s', ) window_sql.extend(sql_expr) window_params.extend(sql_params) if self.order_by is not None: window_sql.append(' ORDER BY ') order_sql, order_params = compiler.compile(self.order_by) window_sql.extend(order_sql) window_params.extend(order_params) if self.frame: frame_sql, frame_params = compiler.compile(self.frame) window_sql.append(' ' + frame_sql) window_params.extend(frame_params) params.extend(window_params) template = template or self.template return template % { 'expression': expr_sql, 'window': ''.join(window_sql).strip() }, params def as_sqlite(self, compiler, connection): if isinstance(self.output_field, fields.DecimalField): # Casting to numeric must be outside of the window expression. copy = self.copy() source_expressions = copy.get_source_expressions() source_expressions[0].output_field = fields.FloatField() copy.set_source_expressions(source_expressions) return super(Window, copy).as_sqlite(compiler, connection) return self.as_sql(compiler, connection) def __str__(self): return '{} OVER ({}{}{})'.format( str(self.source_expression), 'PARTITION BY ' + str(self.partition_by) if self.partition_by else '', 'ORDER BY ' + str(self.order_by) if self.order_by else '', str(self.frame or ''), ) def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] class WindowFrame(Expression): """ Model the frame clause in window expressions. There are two types of frame clauses which are subclasses, however, all processing and validation (by no means intended to be complete) is done here. Thus, providing an end for a frame is optional (the default is UNBOUNDED FOLLOWING, which is the last row in the frame). 
""" template = '%(frame_type)s BETWEEN %(start)s AND %(end)s' def __init__(self, start=None, end=None): self.start = Value(start) self.end = Value(end) def set_source_expressions(self, exprs): self.start, self.end = exprs def get_source_expressions(self): return [self.start, self.end] def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) start, end = self.window_frame_start_end(connection, self.start.value, self.end.value) return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, }, [] def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] def __str__(self): if self.start.value is not None and self.start.value < 0: start = '%d %s' % (abs(self.start.value), connection.ops.PRECEDING) elif self.start.value is not None and self.start.value == 0: start = connection.ops.CURRENT_ROW else: start = connection.ops.UNBOUNDED_PRECEDING if self.end.value is not None and self.end.value > 0: end = '%d %s' % (self.end.value, connection.ops.FOLLOWING) elif self.end.value is not None and self.end.value == 0: end = connection.ops.CURRENT_ROW else: end = connection.ops.UNBOUNDED_FOLLOWING return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, } def window_frame_start_end(self, connection, start, end): raise NotImplementedError('Subclasses must implement window_frame_start_end().') class RowRange(WindowFrame): frame_type = 'ROWS' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_rows_start_end(start, end) class ValueRange(WindowFrame): frame_type = 'RANGE' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_range_start_end(start, end)
3e573e5a31e68d2e435f0d3b50b0d1ad7f5edf4c23b68a0ea642d6212e4bca82
import itertools import math from copy import copy from django.core.exceptions import EmptyResultSet from django.db.models.expressions import Case, Func, Value, When from django.db.models.fields import ( CharField, DateTimeField, Field, IntegerField, UUIDField, ) from django.db.models.query_utils import RegisterLookupMixin from django.utils.datastructures import OrderedSet from django.utils.functional import cached_property from django.utils.hashable import make_hashable class Lookup: lookup_name = None prepare_rhs = True can_use_none_as_rhs = False def __init__(self, lhs, rhs): self.lhs, self.rhs = lhs, rhs self.rhs = self.get_prep_lookup() if hasattr(self.lhs, 'get_bilateral_transforms'): bilateral_transforms = self.lhs.get_bilateral_transforms() else: bilateral_transforms = [] if bilateral_transforms: # Warn the user as soon as possible if they are trying to apply # a bilateral transformation on a nested QuerySet: that won't work. from django.db.models.sql.query import ( # avoid circular import Query, ) if isinstance(rhs, Query): raise NotImplementedError("Bilateral transformations on nested querysets are not implemented.") self.bilateral_transforms = bilateral_transforms def apply_bilateral_transforms(self, value): for transform in self.bilateral_transforms: value = transform(value) return value def batch_process_rhs(self, compiler, connection, rhs=None): if rhs is None: rhs = self.rhs if self.bilateral_transforms: sqls, sqls_params = [], [] for p in rhs: value = Value(p, output_field=self.lhs.output_field) value = self.apply_bilateral_transforms(value) value = value.resolve_expression(compiler.query) sql, sql_params = compiler.compile(value) sqls.append(sql) sqls_params.extend(sql_params) else: _, params = self.get_db_prep_lookup(rhs, connection) sqls, sqls_params = ['%s'] * len(params), params return sqls, sqls_params def get_source_expressions(self): if self.rhs_is_direct_value(): return [self.lhs] return [self.lhs, self.rhs] def set_source_expressions(self, new_exprs): if len(new_exprs) == 1: self.lhs = new_exprs[0] else: self.lhs, self.rhs = new_exprs def get_prep_lookup(self): if hasattr(self.rhs, 'resolve_expression'): return self.rhs if self.prepare_rhs and hasattr(self.lhs.output_field, 'get_prep_value'): return self.lhs.output_field.get_prep_value(self.rhs) return self.rhs def get_db_prep_lookup(self, value, connection): return ('%s', [value]) def process_lhs(self, compiler, connection, lhs=None): lhs = lhs or self.lhs if hasattr(lhs, 'resolve_expression'): lhs = lhs.resolve_expression(compiler.query) return compiler.compile(lhs) def process_rhs(self, compiler, connection): value = self.rhs if self.bilateral_transforms: if self.rhs_is_direct_value(): # Do not call get_db_prep_lookup here as the value will be # transformed before being used for lookup value = Value(value, output_field=self.lhs.output_field) value = self.apply_bilateral_transforms(value) value = value.resolve_expression(compiler.query) if hasattr(value, 'as_sql'): sql, params = compiler.compile(value) # Ensure expression is wrapped in parentheses to respect operator # precedence but avoid double wrapping as it can be misinterpreted # on some backends (e.g. subqueries on SQLite). 
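# For example, a resolved F() or Func() on the right-hand side may compile to
# "UPPER(other_col)"; the check below wraps it as "(UPPER(other_col))" only when the
# compiled SQL does not already begin with an opening parenthesis.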
if sql and sql[0] != '(': sql = '(%s)' % sql return sql, params else: return self.get_db_prep_lookup(value, connection) def rhs_is_direct_value(self): return not hasattr(self.rhs, 'as_sql') def relabeled_clone(self, relabels): new = copy(self) new.lhs = new.lhs.relabeled_clone(relabels) if hasattr(new.rhs, 'relabeled_clone'): new.rhs = new.rhs.relabeled_clone(relabels) return new def get_group_by_cols(self, alias=None): cols = self.lhs.get_group_by_cols() if hasattr(self.rhs, 'get_group_by_cols'): cols.extend(self.rhs.get_group_by_cols()) return cols def as_sql(self, compiler, connection): raise NotImplementedError def as_oracle(self, compiler, connection): # Oracle doesn't allow EXISTS() and filters to be compared to another # expression unless they're wrapped in a CASE WHEN. wrapped = False exprs = [] for expr in (self.lhs, self.rhs): if connection.ops.conditional_expression_supported_in_where_clause(expr): expr = Case(When(expr, then=True), default=False) wrapped = True exprs.append(expr) lookup = type(self)(*exprs) if wrapped else self return lookup.as_sql(compiler, connection) @cached_property def contains_aggregate(self): return self.lhs.contains_aggregate or getattr(self.rhs, 'contains_aggregate', False) @cached_property def contains_over_clause(self): return self.lhs.contains_over_clause or getattr(self.rhs, 'contains_over_clause', False) @property def is_summary(self): return self.lhs.is_summary or getattr(self.rhs, 'is_summary', False) @property def identity(self): return self.__class__, self.lhs, self.rhs def __eq__(self, other): if not isinstance(other, Lookup): return NotImplemented return self.identity == other.identity def __hash__(self): return hash(make_hashable(self.identity)) class Transform(RegisterLookupMixin, Func): """ RegisterLookupMixin() is first so that get_lookup() and get_transform() first examine self and then check output_field. """ bilateral = False arity = 1 @property def lhs(self): return self.get_source_expressions()[0] def get_bilateral_transforms(self): if hasattr(self.lhs, 'get_bilateral_transforms'): bilateral_transforms = self.lhs.get_bilateral_transforms() else: bilateral_transforms = [] if self.bilateral: bilateral_transforms.append(self.__class__) return bilateral_transforms class BuiltinLookup(Lookup): def process_lhs(self, compiler, connection, lhs=None): lhs_sql, params = super().process_lhs(compiler, connection, lhs) field_internal_type = self.lhs.output_field.get_internal_type() db_type = self.lhs.output_field.db_type(connection=connection) lhs_sql = connection.ops.field_cast_sql( db_type, field_internal_type) % lhs_sql lhs_sql = connection.ops.lookup_cast(self.lookup_name, field_internal_type) % lhs_sql return lhs_sql, list(params) def as_sql(self, compiler, connection): lhs_sql, params = self.process_lhs(compiler, connection) rhs_sql, rhs_params = self.process_rhs(compiler, connection) params.extend(rhs_params) rhs_sql = self.get_rhs_op(connection, rhs_sql) return '%s %s' % (lhs_sql, rhs_sql), params def get_rhs_op(self, connection, rhs): return connection.operators[self.lookup_name] % rhs class FieldGetDbPrepValueMixin: """ Some lookups require Field.get_db_prep_value() to be called on their inputs. """ get_db_prep_lookup_value_is_iterable = False def get_db_prep_lookup(self, value, connection): # For relational fields, use the 'target_field' attribute of the # output_field. 
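# For a ForeignKey, target_field is the referenced column (usually the related
# model's primary key), so lookup values are prepared with that field's
# backend-specific conversions.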
field = getattr(self.lhs.output_field, 'target_field', None) get_db_prep_value = getattr(field, 'get_db_prep_value', None) or self.lhs.output_field.get_db_prep_value return ( '%s', [get_db_prep_value(v, connection, prepared=True) for v in value] if self.get_db_prep_lookup_value_is_iterable else [get_db_prep_value(value, connection, prepared=True)] ) class FieldGetDbPrepValueIterableMixin(FieldGetDbPrepValueMixin): """ Some lookups require Field.get_db_prep_value() to be called on each value in an iterable. """ get_db_prep_lookup_value_is_iterable = True def get_prep_lookup(self): if hasattr(self.rhs, 'resolve_expression'): return self.rhs prepared_values = [] for rhs_value in self.rhs: if hasattr(rhs_value, 'resolve_expression'): # An expression will be handled by the database but can coexist # alongside real values. pass elif self.prepare_rhs and hasattr(self.lhs.output_field, 'get_prep_value'): rhs_value = self.lhs.output_field.get_prep_value(rhs_value) prepared_values.append(rhs_value) return prepared_values def process_rhs(self, compiler, connection): if self.rhs_is_direct_value(): # rhs should be an iterable of values. Use batch_process_rhs() # to prepare/transform those values. return self.batch_process_rhs(compiler, connection) else: return super().process_rhs(compiler, connection) def resolve_expression_parameter(self, compiler, connection, sql, param): params = [param] if hasattr(param, 'resolve_expression'): param = param.resolve_expression(compiler.query) if hasattr(param, 'as_sql'): sql, params = compiler.compile(param) return sql, params def batch_process_rhs(self, compiler, connection, rhs=None): pre_processed = super().batch_process_rhs(compiler, connection, rhs) # The params list may contain expressions which compile to a # sql/param pair. Zip them to get sql and param pairs that refer to the # same argument and attempt to replace them with the result of # compiling the param step. sql, params = zip(*( self.resolve_expression_parameter(compiler, connection, sql, param) for sql, param in zip(*pre_processed) )) params = itertools.chain.from_iterable(params) return sql, tuple(params) class PostgresOperatorLookup(FieldGetDbPrepValueMixin, Lookup): """Lookup defined by operators on PostgreSQL.""" postgres_operator = None def as_postgresql(self, compiler, connection): lhs, lhs_params = self.process_lhs(compiler, connection) rhs, rhs_params = self.process_rhs(compiler, connection) params = tuple(lhs_params) + tuple(rhs_params) return '%s %s %s' % (lhs, self.postgres_operator, rhs), params @Field.register_lookup class Exact(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'exact' def process_rhs(self, compiler, connection): from django.db.models.sql.query import Query if isinstance(self.rhs, Query): if self.rhs.has_limit_one(): if not self.rhs.has_select_fields: self.rhs.clear_select_clause() self.rhs.add_fields(['pk']) else: raise ValueError( 'The QuerySet value for an exact lookup must be limited to ' 'one result using slicing.' ) return super().process_rhs(compiler, connection) def as_sql(self, compiler, connection): # Avoid comparison against direct rhs if lhs is a boolean value. That # turns "boolfield__exact=True" into "WHERE boolean_field" instead of # "WHERE boolean_field = True" when allowed. 
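# When the left-hand side is itself a conditional expression (e.g. a boolean
# annotation) and the backend allows it in WHERE, comparing against True compiles
# to "WHERE <expr>" and against False to "WHERE NOT <expr>".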
if ( isinstance(self.rhs, bool) and getattr(self.lhs, 'conditional', False) and connection.ops.conditional_expression_supported_in_where_clause(self.lhs) ): lhs_sql, params = self.process_lhs(compiler, connection) template = '%s' if self.rhs else 'NOT %s' return template % lhs_sql, params return super().as_sql(compiler, connection) @Field.register_lookup class IExact(BuiltinLookup): lookup_name = 'iexact' prepare_rhs = False def process_rhs(self, qn, connection): rhs, params = super().process_rhs(qn, connection) if params: params[0] = connection.ops.prep_for_iexact_query(params[0]) return rhs, params @Field.register_lookup class GreaterThan(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'gt' @Field.register_lookup class GreaterThanOrEqual(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'gte' @Field.register_lookup class LessThan(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'lt' @Field.register_lookup class LessThanOrEqual(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'lte' class IntegerFieldFloatRounding: """ Allow floats to work as query values for IntegerField. Without this, the decimal portion of the float would always be discarded. """ def get_prep_lookup(self): if isinstance(self.rhs, float): self.rhs = math.ceil(self.rhs) return super().get_prep_lookup() @IntegerField.register_lookup class IntegerGreaterThanOrEqual(IntegerFieldFloatRounding, GreaterThanOrEqual): pass @IntegerField.register_lookup class IntegerLessThan(IntegerFieldFloatRounding, LessThan): pass @Field.register_lookup class In(FieldGetDbPrepValueIterableMixin, BuiltinLookup): lookup_name = 'in' def process_rhs(self, compiler, connection): db_rhs = getattr(self.rhs, '_db', None) if db_rhs is not None and db_rhs != connection.alias: raise ValueError( "Subqueries aren't allowed across different databases. Force " "the inner query to be evaluated using `list(inner_query)`." ) if self.rhs_is_direct_value(): # Remove None from the list as NULL is never equal to anything. try: rhs = OrderedSet(self.rhs) rhs.discard(None) except TypeError: # Unhashable items in self.rhs rhs = [r for r in self.rhs if r is not None] if not rhs: raise EmptyResultSet # rhs should be an iterable; use batch_process_rhs() to # prepare/transform those values. sqls, sqls_params = self.batch_process_rhs(compiler, connection, rhs) placeholder = '(' + ', '.join(sqls) + ')' return (placeholder, sqls_params) else: if not getattr(self.rhs, 'has_select_fields', True): self.rhs.clear_select_clause() self.rhs.add_fields(['pk']) return super().process_rhs(compiler, connection) def get_rhs_op(self, connection, rhs): return 'IN %s' % rhs def as_sql(self, compiler, connection): max_in_list_size = connection.ops.max_in_list_size() if self.rhs_is_direct_value() and max_in_list_size and len(self.rhs) > max_in_list_size: return self.split_parameter_list_as_sql(compiler, connection) return super().as_sql(compiler, connection) def split_parameter_list_as_sql(self, compiler, connection): # This is a special case for databases which limit the number of # elements which can appear in an 'IN' clause. 
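# Oracle, for example, caps IN lists at 1000 elements, so a longer list is emitted
# as "(col IN (...) OR col IN (...) OR ...)" in chunks of max_in_list_size values.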
max_in_list_size = connection.ops.max_in_list_size() lhs, lhs_params = self.process_lhs(compiler, connection) rhs, rhs_params = self.batch_process_rhs(compiler, connection) in_clause_elements = ['('] params = [] for offset in range(0, len(rhs_params), max_in_list_size): if offset > 0: in_clause_elements.append(' OR ') in_clause_elements.append('%s IN (' % lhs) params.extend(lhs_params) sqls = rhs[offset: offset + max_in_list_size] sqls_params = rhs_params[offset: offset + max_in_list_size] param_group = ', '.join(sqls) in_clause_elements.append(param_group) in_clause_elements.append(')') params.extend(sqls_params) in_clause_elements.append(')') return ''.join(in_clause_elements), params class PatternLookup(BuiltinLookup): param_pattern = '%%%s%%' prepare_rhs = False def get_rhs_op(self, connection, rhs): # Assume we are in startswith. We need to produce SQL like: # col LIKE %s, ['thevalue%'] # For python values we can (and should) do that directly in Python, # but if the value is for example reference to other column, then # we need to add the % pattern match to the lookup by something like # col LIKE othercol || '%%' # So, for Python values we don't need any special pattern, but for # SQL reference values or SQL transformations we need the correct # pattern added. if hasattr(self.rhs, 'as_sql') or self.bilateral_transforms: pattern = connection.pattern_ops[self.lookup_name].format(connection.pattern_esc) return pattern.format(rhs) else: return super().get_rhs_op(connection, rhs) def process_rhs(self, qn, connection): rhs, params = super().process_rhs(qn, connection) if self.rhs_is_direct_value() and params and not self.bilateral_transforms: params[0] = self.param_pattern % connection.ops.prep_for_like_query(params[0]) return rhs, params @Field.register_lookup class Contains(PatternLookup): lookup_name = 'contains' @Field.register_lookup class IContains(Contains): lookup_name = 'icontains' @Field.register_lookup class StartsWith(PatternLookup): lookup_name = 'startswith' param_pattern = '%s%%' @Field.register_lookup class IStartsWith(StartsWith): lookup_name = 'istartswith' @Field.register_lookup class EndsWith(PatternLookup): lookup_name = 'endswith' param_pattern = '%%%s' @Field.register_lookup class IEndsWith(EndsWith): lookup_name = 'iendswith' @Field.register_lookup class Range(FieldGetDbPrepValueIterableMixin, BuiltinLookup): lookup_name = 'range' def get_rhs_op(self, connection, rhs): return "BETWEEN %s AND %s" % (rhs[0], rhs[1]) @Field.register_lookup class IsNull(BuiltinLookup): lookup_name = 'isnull' prepare_rhs = False def as_sql(self, compiler, connection): if not isinstance(self.rhs, bool): raise ValueError( 'The QuerySet value for an isnull lookup must be True or ' 'False.' 
) sql, params = compiler.compile(self.lhs) if self.rhs: return "%s IS NULL" % sql, params else: return "%s IS NOT NULL" % sql, params @Field.register_lookup class Regex(BuiltinLookup): lookup_name = 'regex' prepare_rhs = False def as_sql(self, compiler, connection): if self.lookup_name in connection.operators: return super().as_sql(compiler, connection) else: lhs, lhs_params = self.process_lhs(compiler, connection) rhs, rhs_params = self.process_rhs(compiler, connection) sql_template = connection.ops.regex_lookup(self.lookup_name) return sql_template % (lhs, rhs), lhs_params + rhs_params @Field.register_lookup class IRegex(Regex): lookup_name = 'iregex' class YearLookup(Lookup): def year_lookup_bounds(self, connection, year): from django.db.models.functions import ExtractIsoYear iso_year = isinstance(self.lhs, ExtractIsoYear) output_field = self.lhs.lhs.output_field if isinstance(output_field, DateTimeField): bounds = connection.ops.year_lookup_bounds_for_datetime_field( year, iso_year=iso_year, ) else: bounds = connection.ops.year_lookup_bounds_for_date_field( year, iso_year=iso_year, ) return bounds def as_sql(self, compiler, connection): # Avoid the extract operation if the rhs is a direct value to allow # indexes to be used. if self.rhs_is_direct_value(): # Skip the extract part by directly using the originating field, # that is self.lhs.lhs. lhs_sql, params = self.process_lhs(compiler, connection, self.lhs.lhs) rhs_sql, _ = self.process_rhs(compiler, connection) rhs_sql = self.get_direct_rhs_sql(connection, rhs_sql) start, finish = self.year_lookup_bounds(connection, self.rhs) params.extend(self.get_bound_params(start, finish)) return '%s %s' % (lhs_sql, rhs_sql), params return super().as_sql(compiler, connection) def get_direct_rhs_sql(self, connection, rhs): return connection.operators[self.lookup_name] % rhs def get_bound_params(self, start, finish): raise NotImplementedError( 'subclasses of YearLookup must provide a get_bound_params() method' ) class YearExact(YearLookup, Exact): def get_direct_rhs_sql(self, connection, rhs): return 'BETWEEN %s AND %s' def get_bound_params(self, start, finish): return (start, finish) class YearGt(YearLookup, GreaterThan): def get_bound_params(self, start, finish): return (finish,) class YearGte(YearLookup, GreaterThanOrEqual): def get_bound_params(self, start, finish): return (start,) class YearLt(YearLookup, LessThan): def get_bound_params(self, start, finish): return (start,) class YearLte(YearLookup, LessThanOrEqual): def get_bound_params(self, start, finish): return (finish,) class UUIDTextMixin: """ Strip hyphens from a value when filtering a UUIDField on backends without a native datatype for UUID. 
""" def process_rhs(self, qn, connection): if not connection.features.has_native_uuid_field: from django.db.models.functions import Replace if self.rhs_is_direct_value(): self.rhs = Value(self.rhs) self.rhs = Replace(self.rhs, Value('-'), Value(''), output_field=CharField()) rhs, params = super().process_rhs(qn, connection) return rhs, params @UUIDField.register_lookup class UUIDIExact(UUIDTextMixin, IExact): pass @UUIDField.register_lookup class UUIDContains(UUIDTextMixin, Contains): pass @UUIDField.register_lookup class UUIDIContains(UUIDTextMixin, IContains): pass @UUIDField.register_lookup class UUIDStartsWith(UUIDTextMixin, StartsWith): pass @UUIDField.register_lookup class UUIDIStartsWith(UUIDTextMixin, IStartsWith): pass @UUIDField.register_lookup class UUIDEndsWith(UUIDTextMixin, EndsWith): pass @UUIDField.register_lookup class UUIDIEndsWith(UUIDTextMixin, IEndsWith): pass
1edae775705bc0a38d3856b22044a336ee69c8a6f529f34434f4b33096bb873b
""" Create SQL statements for QuerySets. The code in here encapsulates all of the SQL construction so that QuerySets themselves do not have to (and could be backed by things other than SQL databases). The abstraction barrier only works one way: this module has to know all about the internals of models in order to get the information it needs. """ import copy import difflib import functools import sys from collections import Counter, namedtuple from collections.abc import Iterator, Mapping from itertools import chain, count, product from string import ascii_uppercase from django.core.exceptions import FieldDoesNotExist, FieldError from django.db import DEFAULT_DB_ALIAS, NotSupportedError, connections from django.db.models.aggregates import Count from django.db.models.constants import LOOKUP_SEP from django.db.models.expressions import ( BaseExpression, Col, Exists, F, OuterRef, Ref, ResolvedOuterRef, ) from django.db.models.fields import Field from django.db.models.fields.related_lookups import MultiColSource from django.db.models.lookups import Lookup from django.db.models.query_utils import ( Q, check_rel_lookup_compatibility, refs_expression, ) from django.db.models.sql.constants import INNER, LOUTER, ORDER_DIR, SINGLE from django.db.models.sql.datastructures import ( BaseTable, Empty, Join, MultiJoin, ) from django.db.models.sql.where import ( AND, OR, ExtraWhere, NothingNode, WhereNode, ) from django.utils.functional import cached_property from django.utils.hashable import make_hashable from django.utils.tree import Node __all__ = ['Query', 'RawQuery'] def get_field_names_from_opts(opts): return set(chain.from_iterable( (f.name, f.attname) if f.concrete else (f.name,) for f in opts.get_fields() )) def get_children_from_q(q): for child in q.children: if isinstance(child, Node): yield from get_children_from_q(child) else: yield child JoinInfo = namedtuple( 'JoinInfo', ('final_field', 'targets', 'opts', 'joins', 'path', 'transform_function') ) class RawQuery: """A single raw SQL query.""" def __init__(self, sql, using, params=()): self.params = params self.sql = sql self.using = using self.cursor = None # Mirror some properties of a normal query so that # the compiler can be used to process results. self.low_mark, self.high_mark = 0, None # Used for offset/limit self.extra_select = {} self.annotation_select = {} def chain(self, using): return self.clone(using) def clone(self, using): return RawQuery(self.sql, using, params=self.params) def get_columns(self): if self.cursor is None: self._execute_query() converter = connections[self.using].introspection.identifier_converter return [converter(column_meta[0]) for column_meta in self.cursor.description] def __iter__(self): # Always execute a new query for a new iterator. # This could be optimized with a cache at the expense of RAM. self._execute_query() if not connections[self.using].features.can_use_chunked_reads: # If the database can't use chunked reads we need to make sure we # evaluate the entire query up front. 
result = list(self.cursor) else: result = self.cursor return iter(result) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) @property def params_type(self): if self.params is None: return None return dict if isinstance(self.params, Mapping) else tuple def __str__(self): if self.params_type is None: return self.sql return self.sql % self.params_type(self.params) def _execute_query(self): connection = connections[self.using] # Adapt parameters to the database, as much as possible considering # that the target type isn't known. See #17755. params_type = self.params_type adapter = connection.ops.adapt_unknown_value if params_type is tuple: params = tuple(adapter(val) for val in self.params) elif params_type is dict: params = {key: adapter(val) for key, val in self.params.items()} elif params_type is None: params = None else: raise RuntimeError("Unexpected params type: %s" % params_type) self.cursor = connection.cursor() self.cursor.execute(self.sql, params) class Query(BaseExpression): """A single SQL query.""" alias_prefix = 'T' subq_aliases = frozenset([alias_prefix]) compiler = 'SQLCompiler' def __init__(self, model, where=WhereNode, alias_cols=True): self.model = model self.alias_refcount = {} # alias_map is the most important data structure regarding joins. # It's used for recording which joins exist in the query and what # types they are. The key is the alias of the joined table (possibly # the table name) and the value is a Join-like object (see # sql.datastructures.Join for more information). self.alias_map = {} # Whether to provide alias to columns during reference resolving. self.alias_cols = alias_cols # Sometimes the query contains references to aliases in outer queries (as # a result of split_exclude). Correct alias quoting needs to know these # aliases too. # Map external tables to whether they are aliased. self.external_aliases = {} self.table_map = {} # Maps table names to list of aliases. self.default_cols = True self.default_ordering = True self.standard_ordering = True self.used_aliases = set() self.filter_is_sticky = False self.subquery = False # SQL-related attributes # Select and related select clauses are expressions to use in the # SELECT clause of the query. # The select is used for cases where we want to set up the select # clause to contain other than default fields (values(), subqueries...) # Note that annotations go to annotations dictionary. self.select = () self.where = where() self.where_class = where # The group_by attribute can have one of the following forms: # - None: no group by at all in the query # - A tuple of expressions: group by (at least) those expressions. # String refs are also allowed for now. # - True: group by all select fields of the model # See compiler.get_group_by() for details. self.group_by = None self.order_by = () self.low_mark, self.high_mark = 0, None # Used for offset/limit self.distinct = False self.distinct_fields = () self.select_for_update = False self.select_for_update_nowait = False self.select_for_update_skip_locked = False self.select_for_update_of = () self.select_for_no_key_update = False self.select_related = False # Arbitrary limit for select_related to prevents infinite recursion. self.max_depth = 5 # Holds the selects defined by a call to values() or values_list() # excluding annotation_select and extra_select. 
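# Populated by the values()/values_list() machinery (set_values()) with the
# requested field names.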
self.values_select = () # SQL annotation-related attributes self.annotations = {} # Maps alias -> Annotation Expression self.annotation_select_mask = None self._annotation_select_cache = None # Set combination attributes self.combinator = None self.combinator_all = False self.combined_queries = () # These are for extensions. The contents are more or less appended # verbatim to the appropriate clause. self.extra = {} # Maps col_alias -> (col_sql, params). self.extra_select_mask = None self._extra_select_cache = None self.extra_tables = () self.extra_order_by = () # A tuple that is a set of model field names and either True, if these # are the fields to defer, or False if these are the only fields to # load. self.deferred_loading = (frozenset(), True) self._filtered_relations = {} self.explain_query = False self.explain_format = None self.explain_options = {} @property def output_field(self): if len(self.select) == 1: select = self.select[0] return getattr(select, 'target', None) or select.field elif len(self.annotation_select) == 1: return next(iter(self.annotation_select.values())).output_field @property def has_select_fields(self): return bool(self.select or self.annotation_select_mask or self.extra_select_mask) @cached_property def base_table(self): for alias in self.alias_map: return alias @property def identity(self): identity = ( (arg, make_hashable(value)) for arg, value in self.__dict__.items() ) return (self.__class__, *identity) def __str__(self): """ Return the query as a string of SQL with the parameter values substituted in (use sql_with_params() to see the unsubstituted string). Parameter values won't necessarily be quoted correctly, since that is done by the database interface at execution time. """ sql, params = self.sql_with_params() return sql % params def sql_with_params(self): """ Return the query as an SQL string and the parameters that will be substituted into the query. """ return self.get_compiler(DEFAULT_DB_ALIAS).as_sql() def __deepcopy__(self, memo): """Limit the amount of work when a Query is deepcopied.""" result = self.clone() memo[id(self)] = result return result def get_compiler(self, using=None, connection=None): if using is None and connection is None: raise ValueError("Need either using or connection") if using: connection = connections[using] return connection.ops.compiler(self.compiler)(self, connection, using) def get_meta(self): """ Return the Options instance (the model._meta) from which to start processing. Normally, this is self.model._meta, but it can be changed by subclasses. """ return self.model._meta def clone(self): """ Return a copy of the current Query. A lightweight alternative to to deepcopy(). """ obj = Empty() obj.__class__ = self.__class__ # Copy references to everything. obj.__dict__ = self.__dict__.copy() # Clone attributes that can't use shallow copy. obj.alias_refcount = self.alias_refcount.copy() obj.alias_map = self.alias_map.copy() obj.external_aliases = self.external_aliases.copy() obj.table_map = self.table_map.copy() obj.where = self.where.clone() obj.annotations = self.annotations.copy() if self.annotation_select_mask is None: obj.annotation_select_mask = None else: obj.annotation_select_mask = self.annotation_select_mask.copy() obj.combined_queries = tuple(query.clone() for query in self.combined_queries) # _annotation_select_cache cannot be copied, as doing so breaks the # (necessary) state in which both annotations and # _annotation_select_cache point to the same underlying objects. 
# It will get re-populated in the cloned queryset the next time it's # used. obj._annotation_select_cache = None obj.extra = self.extra.copy() if self.extra_select_mask is None: obj.extra_select_mask = None else: obj.extra_select_mask = self.extra_select_mask.copy() if self._extra_select_cache is None: obj._extra_select_cache = None else: obj._extra_select_cache = self._extra_select_cache.copy() if self.select_related is not False: # Use deepcopy because select_related stores fields in nested # dicts. obj.select_related = copy.deepcopy(obj.select_related) if 'subq_aliases' in self.__dict__: obj.subq_aliases = self.subq_aliases.copy() obj.used_aliases = self.used_aliases.copy() obj._filtered_relations = self._filtered_relations.copy() # Clear the cached_property try: del obj.base_table except AttributeError: pass return obj def chain(self, klass=None): """ Return a copy of the current Query that's ready for another operation. The klass argument changes the type of the Query, e.g. UpdateQuery. """ obj = self.clone() if klass and obj.__class__ != klass: obj.__class__ = klass if not obj.filter_is_sticky: obj.used_aliases = set() obj.filter_is_sticky = False if hasattr(obj, '_setup_query'): obj._setup_query() return obj def relabeled_clone(self, change_map): clone = self.clone() clone.change_aliases(change_map) return clone def _get_col(self, target, field, alias): if not self.alias_cols: alias = None return target.get_col(alias, field) def rewrite_cols(self, annotation, col_cnt): # We must make sure the inner query has the referred columns in it. # If we are aggregating over an annotation, then Django uses Ref() # instances to note this. However, if we are annotating over a column # of a related model, then it might be that column isn't part of the # SELECT clause of the inner query, and we must manually make sure # the column is selected. An example case is: # .aggregate(Sum('author__awards')) # Resolving this expression results in a join to author, but there # is no guarantee the awards column of author is in the select clause # of the query. Thus we must manually add the column to the inner # query. orig_exprs = annotation.get_source_expressions() new_exprs = [] for expr in orig_exprs: # FIXME: These conditions are fairly arbitrary. Identify a better # method of having expressions decide which code path they should # take. if isinstance(expr, Ref): # Its already a Ref to subquery (see resolve_ref() for # details) new_exprs.append(expr) elif isinstance(expr, (WhereNode, Lookup)): # Decompose the subexpressions further. The code here is # copied from the else clause, but this condition must appear # before the contains_aggregate/is_summary condition below. new_expr, col_cnt = self.rewrite_cols(expr, col_cnt) new_exprs.append(new_expr) else: # Reuse aliases of expressions already selected in subquery. for col_alias, selected_annotation in self.annotation_select.items(): if selected_annotation is expr: new_expr = Ref(col_alias, expr) break else: # An expression that is not selected the subquery. if isinstance(expr, Col) or (expr.contains_aggregate and not expr.is_summary): # Reference column or another aggregate. Select it # under a non-conflicting alias. col_cnt += 1 col_alias = '__col%d' % col_cnt self.annotations[col_alias] = expr self.append_annotation_mask([col_alias]) new_expr = Ref(col_alias, expr) else: # Some other expression not referencing database values # directly. Its subexpression might contain Cols. 
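# Recurse so that a Col nested deeper in the expression tree also gets selected
# (under a generated alias) in the inner query.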
new_expr, col_cnt = self.rewrite_cols(expr, col_cnt) new_exprs.append(new_expr) annotation.set_source_expressions(new_exprs) return annotation, col_cnt def get_aggregation(self, using, added_aggregate_names): """ Return the dictionary with the values of the existing aggregations. """ if not self.annotation_select: return {} existing_annotations = [ annotation for alias, annotation in self.annotations.items() if alias not in added_aggregate_names ] # Decide if we need to use a subquery. # # Existing annotations would cause incorrect results as get_aggregation() # must produce just one result and thus must not use GROUP BY. But we # aren't smart enough to remove the existing annotations from the # query, so those would force us to use GROUP BY. # # If the query has limit or distinct, or uses set operations, then # those operations must be done in a subquery so that the query # aggregates on the limit and/or distinct results instead of applying # the distinct and limit after the aggregation. if (isinstance(self.group_by, tuple) or self.is_sliced or existing_annotations or self.distinct or self.combinator): from django.db.models.sql.subqueries import AggregateQuery inner_query = self.clone() inner_query.subquery = True outer_query = AggregateQuery(self.model, inner_query) inner_query.select_for_update = False inner_query.select_related = False inner_query.set_annotation_mask(self.annotation_select) if not self.is_sliced and not self.distinct_fields: # Queries with distinct_fields need ordering and when a limit # is applied we must take the slice from the ordered query. # Otherwise no need for ordering. inner_query.clear_ordering(True) if not inner_query.distinct: # If the inner query uses default select and it has some # aggregate annotations, then we must make sure the inner # query is grouped by the main model's primary key. However, # clearing the select clause can alter results if distinct is # used. has_existing_aggregate_annotations = any( annotation for annotation in existing_annotations if getattr(annotation, 'contains_aggregate', True) ) if inner_query.default_cols and has_existing_aggregate_annotations: inner_query.group_by = (self.model._meta.pk.get_col(inner_query.get_initial_alias()),) inner_query.default_cols = False relabels = {t: 'subquery' for t in inner_query.alias_map} relabels[None] = 'subquery' # Remove any aggregates marked for reduction from the subquery # and move them to the outer AggregateQuery. col_cnt = 0 for alias, expression in list(inner_query.annotation_select.items()): annotation_select_mask = inner_query.annotation_select_mask if expression.is_summary: expression, col_cnt = inner_query.rewrite_cols(expression, col_cnt) outer_query.annotations[alias] = expression.relabeled_clone(relabels) del inner_query.annotations[alias] annotation_select_mask.remove(alias) # Make sure the annotation_select wont use cached results. inner_query.set_annotation_mask(inner_query.annotation_select_mask) if inner_query.select == () and not inner_query.default_cols and not inner_query.annotation_select_mask: # In case of Model.objects[0:3].count(), there would be no # field selected in the inner query, yet we must use a subquery. # So, make sure at least one field is selected. 
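# Selecting the primary key keeps the inner query syntactically valid (an empty
# SELECT list is not legal SQL) without affecting the aggregate computed outside.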
inner_query.select = (self.model._meta.pk.get_col(inner_query.get_initial_alias()),) else: outer_query = self self.select = () self.default_cols = False self.extra = {} outer_query.clear_ordering(True) outer_query.clear_limits() outer_query.select_for_update = False outer_query.select_related = False compiler = outer_query.get_compiler(using) result = compiler.execute_sql(SINGLE) if result is None: result = [None] * len(outer_query.annotation_select) converters = compiler.get_converters(outer_query.annotation_select.values()) result = next(compiler.apply_converters((result,), converters)) return dict(zip(outer_query.annotation_select, result)) def get_count(self, using): """ Perform a COUNT() query using the current filter constraints. """ obj = self.clone() obj.add_annotation(Count('*'), alias='__count', is_summary=True) number = obj.get_aggregation(using, ['__count'])['__count'] if number is None: number = 0 return number def has_filters(self): return self.where def exists(self, using, limit=True): q = self.clone() if not q.distinct: if q.group_by is True: q.add_fields((f.attname for f in self.model._meta.concrete_fields), False) # Disable GROUP BY aliases to avoid orphaning references to the # SELECT clause which is about to be cleared. q.set_group_by(allow_aliases=False) q.clear_select_clause() if q.combined_queries and q.combinator == 'union': limit_combined = connections[using].features.supports_slicing_ordering_in_compound q.combined_queries = tuple( combined_query.exists(using, limit=limit_combined) for combined_query in q.combined_queries ) q.clear_ordering(True) if limit: q.set_limits(high=1) q.add_extra({'a': 1}, None, None, None, None, None) q.set_extra_mask(['a']) return q def has_results(self, using): q = self.exists(using) compiler = q.get_compiler(using=using) return compiler.has_results() def explain(self, using, format=None, **options): q = self.clone() q.explain_query = True q.explain_format = format q.explain_options = options compiler = q.get_compiler(using=using) return '\n'.join(compiler.explain_query()) def combine(self, rhs, connector): """ Merge the 'rhs' query into the current one (with any 'rhs' effects being applied *after* (that is, "to the right of") anything in the current query. 'rhs' is not modified during a call to this function. The 'connector' parameter describes how to connect filters from the 'rhs' query. """ assert self.model == rhs.model, \ "Cannot combine queries on two different base models." if self.is_sliced: raise TypeError('Cannot combine queries once a slice has been taken.') assert self.distinct == rhs.distinct, \ "Cannot combine a unique query with a non-unique query." assert self.distinct_fields == rhs.distinct_fields, \ "Cannot combine queries with different distinct fields." # Work out how to relabel the rhs aliases, if necessary. change_map = {} conjunction = (connector == AND) # Determine which existing joins can be reused. When combining the # query with AND we must recreate all joins for m2m filters. When # combining with OR we can reuse joins. The reason is that in AND # case a single row can't fulfill a condition like: # revrel__col=1 & revrel__col=2 # But, there might be two different related rows matching this # condition. In OR case a single True is enough, so single row is # enough, too. # # Note that we will be creating duplicate joins for non-m2m joins in # the AND case. The results will be correct but this creates too many # joins. This is something that could be fixed later on. 
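# Illustrative sketch (not from the original source; models hypothetical):
# combine() backs the | and & operators on QuerySets. With OR the joins of the
# rhs query may be reused; with AND every multi-valued join is recreated so
# the two conditions can match different related rows:
#
#     Author.objects.filter(book__pages__gt=500) | Author.objects.filter(book__year=2020)
#     Author.objects.filter(book__pages__gt=500) & Author.objects.filter(book__year=2020)
#
# The OR version joins "book" once; the AND version may join it twice.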
reuse = set() if conjunction else set(self.alias_map) # Base table must be present in the query - this is the same # table on both sides. self.get_initial_alias() joinpromoter = JoinPromoter(connector, 2, False) joinpromoter.add_votes( j for j in self.alias_map if self.alias_map[j].join_type == INNER) rhs_votes = set() # Now, add the joins from rhs query into the new query (skipping base # table). rhs_tables = list(rhs.alias_map)[1:] for alias in rhs_tables: join = rhs.alias_map[alias] # If the left side of the join was already relabeled, use the # updated alias. join = join.relabeled_clone(change_map) new_alias = self.join(join, reuse=reuse) if join.join_type == INNER: rhs_votes.add(new_alias) # We can't reuse the same join again in the query. If we have two # distinct joins for the same connection in rhs query, then the # combined query must have two joins, too. reuse.discard(new_alias) if alias != new_alias: change_map[alias] = new_alias if not rhs.alias_refcount[alias]: # The alias was unused in the rhs query. Unref it so that it # will be unused in the new query, too. We have to add and # unref the alias so that join promotion has information of # the join type for the unused alias. self.unref_alias(new_alias) joinpromoter.add_votes(rhs_votes) joinpromoter.update_join_types(self) # Combine subqueries aliases to ensure aliases relabelling properly # handle subqueries when combining where and select clauses. self.subq_aliases |= rhs.subq_aliases # Now relabel a copy of the rhs where-clause and add it to the current # one. w = rhs.where.clone() w.relabel_aliases(change_map) self.where.add(w, connector) # Selection columns and extra extensions are those provided by 'rhs'. if rhs.select: self.set_select([col.relabeled_clone(change_map) for col in rhs.select]) else: self.select = () if connector == OR: # It would be nice to be able to handle this, but the queries don't # really make sense (or return consistent value sets). Not worth # the extra complexity when you can write a real query instead. if self.extra and rhs.extra: raise ValueError("When merging querysets using 'or', you cannot have extra(select=...) on both sides.") self.extra.update(rhs.extra) extra_select_mask = set() if self.extra_select_mask is not None: extra_select_mask.update(self.extra_select_mask) if rhs.extra_select_mask is not None: extra_select_mask.update(rhs.extra_select_mask) if extra_select_mask: self.set_extra_mask(extra_select_mask) self.extra_tables += rhs.extra_tables # Ordering uses the 'rhs' ordering, unless it has none, in which case # the current ordering is used. self.order_by = rhs.order_by or self.order_by self.extra_order_by = rhs.extra_order_by or self.extra_order_by def deferred_to_data(self, target, callback): """ Convert the self.deferred_loading data structure to an alternate data structure, describing the field that *will* be loaded. This is used to compute the columns to select from the database and also by the QuerySet class to work out which fields are being initialized on each model. Models that have all their fields included aren't mentioned in the result, only those that have field restrictions in place. The "target" parameter is the instance that is populated (in place). The "callback" is a function that is called whenever a (model, field) pair need to be added to "target". It accepts three parameters: "target", and the model and list of fields being added for that model. 
""" field_names, defer = self.deferred_loading if not field_names: return orig_opts = self.get_meta() seen = {} must_include = {orig_opts.concrete_model: {orig_opts.pk}} for field_name in field_names: parts = field_name.split(LOOKUP_SEP) cur_model = self.model._meta.concrete_model opts = orig_opts for name in parts[:-1]: old_model = cur_model if name in self._filtered_relations: name = self._filtered_relations[name].relation_name source = opts.get_field(name) if is_reverse_o2o(source): cur_model = source.related_model else: cur_model = source.remote_field.model opts = cur_model._meta # Even if we're "just passing through" this model, we must add # both the current model's pk and the related reference field # (if it's not a reverse relation) to the things we select. if not is_reverse_o2o(source): must_include[old_model].add(source) add_to_dict(must_include, cur_model, opts.pk) field = opts.get_field(parts[-1]) is_reverse_object = field.auto_created and not field.concrete model = field.related_model if is_reverse_object else field.model model = model._meta.concrete_model if model == opts.model: model = cur_model if not is_reverse_o2o(field): add_to_dict(seen, model, field) if defer: # We need to load all fields for each model, except those that # appear in "seen" (for all models that appear in "seen"). The only # slight complexity here is handling fields that exist on parent # models. workset = {} for model, values in seen.items(): for field in model._meta.local_fields: if field not in values: m = field.model._meta.concrete_model add_to_dict(workset, m, field) for model, values in must_include.items(): # If we haven't included a model in workset, we don't add the # corresponding must_include fields for that model, since an # empty set means "include all fields". That's why there's no # "else" branch here. if model in workset: workset[model].update(values) for model, values in workset.items(): callback(target, model, values) else: for model, values in must_include.items(): if model in seen: seen[model].update(values) else: # As we've passed through this model, but not explicitly # included any fields, we have to make sure it's mentioned # so that only the "must include" fields are pulled in. seen[model] = values # Now ensure that every model in the inheritance chain is mentioned # in the parent list. Again, it must be mentioned to ensure that # only "must include" fields are pulled in. for model in orig_opts.get_parent_list(): seen.setdefault(model, set()) for model, values in seen.items(): callback(target, model, values) def table_alias(self, table_name, create=False, filtered_relation=None): """ Return a table alias for the given table_name and whether this is a new alias or not. If 'create' is true, a new alias is always created. Otherwise, the most recently created alias for the table (if one exists) is reused. """ alias_list = self.table_map.get(table_name) if not create and alias_list: alias = alias_list[0] self.alias_refcount[alias] += 1 return alias, False # Create a new alias for this table. if alias_list: alias = '%s%d' % (self.alias_prefix, len(self.alias_map) + 1) alias_list.append(alias) else: # The first occurrence of a table uses the table name directly. 
alias = filtered_relation.alias if filtered_relation is not None else table_name self.table_map[table_name] = [alias] self.alias_refcount[alias] = 1 return alias, True def ref_alias(self, alias): """Increases the reference count for this alias.""" self.alias_refcount[alias] += 1 def unref_alias(self, alias, amount=1): """Decreases the reference count for this alias.""" self.alias_refcount[alias] -= amount def promote_joins(self, aliases): """ Promote recursively the join type of given aliases and its children to an outer join. If 'unconditional' is False, only promote the join if it is nullable or the parent join is an outer join. The children promotion is done to avoid join chains that contain a LOUTER b INNER c. So, if we have currently a INNER b INNER c and a->b is promoted, then we must also promote b->c automatically, or otherwise the promotion of a->b doesn't actually change anything in the query results. """ aliases = list(aliases) while aliases: alias = aliases.pop(0) if self.alias_map[alias].join_type is None: # This is the base table (first FROM entry) - this table # isn't really joined at all in the query, so we should not # alter its join type. continue # Only the first alias (skipped above) should have None join_type assert self.alias_map[alias].join_type is not None parent_alias = self.alias_map[alias].parent_alias parent_louter = parent_alias and self.alias_map[parent_alias].join_type == LOUTER already_louter = self.alias_map[alias].join_type == LOUTER if ((self.alias_map[alias].nullable or parent_louter) and not already_louter): self.alias_map[alias] = self.alias_map[alias].promote() # Join type of 'alias' changed, so re-examine all aliases that # refer to this one. aliases.extend( join for join in self.alias_map if self.alias_map[join].parent_alias == alias and join not in aliases ) def demote_joins(self, aliases): """ Change join type from LOUTER to INNER for all joins in aliases. Similarly to promote_joins(), this method must ensure no join chains containing first an outer, then an inner join are generated. If we are demoting b->c join in chain a LOUTER b LOUTER c then we must demote a->b automatically, or otherwise the demotion of b->c doesn't actually change anything in the query results. . """ aliases = list(aliases) while aliases: alias = aliases.pop(0) if self.alias_map[alias].join_type == LOUTER: self.alias_map[alias] = self.alias_map[alias].demote() parent_alias = self.alias_map[alias].parent_alias if self.alias_map[parent_alias].join_type == INNER: aliases.append(parent_alias) def reset_refcounts(self, to_counts): """ Reset reference counts for aliases so that they match the value passed in `to_counts`. """ for alias, cur_refcount in self.alias_refcount.copy().items(): unref_amount = cur_refcount - to_counts.get(alias, 0) self.unref_alias(alias, unref_amount) def change_aliases(self, change_map): """ Change the aliases in change_map (which maps old-alias -> new-alias), relabelling any references to them in select columns and the where clause. """ assert set(change_map).isdisjoint(change_map.values()) # 1. Update references in "select" (normal columns plus aliases), # "group by" and "where". self.where.relabel_aliases(change_map) if isinstance(self.group_by, tuple): self.group_by = tuple([col.relabeled_clone(change_map) for col in self.group_by]) self.select = tuple([col.relabeled_clone(change_map) for col in self.select]) self.annotations = self.annotations and { key: col.relabeled_clone(change_map) for key, col in self.annotations.items() } # 2. 
Rename the alias in the internal table/alias datastructures. for old_alias, new_alias in change_map.items(): if old_alias not in self.alias_map: continue alias_data = self.alias_map[old_alias].relabeled_clone(change_map) self.alias_map[new_alias] = alias_data self.alias_refcount[new_alias] = self.alias_refcount[old_alias] del self.alias_refcount[old_alias] del self.alias_map[old_alias] table_aliases = self.table_map[alias_data.table_name] for pos, alias in enumerate(table_aliases): if alias == old_alias: table_aliases[pos] = new_alias break self.external_aliases = { # Table is aliased or it's being changed and thus is aliased. change_map.get(alias, alias): (aliased or alias in change_map) for alias, aliased in self.external_aliases.items() } def bump_prefix(self, outer_query): """ Change the alias prefix to the next letter in the alphabet in a way that the outer query's aliases and this query's aliases will not conflict. Even tables that previously had no alias will get an alias after this call. """ def prefix_gen(): """ Generate a sequence of characters in alphabetical order: -> 'A', 'B', 'C', ... When the alphabet is finished, the sequence will continue with the Cartesian product: -> 'AA', 'AB', 'AC', ... """ alphabet = ascii_uppercase prefix = chr(ord(self.alias_prefix) + 1) yield prefix for n in count(1): seq = alphabet[alphabet.index(prefix):] if prefix else alphabet for s in product(seq, repeat=n): yield ''.join(s) prefix = None if self.alias_prefix != outer_query.alias_prefix: # No clashes between self and outer query should be possible. return # Explicitly avoid infinite loop. The constant divider is based on how # much depth recursive subquery references add to the stack. This value # might need to be adjusted when adding or removing function calls from # the code path in charge of performing these operations. local_recursion_limit = sys.getrecursionlimit() // 16 for pos, prefix in enumerate(prefix_gen()): if prefix not in self.subq_aliases: self.alias_prefix = prefix break if pos > local_recursion_limit: raise RecursionError( 'Maximum recursion depth exceeded: too many subqueries.' ) self.subq_aliases = self.subq_aliases.union([self.alias_prefix]) outer_query.subq_aliases = outer_query.subq_aliases.union(self.subq_aliases) self.change_aliases({ alias: '%s%d' % (self.alias_prefix, pos) for pos, alias in enumerate(self.alias_map) }) def get_initial_alias(self): """ Return the first alias for this query, after increasing its reference count. """ if self.alias_map: alias = self.base_table self.ref_alias(alias) else: alias = self.join(BaseTable(self.get_meta().db_table, None)) return alias def count_active_tables(self): """ Return the number of tables in this query with a non-zero reference count. After execution, the reference counts are zeroed, so tables added in compiler will not be seen by this method. """ return len([1 for count in self.alias_refcount.values() if count]) def join(self, join, reuse=None, reuse_with_filtered_relation=False): """ Return an alias for the 'join', either reusing an existing alias for that join or creating a new one. 'join' is either a sql.datastructures.BaseTable or Join. The 'reuse' parameter can be either None which means all joins are reusable, or it can be a set containing the aliases that can be reused. The 'reuse_with_filtered_relation' parameter is used when computing FilteredRelation instances. A join is always created as LOUTER if the lhs alias is LOUTER to make sure chains like t1 LOUTER t2 INNER t3 aren't generated. 
All new joins are created as LOUTER if the join is nullable. """ if reuse_with_filtered_relation and reuse: reuse_aliases = [ a for a, j in self.alias_map.items() if a in reuse and j.equals(join, with_filtered_relation=False) ] else: reuse_aliases = [ a for a, j in self.alias_map.items() if (reuse is None or a in reuse) and j == join ] if reuse_aliases: if join.table_alias in reuse_aliases: reuse_alias = join.table_alias else: # Reuse the most recent alias of the joined table # (a many-to-many relation may be joined multiple times). reuse_alias = reuse_aliases[-1] self.ref_alias(reuse_alias) return reuse_alias # No reuse is possible, so we need a new alias. alias, _ = self.table_alias(join.table_name, create=True, filtered_relation=join.filtered_relation) if join.join_type: if self.alias_map[join.parent_alias].join_type == LOUTER or join.nullable: join_type = LOUTER else: join_type = INNER join.join_type = join_type join.table_alias = alias self.alias_map[alias] = join return alias def join_parent_model(self, opts, model, alias, seen): """ Make sure the given 'model' is joined in the query. If 'model' isn't a parent of 'opts' or if it is None this method is a no-op. The 'alias' is the root alias for starting the join, 'seen' is a dict of model -> alias of existing joins. It must also contain a mapping of None -> some alias. This will be returned in the no-op case. """ if model in seen: return seen[model] chain = opts.get_base_chain(model) if not chain: return alias curr_opts = opts for int_model in chain: if int_model in seen: curr_opts = int_model._meta alias = seen[int_model] continue # Proxy model have elements in base chain # with no parents, assign the new options # object and skip to the next base in that # case if not curr_opts.parents[int_model]: curr_opts = int_model._meta continue link_field = curr_opts.get_ancestor_link(int_model) join_info = self.setup_joins([link_field.name], curr_opts, alias) curr_opts = int_model._meta alias = seen[int_model] = join_info.joins[-1] return alias or seen[None] def add_annotation(self, annotation, alias, is_summary=False, select=True): """Add a single annotation expression to the Query.""" annotation = annotation.resolve_expression(self, allow_joins=True, reuse=None, summarize=is_summary) if select: self.append_annotation_mask([alias]) else: self.set_annotation_mask(set(self.annotation_select).difference({alias})) self.annotations[alias] = annotation def resolve_expression(self, query, *args, **kwargs): clone = self.clone() # Subqueries need to use a different set of aliases than the outer query. clone.bump_prefix(query) clone.subquery = True # It's safe to drop ordering if the queryset isn't using slicing, # distinct(*fields) or select_for_update(). if (self.low_mark == 0 and self.high_mark is None and not self.distinct_fields and not self.select_for_update): clone.clear_ordering(True) clone.where.resolve_expression(query, *args, **kwargs) for key, value in clone.annotations.items(): resolved = value.resolve_expression(query, *args, **kwargs) if hasattr(resolved, 'external_aliases'): resolved.external_aliases.update(clone.external_aliases) clone.annotations[key] = resolved # Outer query's aliases are considered external. 
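# Illustrative sketch (not from the original source; models hypothetical):
# resolve_expression() above runs when a Query is embedded as a subquery, e.g.
#
#     newest = Comment.objects.filter(post=OuterRef('pk')).order_by('-created')
#     Post.objects.annotate(last_comment=Subquery(newest.values('body')[:1]))
#
# bump_prefix() gives the inner query a fresh alias prefix so its aliases
# cannot clash with the outer query's, and every outer alias is recorded in
# external_aliases in the loop that follows so references to it survive
# relabelling.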
for alias, table in query.alias_map.items(): clone.external_aliases[alias] = ( (isinstance(table, Join) and table.join_field.related_model._meta.db_table != alias) or (isinstance(table, BaseTable) and table.table_name != table.table_alias) ) return clone def get_external_cols(self): exprs = chain(self.annotations.values(), self.where.children) return [ col for col in self._gen_cols(exprs, include_external=True) if col.alias in self.external_aliases ] def as_sql(self, compiler, connection): sql, params = self.get_compiler(connection=connection).as_sql() if self.subquery: sql = '(%s)' % sql return sql, params def resolve_lookup_value(self, value, can_reuse, allow_joins): if hasattr(value, 'resolve_expression'): value = value.resolve_expression( self, reuse=can_reuse, allow_joins=allow_joins, ) elif isinstance(value, (list, tuple)): # The items of the iterable may be expressions and therefore need # to be resolved independently. values = ( self.resolve_lookup_value(sub_value, can_reuse, allow_joins) for sub_value in value ) type_ = type(value) if hasattr(type_, '_make'): # namedtuple return type_(*values) return type_(values) return value def solve_lookup_type(self, lookup): """ Solve the lookup type from the lookup (e.g.: 'foobar__id__icontains'). """ lookup_splitted = lookup.split(LOOKUP_SEP) if self.annotations: expression, expression_lookups = refs_expression(lookup_splitted, self.annotations) if expression: return expression_lookups, (), expression _, field, _, lookup_parts = self.names_to_path(lookup_splitted, self.get_meta()) field_parts = lookup_splitted[0:len(lookup_splitted) - len(lookup_parts)] if len(lookup_parts) > 1 and not field_parts: raise FieldError( 'Invalid lookup "%s" for model %s".' % (lookup, self.get_meta().model.__name__) ) return lookup_parts, field_parts, False def check_query_object_type(self, value, opts, field): """ Check whether the object passed while querying is of the correct type. If not, raise a ValueError specifying the wrong object. """ if hasattr(value, '_meta'): if not check_rel_lookup_compatibility(value._meta.model, opts, field): raise ValueError( 'Cannot query "%s": Must be "%s" instance.' % (value, opts.object_name)) def check_related_objects(self, field, value, opts): """Check the type of object passed to query relations.""" if field.is_relation: # Check that the field and the queryset use the same model in a # query like .filter(author=Author.objects.all()). For example, the # opts would be Author's (from the author field) and value.model # would be Author.objects.all() queryset's .model (Author also). # The field is the related field on the lhs side. if (isinstance(value, Query) and not value.has_select_fields and not check_rel_lookup_compatibility(value.model, opts, field)): raise ValueError( 'Cannot use QuerySet for "%s": Use a QuerySet for "%s".' % (value.model._meta.object_name, opts.object_name) ) elif hasattr(value, '_meta'): self.check_query_object_type(value, opts, field) elif hasattr(value, '__iter__'): for v in value: self.check_query_object_type(v, opts, field) def check_filterable(self, expression): """Raise an error if expression cannot be used in a WHERE clause.""" if ( hasattr(expression, 'resolve_expression') and not getattr(expression, 'filterable', True) ): raise NotSupportedError( expression.__class__.__name__ + ' is disallowed in the filter ' 'clause.' 
) if hasattr(expression, 'get_source_expressions'): for expr in expression.get_source_expressions(): self.check_filterable(expr) def build_lookup(self, lookups, lhs, rhs): """ Try to extract transforms and lookup from given lhs. The lhs value is something that works like SQLExpression. The rhs value is what the lookup is going to compare against. The lookups is a list of names to extract using get_lookup() and get_transform(). """ # __exact is the default lookup if one isn't given. *transforms, lookup_name = lookups or ['exact'] for name in transforms: lhs = self.try_transform(lhs, name) # First try get_lookup() so that the lookup takes precedence if the lhs # supports both transform and lookup for the name. lookup_class = lhs.get_lookup(lookup_name) if not lookup_class: if lhs.field.is_relation: raise FieldError('Related Field got invalid lookup: {}'.format(lookup_name)) # A lookup wasn't found. Try to interpret the name as a transform # and do an Exact lookup against it. lhs = self.try_transform(lhs, lookup_name) lookup_name = 'exact' lookup_class = lhs.get_lookup(lookup_name) if not lookup_class: return lookup = lookup_class(lhs, rhs) # Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all # uses of None as a query value unless the lookup supports it. if lookup.rhs is None and not lookup.can_use_none_as_rhs: if lookup_name not in ('exact', 'iexact'): raise ValueError("Cannot use None as a query value") return lhs.get_lookup('isnull')(lhs, True) # For Oracle '' is equivalent to null. The check must be done at this # stage because join promotion can't be done in the compiler. Using # DEFAULT_DB_ALIAS isn't nice but it's the best that can be done here. # A similar thing is done in is_nullable(), too. if (connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and lookup_name == 'exact' and lookup.rhs == ''): return lhs.get_lookup('isnull')(lhs, True) return lookup def try_transform(self, lhs, name): """ Helper method for build_lookup(). Try to fetch and initialize a transform for name parameter from lhs. """ transform_class = lhs.get_transform(name) if transform_class: return transform_class(lhs) else: output_field = lhs.output_field.__class__ suggested_lookups = difflib.get_close_matches(name, output_field.get_lookups()) if suggested_lookups: suggestion = ', perhaps you meant %s?' % ' or '.join(suggested_lookups) else: suggestion = '.' raise FieldError( "Unsupported lookup '%s' for %s or join on the field not " "permitted%s" % (name, output_field.__name__, suggestion) ) def build_filter(self, filter_expr, branch_negated=False, current_negated=False, can_reuse=None, allow_joins=True, split_subq=True, reuse_with_filtered_relation=False, check_filterable=True): """ Build a WhereNode for a single filter clause but don't add it to this Query. Query.add_q() will then add this filter to the where Node. The 'branch_negated' tells us if the current branch contains any negations. This will be used to determine if subqueries are needed. The 'current_negated' is used to determine if the current filter is negated or not and this will be used to determine if IS NULL filtering is needed. The difference between current_negated and branch_negated is that branch_negated is set on first negation, but current_negated is flipped for each negation. Note that add_filter will not do any negating itself, that is done upper in the code by add_q(). The 'can_reuse' is a set of reusable joins for multijoins. 
If 'reuse_with_filtered_relation' is True, then only joins in can_reuse will be reused. The method will create a filter clause that can be added to the current query. However, if the filter isn't added to the query then the caller is responsible for unreffing the joins used. """ if isinstance(filter_expr, dict): raise FieldError("Cannot parse keyword query as dict") if isinstance(filter_expr, Q): return self._add_q( filter_expr, branch_negated=branch_negated, current_negated=current_negated, used_aliases=can_reuse, allow_joins=allow_joins, split_subq=split_subq, check_filterable=check_filterable, ) if hasattr(filter_expr, 'resolve_expression'): if not getattr(filter_expr, 'conditional', False): raise TypeError('Cannot filter against a non-conditional expression.') condition = self.build_lookup( ['exact'], filter_expr.resolve_expression(self, allow_joins=allow_joins), True ) clause = self.where_class() clause.add(condition, AND) return clause, [] arg, value = filter_expr if not arg: raise FieldError("Cannot parse keyword query %r" % arg) lookups, parts, reffed_expression = self.solve_lookup_type(arg) if check_filterable: self.check_filterable(reffed_expression) if not allow_joins and len(parts) > 1: raise FieldError("Joined field references are not permitted in this query") pre_joins = self.alias_refcount.copy() value = self.resolve_lookup_value(value, can_reuse, allow_joins) used_joins = {k for k, v in self.alias_refcount.items() if v > pre_joins.get(k, 0)} if check_filterable: self.check_filterable(value) clause = self.where_class() if reffed_expression: condition = self.build_lookup(lookups, reffed_expression, value) clause.add(condition, AND) return clause, [] opts = self.get_meta() alias = self.get_initial_alias() allow_many = not branch_negated or not split_subq try: join_info = self.setup_joins( parts, opts, alias, can_reuse=can_reuse, allow_many=allow_many, reuse_with_filtered_relation=reuse_with_filtered_relation, ) # Prevent iterator from being consumed by check_related_objects() if isinstance(value, Iterator): value = list(value) self.check_related_objects(join_info.final_field, value, join_info.opts) # split_exclude() needs to know which joins were generated for the # lookup parts self._lookup_joins = join_info.joins except MultiJoin as e: return self.split_exclude(filter_expr, can_reuse, e.names_with_path) # Update used_joins before trimming since they are reused to determine # which joins could be later promoted to INNER. 
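# Illustrative sketch (not from the original source; Entry/Blog are
# hypothetical models): how a keyword filter is decomposed by the code above.
# For
#
#     Entry.objects.filter(blog__name__icontains='django')
#
# build_filter() receives filter_expr = ('blog__name__icontains', 'django'),
# solve_lookup_type() splits it into lookups ['icontains'] and field parts
# ['blog', 'name'], setup_joins() adds the entry -> blog join, and the
# resulting lookup is ANDed into a fresh WhereNode that add_q() later attaches
# to self.where.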
used_joins.update(join_info.joins) targets, alias, join_list = self.trim_joins(join_info.targets, join_info.joins, join_info.path) if can_reuse is not None: can_reuse.update(join_list) if join_info.final_field.is_relation: # No support for transforms for relational fields num_lookups = len(lookups) if num_lookups > 1: raise FieldError('Related Field got invalid lookup: {}'.format(lookups[0])) if len(targets) == 1: col = self._get_col(targets[0], join_info.final_field, alias) else: col = MultiColSource(alias, targets, join_info.targets, join_info.final_field) else: col = self._get_col(targets[0], join_info.final_field, alias) condition = self.build_lookup(lookups, col, value) lookup_type = condition.lookup_name clause.add(condition, AND) require_outer = lookup_type == 'isnull' and condition.rhs is True and not current_negated if current_negated and (lookup_type != 'isnull' or condition.rhs is False) and condition.rhs is not None: require_outer = True if lookup_type != 'isnull': # The condition added here will be SQL like this: # NOT (col IS NOT NULL), where the first NOT is added in # upper layers of code. The reason for addition is that if col # is null, then col != someval will result in SQL "unknown" # which isn't the same as in Python. The Python None handling # is wanted, and it can be gotten by # (col IS NULL OR col != someval) # <=> # NOT (col IS NOT NULL AND col = someval). if ( self.is_nullable(targets[0]) or self.alias_map[join_list[-1]].join_type == LOUTER ): lookup_class = targets[0].get_lookup('isnull') col = self._get_col(targets[0], join_info.targets[0], alias) clause.add(lookup_class(col, False), AND) # If someval is a nullable column, someval IS NOT NULL is # added. if isinstance(value, Col) and self.is_nullable(value.target): lookup_class = value.target.get_lookup('isnull') clause.add(lookup_class(value, False), AND) return clause, used_joins if not require_outer else () def add_filter(self, filter_clause): self.add_q(Q(**{filter_clause[0]: filter_clause[1]})) def add_q(self, q_object): """ A preprocessor for the internal _add_q(). Responsible for doing final join promotion. """ # For join promotion this case is doing an AND for the added q_object # and existing conditions. So, any existing inner join forces the join # type to remain inner. Existing outer joins can however be demoted. # (Consider case where rel_a is LOUTER and rel_a__col=1 is added - if # rel_a doesn't produce any rows, then the whole condition must fail. # So, demotion is OK. 
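# Illustrative sketch (not from the original source; 'Entry' with a nullable
# 'rating' column is hypothetical): the IS NOT NULL guard added in
# build_filter() above is what reconciles SQL NULL handling with the Python
# reading of an exclude():
#
#     Entry.objects.exclude(rating=5)
#
# compiles roughly to WHERE NOT (rating = 5 AND rating IS NOT NULL), so rows
# whose rating is NULL are still returned, as "rating != 5" would suggest in
# Python.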
existing_inner = {a for a in self.alias_map if self.alias_map[a].join_type == INNER} clause, _ = self._add_q(q_object, self.used_aliases) if clause: self.where.add(clause, AND) self.demote_joins(existing_inner) def build_where(self, filter_expr): return self.build_filter(filter_expr, allow_joins=False)[0] def _add_q(self, q_object, used_aliases, branch_negated=False, current_negated=False, allow_joins=True, split_subq=True, check_filterable=True): """Add a Q-object to the current filter.""" connector = q_object.connector current_negated = current_negated ^ q_object.negated branch_negated = branch_negated or q_object.negated target_clause = self.where_class(connector=connector, negated=q_object.negated) joinpromoter = JoinPromoter(q_object.connector, len(q_object.children), current_negated) for child in q_object.children: child_clause, needed_inner = self.build_filter( child, can_reuse=used_aliases, branch_negated=branch_negated, current_negated=current_negated, allow_joins=allow_joins, split_subq=split_subq, check_filterable=check_filterable, ) joinpromoter.add_votes(needed_inner) if child_clause: target_clause.add(child_clause, connector) needed_inner = joinpromoter.update_join_types(self) return target_clause, needed_inner def build_filtered_relation_q(self, q_object, reuse, branch_negated=False, current_negated=False): """Add a FilteredRelation object to the current filter.""" connector = q_object.connector current_negated ^= q_object.negated branch_negated = branch_negated or q_object.negated target_clause = self.where_class(connector=connector, negated=q_object.negated) for child in q_object.children: if isinstance(child, Node): child_clause = self.build_filtered_relation_q( child, reuse=reuse, branch_negated=branch_negated, current_negated=current_negated, ) else: child_clause, _ = self.build_filter( child, can_reuse=reuse, branch_negated=branch_negated, current_negated=current_negated, allow_joins=True, split_subq=False, reuse_with_filtered_relation=True, ) target_clause.add(child_clause, connector) return target_clause def add_filtered_relation(self, filtered_relation, alias): filtered_relation.alias = alias lookups = dict(get_children_from_q(filtered_relation.condition)) relation_lookup_parts, relation_field_parts, _ = self.solve_lookup_type(filtered_relation.relation_name) if relation_lookup_parts: raise ValueError( "FilteredRelation's relation_name cannot contain lookups " "(got %r)." % filtered_relation.relation_name ) for lookup in chain(lookups): lookup_parts, lookup_field_parts, _ = self.solve_lookup_type(lookup) shift = 2 if not lookup_parts else 1 lookup_field_path = lookup_field_parts[:-shift] for idx, lookup_field_part in enumerate(lookup_field_path): if len(relation_field_parts) > idx: if relation_field_parts[idx] != lookup_field_part: raise ValueError( "FilteredRelation's condition doesn't support " "relations outside the %r (got %r)." % (filtered_relation.relation_name, lookup) ) else: raise ValueError( "FilteredRelation's condition doesn't support nested " "relations deeper than the relation_name (got %r for " "%r)." % (lookup, filtered_relation.relation_name) ) self._filtered_relations[filtered_relation.alias] = filtered_relation def names_to_path(self, names, opts, allow_many=True, fail_on_missing=False): """ Walk the list of names and turns them into PathInfo tuples. A single name in 'names' can generate multiple PathInfos (m2m, for example). 
'names' is the path of names to travel, 'opts' is the model Options we start the name resolving from, 'allow_many' is as for setup_joins(). If fail_on_missing is set to True, then a name that can't be resolved will generate a FieldError. Return a list of PathInfo tuples. In addition return the final field (the last used join field) and target (which is a field guaranteed to contain the same value as the final field). Finally, return those names that weren't found (which are likely transforms and the final lookup). """ path, names_with_path = [], [] for pos, name in enumerate(names): cur_names_with_path = (name, []) if name == 'pk': name = opts.pk.name field = None filtered_relation = None try: field = opts.get_field(name) except FieldDoesNotExist: if name in self.annotation_select: field = self.annotation_select[name].output_field elif name in self._filtered_relations and pos == 0: filtered_relation = self._filtered_relations[name] if LOOKUP_SEP in filtered_relation.relation_name: parts = filtered_relation.relation_name.split(LOOKUP_SEP) filtered_relation_path, field, _, _ = self.names_to_path( parts, opts, allow_many, fail_on_missing, ) path.extend(filtered_relation_path[:-1]) else: field = opts.get_field(filtered_relation.relation_name) if field is not None: # Fields that contain one-to-many relations with a generic # model (like a GenericForeignKey) cannot generate reverse # relations and therefore cannot be used for reverse querying. if field.is_relation and not field.related_model: raise FieldError( "Field %r does not generate an automatic reverse " "relation and therefore cannot be used for reverse " "querying. If it is a GenericForeignKey, consider " "adding a GenericRelation." % name ) try: model = field.model._meta.concrete_model except AttributeError: # QuerySet.annotate() may introduce fields that aren't # attached to a model. model = None else: # We didn't find the current field, so move position back # one step. pos -= 1 if pos == -1 or fail_on_missing: available = sorted([ *get_field_names_from_opts(opts), *self.annotation_select, *self._filtered_relations, ]) raise FieldError("Cannot resolve keyword '%s' into field. " "Choices are: %s" % (name, ", ".join(available))) break # Check if we need any joins for concrete inheritance cases (the # field lives in parent, but we are currently in one of its # children) if model is not opts.model: path_to_parent = opts.get_path_to_parent(model) if path_to_parent: path.extend(path_to_parent) cur_names_with_path[1].extend(path_to_parent) opts = path_to_parent[-1].to_opts if hasattr(field, 'get_path_info'): pathinfos = field.get_path_info(filtered_relation) if not allow_many: for inner_pos, p in enumerate(pathinfos): if p.m2m: cur_names_with_path[1].extend(pathinfos[0:inner_pos + 1]) names_with_path.append(cur_names_with_path) raise MultiJoin(pos + 1, names_with_path) last = pathinfos[-1] path.extend(pathinfos) final_field = last.join_field opts = last.to_opts targets = last.target_fields cur_names_with_path[1].extend(pathinfos) names_with_path.append(cur_names_with_path) else: # Local non-relational field. final_field = field targets = (field,) if fail_on_missing and pos + 1 != len(names): raise FieldError( "Cannot resolve keyword %r into field. Join on '%s'" " not permitted." 
% (names[pos + 1], name)) break return path, final_field, targets, names[pos + 1:] def setup_joins(self, names, opts, alias, can_reuse=None, allow_many=True, reuse_with_filtered_relation=False): """ Compute the necessary table joins for the passage through the fields given in 'names'. 'opts' is the Options class for the current model (which gives the table we are starting from), 'alias' is the alias for the table to start the joining from. The 'can_reuse' defines the reverse foreign key joins we can reuse. It can be None in which case all joins are reusable or a set of aliases that can be reused. Note that non-reverse foreign keys are always reusable when using setup_joins(). The 'reuse_with_filtered_relation' can be used to force 'can_reuse' parameter and force the relation on the given connections. If 'allow_many' is False, then any reverse foreign key seen will generate a MultiJoin exception. Return the final field involved in the joins, the target field (used for any 'where' constraint), the final 'opts' value, the joins, the field path traveled to generate the joins, and a transform function that takes a field and alias and is equivalent to `field.get_col(alias)` in the simple case but wraps field transforms if they were included in names. The target field is the field containing the concrete value. Final field can be something different, for example foreign key pointing to that value. Final field is needed for example in some value conversions (convert 'obj' in fk__id=obj to pk val using the foreign key field for example). """ joins = [alias] # The transform can't be applied yet, as joins must be trimmed later. # To avoid making every caller of this method look up transforms # directly, compute transforms here and create a partial that converts # fields to the appropriate wrapped version. def final_transformer(field, alias): if not self.alias_cols: alias = None return field.get_col(alias) # Try resolving all the names as fields first. If there's an error, # treat trailing names as lookups until a field can be resolved. last_field_exception = None for pivot in range(len(names), 0, -1): try: path, final_field, targets, rest = self.names_to_path( names[:pivot], opts, allow_many, fail_on_missing=True, ) except FieldError as exc: if pivot == 1: # The first item cannot be a lookup, so it's safe # to raise the field error here. raise else: last_field_exception = exc else: # The transforms are the remaining items that couldn't be # resolved into fields. transforms = names[pivot:] break for name in transforms: def transform(field, alias, *, name, previous): try: wrapped = previous(field, alias) return self.try_transform(wrapped, name) except FieldError: # FieldError is raised if the transform doesn't exist. if isinstance(final_field, Field) and last_field_exception: raise last_field_exception else: raise final_transformer = functools.partial(transform, name=name, previous=final_transformer) # Then, add the path to the query's joins. Note that we can't trim # joins at this stage - we will need the information about join type # of the trimmed joins. 
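# Illustrative sketch (not from the original source): how the pivot loop above
# peels trailing transforms off the name path. For a reference such as
# 'pub_date__year' (reached e.g. via an F() or values() reference, where
# supported), the first names_to_path() call with fail_on_missing=True raises
# FieldError because 'year' is not a field; the pivot then retries with
# ['pub_date'] alone, and 'year' is wrapped through try_transform() into
# final_transformer so callers receive the transformed column.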
for join in path: if join.filtered_relation: filtered_relation = join.filtered_relation.clone() table_alias = filtered_relation.alias else: filtered_relation = None table_alias = None opts = join.to_opts if join.direct: nullable = self.is_nullable(join.join_field) else: nullable = True connection = Join( opts.db_table, alias, table_alias, INNER, join.join_field, nullable, filtered_relation=filtered_relation, ) reuse = can_reuse if join.m2m or reuse_with_filtered_relation else None alias = self.join( connection, reuse=reuse, reuse_with_filtered_relation=reuse_with_filtered_relation, ) joins.append(alias) if filtered_relation: filtered_relation.path = joins[:] return JoinInfo(final_field, targets, opts, joins, path, final_transformer) def trim_joins(self, targets, joins, path): """ The 'target' parameter is the final field being joined to, 'joins' is the full list of join aliases. The 'path' contain the PathInfos used to create the joins. Return the final target field and table alias and the new active joins. Always trim any direct join if the target column is already in the previous table. Can't trim reverse joins as it's unknown if there's anything on the other side of the join. """ joins = joins[:] for pos, info in enumerate(reversed(path)): if len(joins) == 1 or not info.direct: break if info.filtered_relation: break join_targets = {t.column for t in info.join_field.foreign_related_fields} cur_targets = {t.column for t in targets} if not cur_targets.issubset(join_targets): break targets_dict = {r[1].column: r[0] for r in info.join_field.related_fields if r[1].column in cur_targets} targets = tuple(targets_dict[t.column] for t in targets) self.unref_alias(joins.pop()) return targets, joins[-1], joins @classmethod def _gen_cols(cls, exprs, include_external=False): for expr in exprs: if isinstance(expr, Col): yield expr elif include_external and callable(getattr(expr, 'get_external_cols', None)): yield from expr.get_external_cols() else: yield from cls._gen_cols( expr.get_source_expressions(), include_external=include_external, ) @classmethod def _gen_col_aliases(cls, exprs): yield from (expr.alias for expr in cls._gen_cols(exprs)) def resolve_ref(self, name, allow_joins=True, reuse=None, summarize=False): annotation = self.annotations.get(name) if annotation is not None: if not allow_joins: for alias in self._gen_col_aliases([annotation]): if isinstance(self.alias_map[alias], Join): raise FieldError( 'Joined field references are not permitted in ' 'this query' ) if summarize: # Summarize currently means we are doing an aggregate() query # which is executed as a wrapped subquery if any of the # aggregate() elements reference an existing annotation. In # that case we need to return a Ref to the subquery's annotation. if name not in self.annotation_select: raise FieldError( "Cannot aggregate over the '%s' alias. Use annotate() " "to promote it." 
% name ) return Ref(name, self.annotation_select[name]) else: return annotation else: field_list = name.split(LOOKUP_SEP) annotation = self.annotations.get(field_list[0]) if annotation is not None: for transform in field_list[1:]: annotation = self.try_transform(annotation, transform) return annotation join_info = self.setup_joins(field_list, self.get_meta(), self.get_initial_alias(), can_reuse=reuse) targets, final_alias, join_list = self.trim_joins(join_info.targets, join_info.joins, join_info.path) if not allow_joins and len(join_list) > 1: raise FieldError('Joined field references are not permitted in this query') if len(targets) > 1: raise FieldError("Referencing multicolumn fields with F() objects " "isn't supported") # Verify that the last lookup in name is a field or a transform: # transform_function() raises FieldError if not. transform = join_info.transform_function(targets[0], final_alias) if reuse is not None: reuse.update(join_list) return transform def split_exclude(self, filter_expr, can_reuse, names_with_path): """ When doing an exclude against any kind of N-to-many relation, we need to use a subquery. This method constructs the nested query, given the original exclude filter (filter_expr) and the portion up to the first N-to-many relation field. For example, if the origin filter is ~Q(child__name='foo'), filter_expr is ('child__name', 'foo') and can_reuse is a set of joins usable for filters in the original query. We will turn this into equivalent of: WHERE NOT EXISTS( SELECT 1 FROM child WHERE name = 'foo' AND child.parent_id = parent.id LIMIT 1 ) """ filter_lhs, filter_rhs = filter_expr if isinstance(filter_rhs, OuterRef): filter_expr = (filter_lhs, OuterRef(filter_rhs)) elif isinstance(filter_rhs, F): filter_expr = (filter_lhs, OuterRef(filter_rhs.name)) # Generate the inner query. query = Query(self.model) query._filtered_relations = self._filtered_relations query.add_filter(filter_expr) query.clear_ordering(True) # Try to have as simple as possible subquery -> trim leading joins from # the subquery. trimmed_prefix, contains_louter = query.trim_start(names_with_path) col = query.select[0] select_field = col.target alias = col.alias if alias in can_reuse: pk = select_field.model._meta.pk # Need to add a restriction so that outer query's filters are in effect for # the subquery, too. query.bump_prefix(self) lookup_class = select_field.get_lookup('exact') # Note that the query.select[0].alias is different from alias # due to bump_prefix above. lookup = lookup_class(pk.get_col(query.select[0].alias), pk.get_col(alias)) query.where.add(lookup, AND) query.external_aliases[alias] = True lookup_class = select_field.get_lookup('exact') lookup = lookup_class(col, ResolvedOuterRef(trimmed_prefix)) query.where.add(lookup, AND) condition, needed_inner = self.build_filter(Exists(query)) if contains_louter: or_null_condition, _ = self.build_filter( ('%s__isnull' % trimmed_prefix, True), current_negated=True, branch_negated=True, can_reuse=can_reuse) condition.add(or_null_condition, OR) # Note that the end result will be: # (outercol NOT IN innerq AND outercol IS NOT NULL) OR outercol IS NULL. # This might look crazy but due to how IN works, this seems to be # correct. If the IS NOT NULL check is removed then outercol NOT # IN will return UNKNOWN. If the IS NULL check is removed, then if # outercol IS NULL we will not match the row. 
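# Illustrative sketch (not from the original source; Parent/Child are
# hypothetical models): when the excluded value itself refers to the outer
# row, e.g.
#
#     Parent.objects.exclude(child__name=F('name'))
#
# the F('name') on the rhs is rewritten to OuterRef('name') at the top of
# split_exclude(), so inside the generated NOT EXISTS subquery it resolves
# against the outer (parent) table rather than the inner child query.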
return condition, needed_inner def set_empty(self): self.where.add(NothingNode(), AND) for query in self.combined_queries: query.set_empty() def is_empty(self): return any(isinstance(c, NothingNode) for c in self.where.children) def set_limits(self, low=None, high=None): """ Adjust the limits on the rows retrieved. Use low/high to set these, as it makes it more Pythonic to read and write. When the SQL query is created, convert them to the appropriate offset and limit values. Apply any limits passed in here to the existing constraints. Add low to the current low value and clamp both to any existing high value. """ if high is not None: if self.high_mark is not None: self.high_mark = min(self.high_mark, self.low_mark + high) else: self.high_mark = self.low_mark + high if low is not None: if self.high_mark is not None: self.low_mark = min(self.high_mark, self.low_mark + low) else: self.low_mark = self.low_mark + low if self.low_mark == self.high_mark: self.set_empty() def clear_limits(self): """Clear any existing limits.""" self.low_mark, self.high_mark = 0, None @property def is_sliced(self): return self.low_mark != 0 or self.high_mark is not None def has_limit_one(self): return self.high_mark is not None and (self.high_mark - self.low_mark) == 1 def can_filter(self): """ Return True if adding filters to this instance is still possible. Typically, this means no limits or offsets have been put on the results. """ return not self.is_sliced def clear_select_clause(self): """Remove all fields from SELECT clause.""" self.select = () self.default_cols = False self.select_related = False self.set_extra_mask(()) self.set_annotation_mask(()) def clear_select_fields(self): """ Clear the list of fields to select (but not extra_select columns). Some queryset types completely replace any existing list of select columns. """ self.select = () self.values_select = () def add_select_col(self, col, name): self.select += col, self.values_select += name, def set_select(self, cols): self.default_cols = False self.select = tuple(cols) def add_distinct_fields(self, *field_names): """ Add and resolve the given fields to the query's "distinct on" clause. """ self.distinct_fields = field_names self.distinct = True def add_fields(self, field_names, allow_m2m=True): """ Add the given (model) fields to the select set. Add the field names in the order specified. """ alias = self.get_initial_alias() opts = self.get_meta() try: cols = [] for name in field_names: # Join promotion note - we must not remove any rows here, so # if there is no existing joins, use outer join. join_info = self.setup_joins(name.split(LOOKUP_SEP), opts, alias, allow_many=allow_m2m) targets, final_alias, joins = self.trim_joins( join_info.targets, join_info.joins, join_info.path, ) for target in targets: cols.append(join_info.transform_function(target, final_alias)) if cols: self.set_select(cols) except MultiJoin: raise FieldError("Invalid field name: '%s'" % name) except FieldError: if LOOKUP_SEP in name: # For lookups spanning over relationships, show the error # from the model on which the lookup failed. raise elif name in self.annotations: raise FieldError( "Cannot select the '%s' alias. Use annotate() to promote " "it." % name ) else: names = sorted([ *get_field_names_from_opts(opts), *self.extra, *self.annotation_select, *self._filtered_relations ]) raise FieldError("Cannot resolve keyword %r into field. 
" "Choices are: %s" % (name, ", ".join(names))) def add_ordering(self, *ordering): """ Add items from the 'ordering' sequence to the query's "order by" clause. These items are either field names (not column names) -- possibly with a direction prefix ('-' or '?') -- or OrderBy expressions. If 'ordering' is empty, clear all ordering from the query. """ errors = [] for item in ordering: if isinstance(item, str): if item == '?': continue if item.startswith('-'): item = item[1:] if item in self.annotations: continue if self.extra and item in self.extra: continue # names_to_path() validates the lookup. A descriptive # FieldError will be raise if it's not. self.names_to_path(item.split(LOOKUP_SEP), self.model._meta) elif not hasattr(item, 'resolve_expression'): errors.append(item) if getattr(item, 'contains_aggregate', False): raise FieldError( 'Using an aggregate in order_by() without also including ' 'it in annotate() is not allowed: %s' % item ) if errors: raise FieldError('Invalid order_by arguments: %s' % errors) if ordering: self.order_by += ordering else: self.default_ordering = False def clear_ordering(self, force_empty): """ Remove any ordering settings. If 'force_empty' is True, there will be no ordering in the resulting query (not even the model's default). """ self.order_by = () self.extra_order_by = () if force_empty: self.default_ordering = False def set_group_by(self, allow_aliases=True): """ Expand the GROUP BY clause required by the query. This will usually be the set of all non-aggregate fields in the return data. If the database backend supports grouping by the primary key, and the query would be equivalent, the optimization will be made automatically. """ # Column names from JOINs to check collisions with aliases. if allow_aliases: column_names = set() seen_models = set() for join in list(self.alias_map.values())[1:]: # Skip base table. model = join.join_field.related_model if model not in seen_models: column_names.update({ field.column for field in model._meta.local_concrete_fields }) seen_models.add(model) group_by = list(self.select) if self.annotation_select: for alias, annotation in self.annotation_select.items(): if not allow_aliases or alias in column_names: alias = None group_by_cols = annotation.get_group_by_cols(alias=alias) group_by.extend(group_by_cols) self.group_by = tuple(group_by) def add_select_related(self, fields): """ Set up the select_related data structure so that we only select certain related models (as opposed to all models, when self.select_related=True). """ if isinstance(self.select_related, bool): field_dict = {} else: field_dict = self.select_related for field in fields: d = field_dict for part in field.split(LOOKUP_SEP): d = d.setdefault(part, {}) self.select_related = field_dict def add_extra(self, select, select_params, where, params, tables, order_by): """ Add data to the various extra_* attributes for user-created additions to the query. """ if select: # We need to pair any placeholder markers in the 'select' # dictionary with their parameters in 'select_params' so that # subsequent updates to the select dictionary also adjust the # parameters appropriately. 
select_pairs = {} if select_params: param_iter = iter(select_params) else: param_iter = iter([]) for name, entry in select.items(): entry = str(entry) entry_params = [] pos = entry.find("%s") while pos != -1: if pos == 0 or entry[pos - 1] != '%': entry_params.append(next(param_iter)) pos = entry.find("%s", pos + 2) select_pairs[name] = (entry, entry_params) self.extra.update(select_pairs) if where or params: self.where.add(ExtraWhere(where, params), AND) if tables: self.extra_tables += tuple(tables) if order_by: self.extra_order_by = order_by def clear_deferred_loading(self): """Remove any fields from the deferred loading set.""" self.deferred_loading = (frozenset(), True) def add_deferred_loading(self, field_names): """ Add the given list of model field names to the set of fields to exclude from loading from the database when automatic column selection is done. Add the new field names to any existing field names that are deferred (or removed from any existing field names that are marked as the only ones for immediate loading). """ # Fields on related models are stored in the literal double-underscore # format, so that we can use a set datastructure. We do the foo__bar # splitting and handling when computing the SQL column names (as part of # get_columns()). existing, defer = self.deferred_loading if defer: # Add to existing deferred names. self.deferred_loading = existing.union(field_names), True else: # Remove names from the set of any existing "immediate load" names. self.deferred_loading = existing.difference(field_names), False def add_immediate_loading(self, field_names): """ Add the given list of model field names to the set of fields to retrieve when the SQL is executed ("immediate loading" fields). The field names replace any existing immediate loading field names. If there are field names already specified for deferred loading, remove those names from the new field_names before storing the new names for immediate loading. (That is, immediate loading overrides any existing immediate values, but respects existing deferrals.) """ existing, defer = self.deferred_loading field_names = set(field_names) if 'pk' in field_names: field_names.remove('pk') field_names.add(self.get_meta().pk.name) if defer: # Remove any existing deferred names from the current set before # setting the new names. self.deferred_loading = field_names.difference(existing), False else: # Replace any existing "immediate load" field names. self.deferred_loading = frozenset(field_names), False def get_loaded_field_names(self): """ If any fields are marked to be deferred, return a dictionary mapping models to a set of names in those fields that will be loaded. If a model is not in the returned dictionary, none of its fields are deferred. If no fields are marked for deferral, return an empty dictionary. 
""" # We cache this because we call this function multiple times # (compiler.fill_related_selections, query.iterator) try: return self._loaded_field_names_cache except AttributeError: collection = {} self.deferred_to_data(collection, self.get_loaded_field_names_cb) self._loaded_field_names_cache = collection return collection def get_loaded_field_names_cb(self, target, model, fields): """Callback used by get_deferred_field_names().""" target[model] = {f.attname for f in fields} def set_annotation_mask(self, names): """Set the mask of annotations that will be returned by the SELECT.""" if names is None: self.annotation_select_mask = None else: self.annotation_select_mask = set(names) self._annotation_select_cache = None def append_annotation_mask(self, names): if self.annotation_select_mask is not None: self.set_annotation_mask(self.annotation_select_mask.union(names)) def set_extra_mask(self, names): """ Set the mask of extra select items that will be returned by SELECT. Don't remove them from the Query since they might be used later. """ if names is None: self.extra_select_mask = None else: self.extra_select_mask = set(names) self._extra_select_cache = None def set_values(self, fields): self.select_related = False self.clear_deferred_loading() self.clear_select_fields() if fields: field_names = [] extra_names = [] annotation_names = [] if not self.extra and not self.annotations: # Shortcut - if there are no extra or annotations, then # the values() clause must be just field names. field_names = list(fields) else: self.default_cols = False for f in fields: if f in self.extra_select: extra_names.append(f) elif f in self.annotation_select: annotation_names.append(f) else: field_names.append(f) self.set_extra_mask(extra_names) self.set_annotation_mask(annotation_names) selected = frozenset(field_names + extra_names + annotation_names) else: field_names = [f.attname for f in self.model._meta.concrete_fields] selected = frozenset(field_names) # Selected annotations must be known before setting the GROUP BY # clause. if self.group_by is True: self.add_fields((f.attname for f in self.model._meta.concrete_fields), False) # Disable GROUP BY aliases to avoid orphaning references to the # SELECT clause which is about to be cleared. self.set_group_by(allow_aliases=False) self.clear_select_fields() elif self.group_by: # Resolve GROUP BY annotation references if they are not part of # the selected fields anymore. group_by = [] for expr in self.group_by: if isinstance(expr, Ref) and expr.refs not in selected: expr = self.annotations[expr.refs] group_by.append(expr) self.group_by = tuple(group_by) self.values_select = tuple(field_names) self.add_fields(field_names, True) @property def annotation_select(self): """ Return the dictionary of aggregate columns that are not masked and should be used in the SELECT clause. Cache this result for performance. 
""" if self._annotation_select_cache is not None: return self._annotation_select_cache elif not self.annotations: return {} elif self.annotation_select_mask is not None: self._annotation_select_cache = { k: v for k, v in self.annotations.items() if k in self.annotation_select_mask } return self._annotation_select_cache else: return self.annotations @property def extra_select(self): if self._extra_select_cache is not None: return self._extra_select_cache if not self.extra: return {} elif self.extra_select_mask is not None: self._extra_select_cache = { k: v for k, v in self.extra.items() if k in self.extra_select_mask } return self._extra_select_cache else: return self.extra def trim_start(self, names_with_path): """ Trim joins from the start of the join path. The candidates for trim are the PathInfos in names_with_path structure that are m2m joins. Also set the select column so the start matches the join. This method is meant to be used for generating the subquery joins & cols in split_exclude(). Return a lookup usable for doing outerq.filter(lookup=self) and a boolean indicating if the joins in the prefix contain a LEFT OUTER join. _""" all_paths = [] for _, paths in names_with_path: all_paths.extend(paths) contains_louter = False # Trim and operate only on tables that were generated for # the lookup part of the query. That is, avoid trimming # joins generated for F() expressions. lookup_tables = [ t for t in self.alias_map if t in self._lookup_joins or t == self.base_table ] for trimmed_paths, path in enumerate(all_paths): if path.m2m: break if self.alias_map[lookup_tables[trimmed_paths + 1]].join_type == LOUTER: contains_louter = True alias = lookup_tables[trimmed_paths] self.unref_alias(alias) # The path.join_field is a Rel, lets get the other side's field join_field = path.join_field.field # Build the filter prefix. paths_in_prefix = trimmed_paths trimmed_prefix = [] for name, path in names_with_path: if paths_in_prefix - len(path) < 0: break trimmed_prefix.append(name) paths_in_prefix -= len(path) trimmed_prefix.append( join_field.foreign_related_fields[0].name) trimmed_prefix = LOOKUP_SEP.join(trimmed_prefix) # Lets still see if we can trim the first join from the inner query # (that is, self). We can't do this for: # - LEFT JOINs because we would miss those rows that have nothing on # the outer side, # - INNER JOINs from filtered relations because we would miss their # filters. first_join = self.alias_map[lookup_tables[trimmed_paths + 1]] if first_join.join_type != LOUTER and not first_join.filtered_relation: select_fields = [r[0] for r in join_field.related_fields] select_alias = lookup_tables[trimmed_paths + 1] self.unref_alias(lookup_tables[trimmed_paths]) extra_restriction = join_field.get_extra_restriction( self.where_class, None, lookup_tables[trimmed_paths + 1]) if extra_restriction: self.where.add(extra_restriction, AND) else: # TODO: It might be possible to trim more joins from the start of the # inner query if it happens to have a longer join chain containing the # values in select_fields. Lets punt this one for now. select_fields = [r[1] for r in join_field.related_fields] select_alias = lookup_tables[trimmed_paths] # The found starting point is likely a Join instead of a BaseTable reference. # But the first entry in the query's FROM clause must not be a JOIN. 
for table in self.alias_map: if self.alias_refcount[table] > 0: self.alias_map[table] = BaseTable(self.alias_map[table].table_name, table) break self.set_select([f.get_col(select_alias) for f in select_fields]) return trimmed_prefix, contains_louter def is_nullable(self, field): """ Check if the given field should be treated as nullable. Some backends treat '' as null and Django treats such fields as nullable for those backends. In such situations field.null can be False even if we should treat the field as nullable. """ # We need to use DEFAULT_DB_ALIAS here, as QuerySet does not have # (nor should it have) knowledge of which connection is going to be # used. The proper fix would be to defer all decisions where # is_nullable() is needed to the compiler stage, but that is not easy # to do currently. return ( connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and field.empty_strings_allowed ) or field.null def get_order_dir(field, default='ASC'): """ Return the field name and direction for an order specification. For example, '-foo' is returned as ('foo', 'DESC'). The 'default' param is used to indicate which way no prefix (or a '+' prefix) should sort. The '-' prefix always sorts the opposite way. """ dirn = ORDER_DIR[default] if field[0] == '-': return field[1:], dirn[1] return field, dirn[0] def add_to_dict(data, key, value): """ Add "value" to the set of values for "key", whether or not "key" already exists. """ if key in data: data[key].add(value) else: data[key] = {value} def is_reverse_o2o(field): """ Check if the given field is reverse-o2o. The field is expected to be some sort of relation field or related object. """ return field.is_relation and field.one_to_one and not field.concrete class JoinPromoter: """ A class to abstract away join promotion problems for complex filter conditions. """ def __init__(self, connector, num_children, negated): self.connector = connector self.negated = negated if self.negated: if connector == AND: self.effective_connector = OR else: self.effective_connector = AND else: self.effective_connector = self.connector self.num_children = num_children # Maps of table alias to how many times it is seen as required for # inner and/or outer joins. self.votes = Counter() def add_votes(self, votes): """ Add single vote per item to self.votes. Parameter can be any iterable. """ self.votes.update(votes) def update_join_types(self, query): """ Change join types so that the generated query is as efficient as possible, but still correct. So, change as many joins as possible to INNER, but don't make OUTER joins INNER if that could remove results from the query. """ to_promote = set() to_demote = set() # The effective_connector is used so that NOT (a AND b) is treated # similarly to (a OR b) for join promotion. for table, votes in self.votes.items(): # We must use outer joins in OR case when the join isn't contained # in all of the joins. Otherwise the INNER JOIN itself could remove # valid results. Consider the case where a model with rel_a and # rel_b relations is queried with rel_a__col=1 | rel_b__col=2. Now, # if rel_a join doesn't produce any results is null (for example # reverse foreign key or null value in direct foreign key), and # there is a matching row in rel_b with col=2, then an INNER join # to rel_a would remove a valid match from the query. So, we need # to promote any existing INNER to LOUTER (it is possible this # promotion in turn will be demoted later on). 
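        # Illustrative walk-through (not part of the original source): for a
        # filter such as .filter(Q(rel_a__col=1) | Q(rel_b__col=2)) the
        # promoter is created as JoinPromoter(OR, num_children=2, negated=False)
        # and receives one vote for the rel_a join and one for the rel_b join.
        # Each join was voted for by fewer children than num_children, so both
        # are promoted to LOUTER by the branch just below.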
if self.effective_connector == 'OR' and votes < self.num_children: to_promote.add(table) # If connector is AND and there is a filter that can match only # when there is a joinable row, then use INNER. For example, in # rel_a__col=1 & rel_b__col=2, if either of the rels produce NULL # as join output, then the col=1 or col=2 can't match (as # NULL=anything is always false). # For the OR case, if all children voted for a join to be inner, # then we can use INNER for the join. For example: # (rel_a__col__icontains=Alex | rel_a__col__icontains=Russell) # then if rel_a doesn't produce any rows, the whole condition # can't match. Hence we can safely use INNER join. if self.effective_connector == 'AND' or ( self.effective_connector == 'OR' and votes == self.num_children): to_demote.add(table) # Finally, what happens in cases where we have: # (rel_a__col=1|rel_b__col=2) & rel_a__col__gte=0 # Now, we first generate the OR clause, and promote joins for it # in the first if branch above. Both rel_a and rel_b are promoted # to LOUTER joins. After that we do the AND case. The OR case # voted no inner joins but the rel_a__col__gte=0 votes inner join # for rel_a. We demote it back to INNER join (in AND case a single # vote is enough). The demotion is OK, if rel_a doesn't produce # rows, then the rel_a__col__gte=0 clause can't be true, and thus # the whole clause must be false. So, it is safe to use INNER # join. # Note that in this example we could just as well have the __gte # clause and the OR clause swapped. Or we could replace the __gte # clause with an OR clause containing rel_a__col=1|rel_a__col=2, # and again we could safely demote to INNER. query.promote_joins(to_promote) query.demote_joins(to_demote) return to_demote
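# Illustrative usage sketch for the get_order_dir() helper defined above
# (appended for clarity; not part of the original module):
#
#   get_order_dir('-created')                 # -> ('created', 'DESC')
#   get_order_dir('created')                  # -> ('created', 'ASC')
#   get_order_dir('created', default='DESC')  # -> ('created', 'DESC')
#
# ORDER_DIR maps 'ASC' to ('ASC', 'DESC') and 'DESC' to ('DESC', 'ASC'), so a
# leading '-' always selects the second, opposite, direction.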
e026987035117f14a9d5156526114c9e25b68f38b8a9801291e12257f66468d5
import collections import re from functools import partial from itertools import chain from django.core.exceptions import EmptyResultSet, FieldError from django.db import DatabaseError, NotSupportedError from django.db.models.constants import LOOKUP_SEP from django.db.models.expressions import F, OrderBy, RawSQL, Ref, Value from django.db.models.functions import Cast, Random from django.db.models.query_utils import Q, select_related_descend from django.db.models.sql.constants import ( CURSOR, GET_ITERATOR_CHUNK_SIZE, MULTI, NO_RESULTS, ORDER_DIR, SINGLE, ) from django.db.models.sql.query import Query, get_order_dir from django.db.transaction import TransactionManagementError from django.utils.functional import cached_property from django.utils.hashable import make_hashable from django.utils.regex_helper import _lazy_re_compile class SQLCompiler: # Multiline ordering SQL clause may appear from RawSQL. ordering_parts = _lazy_re_compile( r'^(.*)\s(?:ASC|DESC).*', re.MULTILINE | re.DOTALL, ) def __init__(self, query, connection, using): self.query = query self.connection = connection self.using = using self.quote_cache = {'*': '*'} # The select, klass_info, and annotations are needed by QuerySet.iterator() # these are set as a side-effect of executing the query. Note that we calculate # separately a list of extra select columns needed for grammatical correctness # of the query, but these columns are not included in self.select. self.select = None self.annotation_col_map = None self.klass_info = None self._meta_ordering = None def setup_query(self): if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map): self.query.get_initial_alias() self.select, self.klass_info, self.annotation_col_map = self.get_select() self.col_count = len(self.select) def pre_sql_setup(self): """ Do any necessary class setup immediately prior to producing SQL. This is for things that can't necessarily be done in __init__ because we might not have all the pieces in place at that time. """ self.setup_query() order_by = self.get_order_by() self.where, self.having = self.query.where.split_having() extra_select = self.get_extra_select(order_by, self.select) self.has_extra_select = bool(extra_select) group_by = self.get_group_by(self.select + extra_select, order_by) return extra_select, order_by, group_by def get_group_by(self, select, order_by): """ Return a list of 2-tuples of form (sql, params). The logic of what exactly the GROUP BY clause contains is hard to describe in other words than "if it passes the test suite, then it is correct". """ # Some examples: # SomeModel.objects.annotate(Count('somecol')) # GROUP BY: all fields of the model # # SomeModel.objects.values('name').annotate(Count('somecol')) # GROUP BY: name # # SomeModel.objects.annotate(Count('somecol')).values('name') # GROUP BY: all cols of the model # # SomeModel.objects.values('name', 'pk').annotate(Count('somecol')).values('pk') # GROUP BY: name, pk # # SomeModel.objects.values('name').annotate(Count('somecol')).values('pk') # GROUP BY: name, pk # # In fact, the self.query.group_by is the minimal set to GROUP BY. It # can't be ever restricted to a smaller set, but additional columns in # HAVING, ORDER BY, and SELECT clauses are added to it. 
Unfortunately # the end result is that it is impossible to force the query to have # a chosen GROUP BY clause - you can almost do this by using the form: # .values(*wanted_cols).annotate(AnAggregate()) # but any later annotations, extra selects, values calls that # refer some column outside of the wanted_cols, order_by, or even # filter calls can alter the GROUP BY clause. # The query.group_by is either None (no GROUP BY at all), True # (group by select fields), or a list of expressions to be added # to the group by. if self.query.group_by is None: return [] expressions = [] if self.query.group_by is not True: # If the group by is set to a list (by .values() call most likely), # then we need to add everything in it to the GROUP BY clause. # Backwards compatibility hack for setting query.group_by. Remove # when we have public API way of forcing the GROUP BY clause. # Converts string references to expressions. for expr in self.query.group_by: if not hasattr(expr, 'as_sql'): expressions.append(self.query.resolve_ref(expr)) else: expressions.append(expr) # Note that even if the group_by is set, it is only the minimal # set to group by. So, we need to add cols in select, order_by, and # having into the select in any case. ref_sources = { expr.source for expr in expressions if isinstance(expr, Ref) } for expr, _, _ in select: # Skip members of the select clause that are already included # by reference. if expr in ref_sources: continue cols = expr.get_group_by_cols() for col in cols: expressions.append(col) if not self._meta_ordering: for expr, (sql, params, is_ref) in order_by: # Skip references to the SELECT clause, as all expressions in # the SELECT clause are already part of the GROUP BY. if not is_ref: expressions.extend(expr.get_group_by_cols()) having_group_by = self.having.get_group_by_cols() if self.having else () for expr in having_group_by: expressions.append(expr) result = [] seen = set() expressions = self.collapse_group_by(expressions, having_group_by) for expr in expressions: sql, params = self.compile(expr) sql, params = expr.select_format(self, sql, params) params_hash = make_hashable(params) if (sql, params_hash) not in seen: result.append((sql, params)) seen.add((sql, params_hash)) return result def collapse_group_by(self, expressions, having): # If the DB can group by primary key, then group by the primary key of # query's main model. Note that for PostgreSQL the GROUP BY clause must # include the primary key of every table, but for MySQL it is enough to # have the main table's primary key. if self.connection.features.allows_group_by_pk: # Determine if the main model's primary key is in the query. pk = None for expr in expressions: # Is this a reference to query's base table primary key? If the # expression isn't a Col-like, then skip the expression. if (getattr(expr, 'target', None) == self.query.model._meta.pk and getattr(expr, 'alias', None) == self.query.base_table): pk = expr break # If the main model's primary key is in the query, group by that # field, HAVING expressions, and expressions associated with tables # that don't have a primary key included in the grouped columns. 
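            # Illustrative example (sketch; model names are hypothetical): on
            # a backend with allows_group_by_pk such as MySQL,
            # Book.objects.annotate(Count('chapters')) lets
            # "GROUP BY book.id, book.title, ..." collapse to "GROUP BY book.id",
            # because every other selected book column is functionally
            # dependent on the primary key.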
if pk: pk_aliases = { expr.alias for expr in expressions if hasattr(expr, 'target') and expr.target.primary_key } expressions = [pk] + [ expr for expr in expressions if expr in having or ( getattr(expr, 'alias', None) is not None and expr.alias not in pk_aliases ) ] elif self.connection.features.allows_group_by_selected_pks: # Filter out all expressions associated with a table's primary key # present in the grouped columns. This is done by identifying all # tables that have their primary key included in the grouped # columns and removing non-primary key columns referring to them. # Unmanaged models are excluded because they could be representing # database views on which the optimization might not be allowed. pks = { expr for expr in expressions if ( hasattr(expr, 'target') and expr.target.primary_key and self.connection.features.allows_group_by_selected_pks_on_model(expr.target.model) ) } aliases = {expr.alias for expr in pks} expressions = [ expr for expr in expressions if expr in pks or getattr(expr, 'alias', None) not in aliases ] return expressions def get_select(self): """ Return three values: - a list of 3-tuples of (expression, (sql, params), alias) - a klass_info structure, - a dictionary of annotations The (sql, params) is what the expression will produce, and alias is the "AS alias" for the column (possibly None). The klass_info structure contains the following information: - The base model of the query. - Which columns for that model are present in the query (by position of the select clause). - related_klass_infos: [f, klass_info] to descent into The annotations is a dictionary of {'attname': column position} values. """ select = [] klass_info = None annotations = {} select_idx = 0 for alias, (sql, params) in self.query.extra_select.items(): annotations[alias] = select_idx select.append((RawSQL(sql, params), alias)) select_idx += 1 assert not (self.query.select and self.query.default_cols) if self.query.default_cols: cols = self.get_default_columns() else: # self.query.select is a special case. These columns never go to # any model. cols = self.query.select if cols: select_list = [] for col in cols: select_list.append(select_idx) select.append((col, None)) select_idx += 1 klass_info = { 'model': self.query.model, 'select_fields': select_list, } for alias, annotation in self.query.annotation_select.items(): annotations[alias] = select_idx select.append((annotation, alias)) select_idx += 1 if self.query.select_related: related_klass_infos = self.get_related_selections(select) klass_info['related_klass_infos'] = related_klass_infos def get_select_from_parent(klass_info): for ki in klass_info['related_klass_infos']: if ki['from_parent']: ki['select_fields'] = (klass_info['select_fields'] + ki['select_fields']) get_select_from_parent(ki) get_select_from_parent(klass_info) ret = [] for col, alias in select: try: sql, params = self.compile(col) except EmptyResultSet: # Select a predicate that's always False. 
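                # Illustrative note (assumption about a typical trigger): an
                # annotation whose subquery can never match anything, e.g.
                # Exists(SomeModel.objects.none()), may raise EmptyResultSet
                # while its column is compiled; selecting the literal 0 keeps
                # that column's slot in the result set while staying false on
                # every row.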
sql, params = '0', () else: sql, params = col.select_format(self, sql, params) ret.append((col, (sql, params), alias)) return ret, klass_info, annotations def _order_by_pairs(self): if self.query.extra_order_by: ordering = self.query.extra_order_by elif not self.query.default_ordering: ordering = self.query.order_by elif self.query.order_by: ordering = self.query.order_by elif self.query.get_meta().ordering: ordering = self.query.get_meta().ordering self._meta_ordering = ordering else: ordering = [] if self.query.standard_ordering: default_order, _ = ORDER_DIR['ASC'] else: default_order, _ = ORDER_DIR['DESC'] for field in ordering: if hasattr(field, 'resolve_expression'): if isinstance(field, Value): # output_field must be resolved for constants. field = Cast(field, field.output_field) if not isinstance(field, OrderBy): field = field.asc() if not self.query.standard_ordering: field = field.copy() field.reverse_ordering() yield field, False continue if field == '?': # random yield OrderBy(Random()), False continue col, order = get_order_dir(field, default_order) descending = order == 'DESC' if col in self.query.annotation_select: # Reference to expression in SELECT clause yield ( OrderBy( Ref(col, self.query.annotation_select[col]), descending=descending, ), True, ) continue if col in self.query.annotations: # References to an expression which is masked out of the SELECT # clause. if self.query.combinator and self.select: # Don't use the resolved annotation because other # combinated queries might define it differently. expr = F(col) else: expr = self.query.annotations[col] if isinstance(expr, Value): # output_field must be resolved for constants. expr = Cast(expr, expr.output_field) yield OrderBy(expr, descending=descending), False continue if '.' in field: # This came in through an extra(order_by=...) addition. Pass it # on verbatim. table, col = col.split('.', 1) yield ( OrderBy( RawSQL('%s.%s' % (self.quote_name_unless_alias(table), col), []), descending=descending, ), False, ) continue if self.query.extra and col in self.query.extra: if col in self.query.extra_select: yield ( OrderBy(Ref(col, RawSQL(*self.query.extra[col])), descending=descending), True, ) else: yield ( OrderBy(RawSQL(*self.query.extra[col]), descending=descending), False, ) else: if self.query.combinator and self.select: # Don't use the first model's field because other # combinated queries might define it differently. yield OrderBy(F(col), descending=descending), False else: # 'col' is of the form 'field' or 'field1__field2' or # '-field1__field2__field', etc. yield from self.find_ordering_name( field, self.query.get_meta(), default_order=default_order, ) def get_order_by(self): """ Return a list of 2-tuples of the form (expr, (sql, params, is_ref)) for the ORDER BY clause. The order_by clause can alter the select clause (for example it can add aliases to clauses that do not yet have one, or it can add totally new select clauses). """ result = [] seen = set() for expr, is_ref in self._order_by_pairs(): resolved = expr.resolve_expression(self.query, allow_joins=True, reuse=None) if self.query.combinator and self.select: src = resolved.get_source_expressions()[0] expr_src = expr.get_source_expressions()[0] # Relabel order by columns to raw numbers if this is a combined # query; necessary since the columns can't be referenced by the # fully qualified name and the simple column names may collide. 
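                # Illustrative example (not part of the original source): for
                #   qs1.union(qs2).order_by('name')
                # where 'name' is, say, the second selected column, the
                # resolved ORDER BY term is rewritten below to the positional
                # reference "2", because column names cannot reliably address
                # the combined (UNION/INTERSECT/EXCEPT) result.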
for idx, (sel_expr, _, col_alias) in enumerate(self.select): if is_ref and col_alias == src.refs: src = src.source elif col_alias and not ( isinstance(expr_src, F) and col_alias == expr_src.name ): continue if src == sel_expr: resolved.set_source_expressions([RawSQL('%d' % (idx + 1), ())]) break else: if col_alias: raise DatabaseError('ORDER BY term does not match any column in the result set.') # Add column used in ORDER BY clause to the selected # columns and to each combined query. order_by_idx = len(self.query.select) + 1 col_name = f'__orderbycol{order_by_idx}' for q in self.query.combined_queries: q.add_annotation(expr_src, col_name) self.query.add_select_col(resolved, col_name) resolved.set_source_expressions([RawSQL(f'{order_by_idx}', ())]) sql, params = self.compile(resolved) # Don't add the same column twice, but the order direction is # not taken into account so we strip it. When this entire method # is refactored into expressions, then we can check each part as we # generate it. without_ordering = self.ordering_parts.search(sql)[1] params_hash = make_hashable(params) if (without_ordering, params_hash) in seen: continue seen.add((without_ordering, params_hash)) result.append((resolved, (sql, params, is_ref))) return result def get_extra_select(self, order_by, select): extra_select = [] if self.query.distinct and not self.query.distinct_fields: select_sql = [t[1] for t in select] for expr, (sql, params, is_ref) in order_by: without_ordering = self.ordering_parts.search(sql)[1] if not is_ref and (without_ordering, params) not in select_sql: extra_select.append((expr, (without_ordering, params), None)) return extra_select def quote_name_unless_alias(self, name): """ A wrapper around connection.ops.quote_name that doesn't quote aliases for table names. This avoids problems with some SQL dialects that treat quoted strings specially (e.g. PostgreSQL). """ if name in self.quote_cache: return self.quote_cache[name] if ((name in self.query.alias_map and name not in self.query.table_map) or name in self.query.extra_select or ( self.query.external_aliases.get(name) and name not in self.query.table_map)): self.quote_cache[name] = name return name r = self.connection.ops.quote_name(name) self.quote_cache[name] = r return r def compile(self, node): vendor_impl = getattr(node, 'as_' + self.connection.vendor, None) if vendor_impl: sql, params = vendor_impl(self, self.connection) else: sql, params = node.as_sql(self, self.connection) return sql, params def get_combinator_sql(self, combinator, all): features = self.connection.features compilers = [ query.get_compiler(self.using, self.connection) for query in self.query.combined_queries if not query.is_empty() ] if not features.supports_slicing_ordering_in_compound: for query, compiler in zip(self.query.combined_queries, compilers): if query.low_mark or query.high_mark: raise DatabaseError('LIMIT/OFFSET not allowed in subqueries of compound statements.') if compiler.get_order_by(): raise DatabaseError('ORDER BY not allowed in subqueries of compound statements.') parts = () for compiler in compilers: try: # If the columns list is limited, then all combined queries # must have the same columns list. Set the selects defined on # the query on all combined queries, if not already set. 
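                # Illustrative example (sketch; the Editor model is
                # hypothetical): in
                #   Author.objects.values('name').union(Editor.objects.all())
                # the second combined query carries no values_select of its
                # own, so it is cloned below and given the same ('name',)
                # selection, keeping both arms of the UNION column-compatible.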
if not compiler.query.values_select and self.query.values_select: compiler.query = compiler.query.clone() compiler.query.set_values(( *self.query.extra_select, *self.query.values_select, *self.query.annotation_select, )) part_sql, part_args = compiler.as_sql() if compiler.query.combinator: # Wrap in a subquery if wrapping in parentheses isn't # supported. if not features.supports_parentheses_in_compound: part_sql = 'SELECT * FROM ({})'.format(part_sql) # Add parentheses when combining with compound query if not # already added for all compound queries. elif not features.supports_slicing_ordering_in_compound: part_sql = '({})'.format(part_sql) parts += ((part_sql, part_args),) except EmptyResultSet: # Omit the empty queryset with UNION and with DIFFERENCE if the # first queryset is nonempty. if combinator == 'union' or (combinator == 'difference' and parts): continue raise if not parts: raise EmptyResultSet combinator_sql = self.connection.ops.set_operators[combinator] if all and combinator == 'union': combinator_sql += ' ALL' braces = '({})' if features.supports_slicing_ordering_in_compound else '{}' sql_parts, args_parts = zip(*((braces.format(sql), args) for sql, args in parts)) result = [' {} '.format(combinator_sql).join(sql_parts)] params = [] for part in args_parts: params.extend(part) return result, params def as_sql(self, with_limits=True, with_col_aliases=False): """ Create the SQL for this query. Return the SQL string and list of parameters. If 'with_limits' is False, any limit/offset information is not included in the query. """ refcounts_before = self.query.alias_refcount.copy() try: extra_select, order_by, group_by = self.pre_sql_setup() for_update_part = None # Is a LIMIT/OFFSET clause needed? with_limit_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark) combinator = self.query.combinator features = self.connection.features if combinator: if not getattr(features, 'supports_select_{}'.format(combinator)): raise NotSupportedError('{} is not supported on this database backend.'.format(combinator)) result, params = self.get_combinator_sql(combinator, self.query.combinator_all) else: distinct_fields, distinct_params = self.get_distinct() # This must come after 'select', 'ordering', and 'distinct' # (see docstring of get_from_clause() for details). from_, f_params = self.get_from_clause() where, w_params = self.compile(self.where) if self.where is not None else ("", []) having, h_params = self.compile(self.having) if self.having is not None else ("", []) result = ['SELECT'] params = [] if self.query.distinct: distinct_result, distinct_params = self.connection.ops.distinct_sql( distinct_fields, distinct_params, ) result += distinct_result params += distinct_params out_cols = [] col_idx = 1 for _, (s_sql, s_params), alias in self.select + extra_select: if alias: s_sql = '%s AS %s' % (s_sql, self.connection.ops.quote_name(alias)) elif with_col_aliases: s_sql = '%s AS %s' % (s_sql, 'Col%d' % col_idx) col_idx += 1 params.extend(s_params) out_cols.append(s_sql) result += [', '.join(out_cols), 'FROM', *from_] params.extend(f_params) if self.query.select_for_update and self.connection.features.has_select_for_update: if self.connection.get_autocommit(): raise TransactionManagementError('select_for_update cannot be used outside of a transaction.') if with_limit_offset and not self.connection.features.supports_select_for_update_with_limit: raise NotSupportedError( 'LIMIT/OFFSET is not supported with ' 'select_for_update on this database backend.' 
) nowait = self.query.select_for_update_nowait skip_locked = self.query.select_for_update_skip_locked of = self.query.select_for_update_of no_key = self.query.select_for_no_key_update # If it's a NOWAIT/SKIP LOCKED/OF/NO KEY query but the # backend doesn't support it, raise NotSupportedError to # prevent a possible deadlock. if nowait and not self.connection.features.has_select_for_update_nowait: raise NotSupportedError('NOWAIT is not supported on this database backend.') elif skip_locked and not self.connection.features.has_select_for_update_skip_locked: raise NotSupportedError('SKIP LOCKED is not supported on this database backend.') elif of and not self.connection.features.has_select_for_update_of: raise NotSupportedError('FOR UPDATE OF is not supported on this database backend.') elif no_key and not self.connection.features.has_select_for_no_key_update: raise NotSupportedError( 'FOR NO KEY UPDATE is not supported on this ' 'database backend.' ) for_update_part = self.connection.ops.for_update_sql( nowait=nowait, skip_locked=skip_locked, of=self.get_select_for_update_of_arguments(), no_key=no_key, ) if for_update_part and self.connection.features.for_update_after_from: result.append(for_update_part) if where: result.append('WHERE %s' % where) params.extend(w_params) grouping = [] for g_sql, g_params in group_by: grouping.append(g_sql) params.extend(g_params) if grouping: if distinct_fields: raise NotImplementedError('annotate() + distinct(fields) is not implemented.') order_by = order_by or self.connection.ops.force_no_ordering() result.append('GROUP BY %s' % ', '.join(grouping)) if self._meta_ordering: order_by = None if having: result.append('HAVING %s' % having) params.extend(h_params) if self.query.explain_query: result.insert(0, self.connection.ops.explain_query_prefix( self.query.explain_format, **self.query.explain_options )) if order_by: ordering = [] for _, (o_sql, o_params, _) in order_by: ordering.append(o_sql) params.extend(o_params) result.append('ORDER BY %s' % ', '.join(ordering)) if with_limit_offset: result.append(self.connection.ops.limit_offset_sql(self.query.low_mark, self.query.high_mark)) if for_update_part and not self.connection.features.for_update_after_from: result.append(for_update_part) if self.query.subquery and extra_select: # If the query is used as a subquery, the extra selects would # result in more columns than the left-hand side expression is # expecting. This can happen when a subquery uses a combination # of order_by() and distinct(), forcing the ordering expressions # to be selected as well. Wrap the query in another subquery # to exclude extraneous selects. sub_selects = [] sub_params = [] for index, (select, _, alias) in enumerate(self.select, start=1): if not alias and with_col_aliases: alias = 'col%d' % index if alias: sub_selects.append("%s.%s" % ( self.connection.ops.quote_name('subquery'), self.connection.ops.quote_name(alias), )) else: select_clone = select.relabeled_clone({select.alias: 'subquery'}) subselect, subparams = select_clone.as_sql(self, self.connection) sub_selects.append(subselect) sub_params.extend(subparams) return 'SELECT %s FROM (%s) subquery' % ( ', '.join(sub_selects), ' '.join(result), ), tuple(sub_params + params) return ' '.join(result), tuple(params) finally: # Finally do cleanup - get rid of the joins we created above. self.query.reset_refcounts(refcounts_before) def get_default_columns(self, start_alias=None, opts=None, from_parent=None): """ Compute the default columns for selecting every field in the base model. 
Will sometimes be called to pull in related models (e.g. via select_related), in which case "opts" and "start_alias" will be given to provide a starting point for the traversal. Return a list of strings, quoted appropriately for use in SQL directly, as well as a set of aliases used in the select statement (if 'as_pairs' is True, return a list of (alias, col_name) pairs instead of strings as the first component and None as the second component). """ result = [] if opts is None: opts = self.query.get_meta() only_load = self.deferred_to_columns() start_alias = start_alias or self.query.get_initial_alias() # The 'seen_models' is used to optimize checking the needed parent # alias for a given field. This also includes None -> start_alias to # be used by local fields. seen_models = {None: start_alias} for field in opts.concrete_fields: model = field.model._meta.concrete_model # A proxy model will have a different model and concrete_model. We # will assign None if the field belongs to this model. if model == opts.model: model = None if from_parent and model is not None and issubclass( from_parent._meta.concrete_model, model._meta.concrete_model): # Avoid loading data for already loaded parents. # We end up here in the case select_related() resolution # proceeds from parent model to child model. In that case the # parent model data is already present in the SELECT clause, # and we want to avoid reloading the same data again. continue if field.model in only_load and field.attname not in only_load[field.model]: continue alias = self.query.join_parent_model(opts, model, start_alias, seen_models) column = field.get_col(alias) result.append(column) return result def get_distinct(self): """ Return a quoted list of fields to use in DISTINCT ON part of the query. This method can alter the tables in the query, and thus it must be called before get_from_clause(). """ result = [] params = [] opts = self.query.get_meta() for name in self.query.distinct_fields: parts = name.split(LOOKUP_SEP) _, targets, alias, joins, path, _, transform_function = self._setup_joins(parts, opts, None) targets, alias, _ = self.query.trim_joins(targets, joins, path) for target in targets: if name in self.query.annotation_select: result.append(name) else: r, p = self.compile(transform_function(target, alias)) result.append(r) params.append(p) return result, params def find_ordering_name(self, name, opts, alias=None, default_order='ASC', already_seen=None): """ Return the table alias (the name might be ambiguous, the alias will not be) and column name for ordering by the given 'name' parameter. The 'name' is of the form 'field1__field2__...__fieldN'. """ name, order = get_order_dir(name, default_order) descending = order == 'DESC' pieces = name.split(LOOKUP_SEP) field, targets, alias, joins, path, opts, transform_function = self._setup_joins(pieces, opts, alias) # If we get to this point and the field is a relation to another model, # append the default ordering for that model unless it is the pk # shortcut or the attribute name of the field that is specified. if ( field.is_relation and opts.ordering and getattr(field, 'attname', None) != pieces[-1] and name != 'pk' ): # Firstly, avoid infinite loops. 
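            # Illustrative example (hypothetical models): ordering Book by
            # 'author' when Author.Meta.ordering = ['name'] recurses into
            # find_ordering_name('name', ...) for the related model; the
            # join_tuple bookkeeping below stops a Meta.ordering that points
            # back at the same relation chain from recursing forever.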
already_seen = already_seen or set() join_tuple = tuple(getattr(self.query.alias_map[j], 'join_cols', None) for j in joins) if join_tuple in already_seen: raise FieldError('Infinite loop caused by ordering.') already_seen.add(join_tuple) results = [] for item in opts.ordering: if hasattr(item, 'resolve_expression') and not isinstance(item, OrderBy): item = item.desc() if descending else item.asc() if isinstance(item, OrderBy): results.append((item, False)) continue results.extend(self.find_ordering_name(item, opts, alias, order, already_seen)) return results targets, alias, _ = self.query.trim_joins(targets, joins, path) return [(OrderBy(transform_function(t, alias), descending=descending), False) for t in targets] def _setup_joins(self, pieces, opts, alias): """ Helper method for get_order_by() and get_distinct(). get_ordering() and get_distinct() must produce same target columns on same input, as the prefixes of get_ordering() and get_distinct() must match. Executing SQL where this is not true is an error. """ alias = alias or self.query.get_initial_alias() field, targets, opts, joins, path, transform_function = self.query.setup_joins(pieces, opts, alias) alias = joins[-1] return field, targets, alias, joins, path, opts, transform_function def get_from_clause(self): """ Return a list of strings that are joined together to go after the "FROM" part of the query, as well as a list any extra parameters that need to be included. Subclasses, can override this to create a from-clause via a "select". This should only be called after any SQL construction methods that might change the tables that are needed. This means the select columns, ordering, and distinct must be done first. """ result = [] params = [] for alias in tuple(self.query.alias_map): if not self.query.alias_refcount[alias]: continue try: from_clause = self.query.alias_map[alias] except KeyError: # Extra tables can end up in self.tables, but not in the # alias_map if they aren't in a join. That's OK. We skip them. continue clause_sql, clause_params = self.compile(from_clause) result.append(clause_sql) params.extend(clause_params) for t in self.query.extra_tables: alias, _ = self.query.table_alias(t) # Only add the alias if it's not already present (the table_alias() # call increments the refcount, so an alias refcount of one means # this is the only reference). if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1: result.append(', %s' % self.quote_name_unless_alias(alias)) return result, params def get_related_selections(self, select, opts=None, root_alias=None, cur_depth=1, requested=None, restricted=None): """ Fill in the information needed for a select_related query. The current depth is measured as the number of connections away from the root model (for example, cur_depth=1 means we are looking at models with direct connections to the root model). """ def _get_field_choices(): direct_choices = (f.name for f in opts.fields if f.is_relation) reverse_choices = ( f.field.related_query_name() for f in opts.related_objects if f.field.unique ) return chain(direct_choices, reverse_choices, self.query._filtered_relations) related_klass_infos = [] if not restricted and cur_depth > self.query.max_depth: # We've recursed far enough; bail out. return related_klass_infos if not opts: opts = self.query.get_meta() root_alias = self.query.get_initial_alias() only_load = self.query.get_loaded_field_names() # Setup for the case when only particular related fields should be # included in the related selection. 
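        # Illustrative example (not part of the original source):
        #   Book.objects.select_related('author__hometown')
        # stores {'author': {'hometown': {}}} on query.select_related, so
        # 'restricted' becomes True below and only the requested relation
        # chain is descended into; a bare select_related() leaves
        # query.select_related as the boolean True.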
fields_found = set() if requested is None: restricted = isinstance(self.query.select_related, dict) if restricted: requested = self.query.select_related def get_related_klass_infos(klass_info, related_klass_infos): klass_info['related_klass_infos'] = related_klass_infos for f in opts.fields: field_model = f.model._meta.concrete_model fields_found.add(f.name) if restricted: next = requested.get(f.name, {}) if not f.is_relation: # If a non-related field is used like a relation, # or if a single non-relational field is given. if next or f.name in requested: raise FieldError( "Non-relational field given in select_related: '%s'. " "Choices are: %s" % ( f.name, ", ".join(_get_field_choices()) or '(none)', ) ) else: next = False if not select_related_descend(f, restricted, requested, only_load.get(field_model)): continue klass_info = { 'model': f.remote_field.model, 'field': f, 'reverse': False, 'local_setter': f.set_cached_value, 'remote_setter': f.remote_field.set_cached_value if f.unique else lambda x, y: None, 'from_parent': False, } related_klass_infos.append(klass_info) select_fields = [] _, _, _, joins, _, _ = self.query.setup_joins( [f.name], opts, root_alias) alias = joins[-1] columns = self.get_default_columns(start_alias=alias, opts=f.remote_field.model._meta) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next_klass_infos = self.get_related_selections( select, f.remote_field.model._meta, alias, cur_depth + 1, next, restricted) get_related_klass_infos(klass_info, next_klass_infos) if restricted: related_fields = [ (o.field, o.related_model) for o in opts.related_objects if o.field.unique and not o.many_to_many ] for f, model in related_fields: if not select_related_descend(f, restricted, requested, only_load.get(model), reverse=True): continue related_field_name = f.related_query_name() fields_found.add(related_field_name) join_info = self.query.setup_joins([related_field_name], opts, root_alias) alias = join_info.joins[-1] from_parent = issubclass(model, opts.model) and model is not opts.model klass_info = { 'model': model, 'field': f, 'reverse': True, 'local_setter': f.remote_field.set_cached_value, 'remote_setter': f.set_cached_value, 'from_parent': from_parent, } related_klass_infos.append(klass_info) select_fields = [] columns = self.get_default_columns( start_alias=alias, opts=model._meta, from_parent=opts.model) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next = requested.get(f.related_query_name(), {}) next_klass_infos = self.get_related_selections( select, model._meta, alias, cur_depth + 1, next, restricted) get_related_klass_infos(klass_info, next_klass_infos) def local_setter(obj, from_obj): # Set a reverse fk object when relation is non-empty. if from_obj: f.remote_field.set_cached_value(from_obj, obj) def remote_setter(name, obj, from_obj): setattr(from_obj, name, obj) for name in list(requested): # Filtered relations work only on the topmost level. 
if cur_depth > 1: break if name in self.query._filtered_relations: fields_found.add(name) f, _, join_opts, joins, _, _ = self.query.setup_joins([name], opts, root_alias) model = join_opts.model alias = joins[-1] from_parent = issubclass(model, opts.model) and model is not opts.model klass_info = { 'model': model, 'field': f, 'reverse': True, 'local_setter': local_setter, 'remote_setter': partial(remote_setter, name), 'from_parent': from_parent, } related_klass_infos.append(klass_info) select_fields = [] columns = self.get_default_columns( start_alias=alias, opts=model._meta, from_parent=opts.model, ) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next_requested = requested.get(name, {}) next_klass_infos = self.get_related_selections( select, opts=model._meta, root_alias=alias, cur_depth=cur_depth + 1, requested=next_requested, restricted=restricted, ) get_related_klass_infos(klass_info, next_klass_infos) fields_not_found = set(requested).difference(fields_found) if fields_not_found: invalid_fields = ("'%s'" % s for s in fields_not_found) raise FieldError( 'Invalid field name(s) given in select_related: %s. ' 'Choices are: %s' % ( ', '.join(invalid_fields), ', '.join(_get_field_choices()) or '(none)', ) ) return related_klass_infos def get_select_for_update_of_arguments(self): """ Return a quoted list of arguments for the SELECT FOR UPDATE OF part of the query. """ def _get_parent_klass_info(klass_info): concrete_model = klass_info['model']._meta.concrete_model for parent_model, parent_link in concrete_model._meta.parents.items(): parent_list = parent_model._meta.get_parent_list() yield { 'model': parent_model, 'field': parent_link, 'reverse': False, 'select_fields': [ select_index for select_index in klass_info['select_fields'] # Selected columns from a model or its parents. if ( self.select[select_index][0].target.model == parent_model or self.select[select_index][0].target.model in parent_list ) ], } def _get_first_selected_col_from_model(klass_info): """ Find the first selected column from a model. If it doesn't exist, don't lock a model. select_fields is filled recursively, so it also contains fields from the parent models. 
""" concrete_model = klass_info['model']._meta.concrete_model for select_index in klass_info['select_fields']: if self.select[select_index][0].target.model == concrete_model: return self.select[select_index][0] def _get_field_choices(): """Yield all allowed field paths in breadth-first search order.""" queue = collections.deque([(None, self.klass_info)]) while queue: parent_path, klass_info = queue.popleft() if parent_path is None: path = [] yield 'self' else: field = klass_info['field'] if klass_info['reverse']: field = field.remote_field path = parent_path + [field.name] yield LOOKUP_SEP.join(path) queue.extend( (path, klass_info) for klass_info in _get_parent_klass_info(klass_info) ) queue.extend( (path, klass_info) for klass_info in klass_info.get('related_klass_infos', []) ) result = [] invalid_names = [] for name in self.query.select_for_update_of: klass_info = self.klass_info if name == 'self': col = _get_first_selected_col_from_model(klass_info) else: for part in name.split(LOOKUP_SEP): klass_infos = ( *klass_info.get('related_klass_infos', []), *_get_parent_klass_info(klass_info), ) for related_klass_info in klass_infos: field = related_klass_info['field'] if related_klass_info['reverse']: field = field.remote_field if field.name == part: klass_info = related_klass_info break else: klass_info = None break if klass_info is None: invalid_names.append(name) continue col = _get_first_selected_col_from_model(klass_info) if col is not None: if self.connection.features.select_for_update_of_column: result.append(self.compile(col)[0]) else: result.append(self.quote_name_unless_alias(col.alias)) if invalid_names: raise FieldError( 'Invalid field name(s) given in select_for_update(of=(...)): %s. ' 'Only relational fields followed in the query are allowed. ' 'Choices are: %s.' % ( ', '.join(invalid_names), ', '.join(_get_field_choices()), ) ) return result def deferred_to_columns(self): """ Convert the self.deferred_loading data structure to mapping of table names to sets of column names which are to be loaded. Return the dictionary. """ columns = {} self.query.deferred_to_data(columns, self.query.get_loaded_field_names_cb) return columns def get_converters(self, expressions): converters = {} for i, expression in enumerate(expressions): if expression: backend_converters = self.connection.ops.get_db_converters(expression) field_converters = expression.get_db_converters(self.connection) if backend_converters or field_converters: converters[i] = (backend_converters + field_converters, expression) return converters def apply_converters(self, rows, converters): connection = self.connection converters = list(converters.items()) for row in map(list, rows): for pos, (convs, expression) in converters: value = row[pos] for converter in convs: value = converter(value, expression, connection) row[pos] = value yield row def results_iter(self, results=None, tuple_expected=False, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE): """Return an iterator over the results from executing this query.""" if results is None: results = self.execute_sql(MULTI, chunked_fetch=chunked_fetch, chunk_size=chunk_size) fields = [s[0] for s in self.select[0:self.col_count]] converters = self.get_converters(fields) rows = chain.from_iterable(results) if converters: rows = self.apply_converters(rows, converters) if tuple_expected: rows = map(tuple, rows) return rows def has_results(self): """ Backends (e.g. NoSQL) can override this in order to use optimized versions of "query has any results." 
""" return bool(self.execute_sql(SINGLE)) def execute_sql(self, result_type=MULTI, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE): """ Run the query against the database and return the result(s). The return value is a single data item if result_type is SINGLE, or an iterator over the results if the result_type is MULTI. result_type is either MULTI (use fetchmany() to retrieve all rows), SINGLE (only retrieve a single row), or None. In this last case, the cursor is returned if any query is executed, since it's used by subclasses such as InsertQuery). It's possible, however, that no query is needed, as the filters describe an empty set. In that case, None is returned, to avoid any unnecessary database interaction. """ result_type = result_type or NO_RESULTS try: sql, params = self.as_sql() if not sql: raise EmptyResultSet except EmptyResultSet: if result_type == MULTI: return iter([]) else: return if chunked_fetch: cursor = self.connection.chunked_cursor() else: cursor = self.connection.cursor() try: cursor.execute(sql, params) except Exception: # Might fail for server-side cursors (e.g. connection closed) cursor.close() raise if result_type == CURSOR: # Give the caller the cursor to process and close. return cursor if result_type == SINGLE: try: val = cursor.fetchone() if val: return val[0:self.col_count] return val finally: # done with the cursor cursor.close() if result_type == NO_RESULTS: cursor.close() return result = cursor_iter( cursor, self.connection.features.empty_fetchmany_value, self.col_count if self.has_extra_select else None, chunk_size, ) if not chunked_fetch or not self.connection.features.can_use_chunked_reads: try: # If we are using non-chunked reads, we return the same data # structure as normally, but ensure it is all read into memory # before going any further. Use chunked_fetch if requested, # unless the database doesn't support it. return list(result) finally: # done with the cursor cursor.close() return result def as_subquery_condition(self, alias, columns, compiler): qn = compiler.quote_name_unless_alias qn2 = self.connection.ops.quote_name for index, select_col in enumerate(self.query.select): lhs_sql, lhs_params = self.compile(select_col) rhs = '%s.%s' % (qn(alias), qn2(columns[index])) self.query.where.add( RawSQL('%s = %s' % (lhs_sql, rhs), lhs_params), 'AND') sql, params = self.as_sql() return 'EXISTS (%s)' % sql, params def explain_query(self): result = list(self.execute_sql()) # Some backends return 1 item tuples with strings, and others return # tuples with integers and strings. Flatten them out into strings. for row in result[0]: if not isinstance(row, str): yield ' '.join(str(c) for c in row) else: yield row class SQLInsertCompiler(SQLCompiler): returning_fields = None returning_params = tuple() def field_as_sql(self, field, val): """ Take a field and a value intended to be saved on that field, and return placeholder SQL and accompanying params. Check for raw values, expressions, and fields with get_placeholder() defined in that order. When field is None, consider the value raw and use it as the placeholder, with no corresponding parameters returned. """ if field is None: # A field value of None means the value is raw. sql, params = val, [] elif hasattr(val, 'as_sql'): # This is an expression, let's compile it. sql, params = self.compile(val) elif hasattr(field, 'get_placeholder'): # Some fields (e.g. geo fields) need special munging before # they can be inserted. 
sql, params = field.get_placeholder(val, self, self.connection), [val] else: # Return the common case for the placeholder sql, params = '%s', [val] # The following hook is only used by Oracle Spatial, which sometimes # needs to yield 'NULL' and [] as its placeholder and params instead # of '%s' and [None]. The 'NULL' placeholder is produced earlier by # OracleOperations.get_geom_placeholder(). The following line removes # the corresponding None parameter. See ticket #10888. params = self.connection.ops.modify_insert_params(sql, params) return sql, params def prepare_value(self, field, value): """ Prepare a value to be used in a query by resolving it if it is an expression and otherwise calling the field's get_db_prep_save(). """ if hasattr(value, 'resolve_expression'): value = value.resolve_expression(self.query, allow_joins=False, for_save=True) # Don't allow values containing Col expressions. They refer to # existing columns on a row, but in the case of insert the row # doesn't exist yet. if value.contains_column_references: raise ValueError( 'Failed to insert expression "%s" on %s. F() expressions ' 'can only be used to update, not to insert.' % (value, field) ) if value.contains_aggregate: raise FieldError( 'Aggregate functions are not allowed in this query ' '(%s=%r).' % (field.name, value) ) if value.contains_over_clause: raise FieldError( 'Window expressions are not allowed in this query (%s=%r).' % (field.name, value) ) else: value = field.get_db_prep_save(value, connection=self.connection) return value def pre_save_val(self, field, obj): """ Get the given field's value off the given obj. pre_save() is used for things like auto_now on DateTimeField. Skip it if this is a raw query. """ if self.query.raw: return getattr(obj, field.attname) return field.pre_save(obj, add=True) def assemble_as_sql(self, fields, value_rows): """ Take a sequence of N fields and a sequence of M rows of values, and generate placeholder SQL and parameters for each field and value. Return a pair containing: * a sequence of M rows of N SQL placeholder strings, and * a sequence of M rows of corresponding parameter values. Each placeholder string may contain any number of '%s' interpolation strings, and each parameter row will contain exactly as many params as the total number of '%s's in the corresponding placeholder row. """ if not value_rows: return [], [] # list of (sql, [params]) tuples for each object to be saved # Shape: [n_objs][n_fields][2] rows_of_fields_as_sql = ( (self.field_as_sql(field, v) for field, v in zip(fields, row)) for row in value_rows ) # tuple like ([sqls], [[params]s]) for each object to be saved # Shape: [n_objs][2][n_fields] sql_and_param_pair_rows = (zip(*row) for row in rows_of_fields_as_sql) # Extract separate lists for placeholders and params. # Each of these has shape [n_objs][n_fields] placeholder_rows, param_rows = zip(*sql_and_param_pair_rows) # Params for each field are still lists, and need to be flattened. param_rows = [[p for ps in row for p in ps] for row in param_rows] return placeholder_rows, param_rows def as_sql(self): # We don't need quote_name_unless_alias() here, since these are all # going to be column names (so we can avoid the extra overhead). 
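        # Illustrative example (sketch; the table and column names are
        # hypothetical): for a bulk_create() of two objects on a backend with
        # has_bulk_insert, this method typically produces SQL shaped like
        #   INSERT INTO "app_book" ("name", "pages") VALUES (%s, %s), (%s, %s)
        # together with the flattened parameter list.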
qn = self.connection.ops.quote_name opts = self.query.get_meta() insert_statement = self.connection.ops.insert_statement(ignore_conflicts=self.query.ignore_conflicts) result = ['%s %s' % (insert_statement, qn(opts.db_table))] fields = self.query.fields or [opts.pk] result.append('(%s)' % ', '.join(qn(f.column) for f in fields)) if self.query.fields: value_rows = [ [self.prepare_value(field, self.pre_save_val(field, obj)) for field in fields] for obj in self.query.objs ] else: # An empty object. value_rows = [[self.connection.ops.pk_default_value()] for _ in self.query.objs] fields = [None] # Currently the backends just accept values when generating bulk # queries and generate their own placeholders. Doing that isn't # necessary and it should be possible to use placeholders and # expressions in bulk inserts too. can_bulk = (not self.returning_fields and self.connection.features.has_bulk_insert) placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows) ignore_conflicts_suffix_sql = self.connection.ops.ignore_conflicts_suffix_sql( ignore_conflicts=self.query.ignore_conflicts ) if self.returning_fields and self.connection.features.can_return_columns_from_insert: if self.connection.features.can_return_rows_from_bulk_insert: result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows)) params = param_rows else: result.append("VALUES (%s)" % ", ".join(placeholder_rows[0])) params = [param_rows[0]] if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) # Skip empty r_sql to allow subclasses to customize behavior for # 3rd party backends. Refs #19096. r_sql, self.returning_params = self.connection.ops.return_insert_columns(self.returning_fields) if r_sql: result.append(r_sql) params += [self.returning_params] return [(" ".join(result), tuple(chain.from_iterable(params)))] if can_bulk: result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows)) if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) return [(" ".join(result), tuple(p for ps in param_rows for p in ps))] else: if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) return [ (" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals) for p, vals in zip(placeholder_rows, param_rows) ] def execute_sql(self, returning_fields=None): assert not ( returning_fields and len(self.query.objs) != 1 and not self.connection.features.can_return_rows_from_bulk_insert ) opts = self.query.get_meta() self.returning_fields = returning_fields with self.connection.cursor() as cursor: for sql, params in self.as_sql(): cursor.execute(sql, params) if not self.returning_fields: return [] if self.connection.features.can_return_rows_from_bulk_insert and len(self.query.objs) > 1: rows = self.connection.ops.fetch_returned_insert_rows(cursor) elif self.connection.features.can_return_columns_from_insert: assert len(self.query.objs) == 1 rows = [self.connection.ops.fetch_returned_insert_columns( cursor, self.returning_params, )] else: rows = [(self.connection.ops.last_insert_id( cursor, opts.db_table, opts.pk.column, ),)] cols = [field.get_col(opts.db_table) for field in self.returning_fields] converters = self.get_converters(cols) if converters: rows = list(self.apply_converters(rows, converters)) return rows class SQLDeleteCompiler(SQLCompiler): @cached_property def single_alias(self): # Ensure base table is in aliases. 
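        # get_initial_alias() registers the base table in alias_map if it is
        # not there yet, so the refcount check below always sees it.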
self.query.get_initial_alias() return sum(self.query.alias_refcount[t] > 0 for t in self.query.alias_map) == 1 @classmethod def _expr_refs_base_model(cls, expr, base_model): if isinstance(expr, Query): return expr.model == base_model if not hasattr(expr, 'get_source_expressions'): return False return any( cls._expr_refs_base_model(source_expr, base_model) for source_expr in expr.get_source_expressions() ) @cached_property def contains_self_reference_subquery(self): return any( self._expr_refs_base_model(expr, self.query.model) for expr in chain(self.query.annotations.values(), self.query.where.children) ) def _as_sql(self, query): result = [ 'DELETE FROM %s' % self.quote_name_unless_alias(query.base_table) ] where, params = self.compile(query.where) if where: result.append('WHERE %s' % where) return ' '.join(result), tuple(params) def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. """ if self.single_alias and not self.contains_self_reference_subquery: return self._as_sql(self.query) innerq = self.query.clone() innerq.__class__ = Query innerq.clear_select_clause() pk = self.query.model._meta.pk innerq.select = [ pk.get_col(self.query.get_initial_alias()) ] outerq = Query(self.query.model) outerq.where = self.query.where_class() if not self.connection.features.update_can_self_select: # Force the materialization of the inner query to allow reference # to the target table on MySQL. sql, params = innerq.get_compiler(connection=self.connection).as_sql() innerq = RawSQL('SELECT * FROM (%s) subquery' % sql, params) outerq.add_q(Q(pk__in=innerq)) return self._as_sql(outerq) class SQLUpdateCompiler(SQLCompiler): def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. """ self.pre_sql_setup() if not self.query.values: return '', () qn = self.quote_name_unless_alias values, update_params = [], [] for field, model, val in self.query.values: if hasattr(val, 'resolve_expression'): val = val.resolve_expression(self.query, allow_joins=False, for_save=True) if val.contains_aggregate: raise FieldError( 'Aggregate functions are not allowed in this query ' '(%s=%r).' % (field.name, val) ) if val.contains_over_clause: raise FieldError( 'Window expressions are not allowed in this query ' '(%s=%r).' % (field.name, val) ) elif hasattr(val, 'prepare_database_save'): if field.remote_field: val = field.get_db_prep_save( val.prepare_database_save(field), connection=self.connection, ) else: raise TypeError( "Tried to update field %s with a model instance, %r. " "Use a value compatible with %s." % (field, val, field.__class__.__name__) ) else: val = field.get_db_prep_save(val, connection=self.connection) # Getting the placeholder for the field. if hasattr(field, 'get_placeholder'): placeholder = field.get_placeholder(val, self, self.connection) else: placeholder = '%s' name = field.column if hasattr(val, 'as_sql'): sql, params = self.compile(val) values.append('%s = %s' % (qn(name), placeholder % sql)) update_params.extend(params) elif val is not None: values.append('%s = %s' % (qn(name), placeholder)) update_params.append(val) else: values.append('%s = NULL' % qn(name)) table = self.query.base_table result = [ 'UPDATE %s SET' % qn(table), ', '.join(values), ] where, params = self.compile(self.query.where) if where: result.append('WHERE %s' % where) return ' '.join(result), tuple(update_params + params) def execute_sql(self, result_type): """ Execute the specified update. 
Return the number of rows affected by the primary update query. The "primary update query" is the first non-empty query that is executed. Row counts for any subsequent, related queries are not available. """ cursor = super().execute_sql(result_type) try: rows = cursor.rowcount if cursor else 0 is_empty = cursor is None finally: if cursor: cursor.close() for query in self.query.get_related_updates(): aux_rows = query.get_compiler(self.using).execute_sql(result_type) if is_empty and aux_rows: rows = aux_rows is_empty = False return rows def pre_sql_setup(self): """ If the update depends on results from other tables, munge the "where" conditions to match the format required for (portable) SQL updates. If multiple updates are required, pull out the id values to update at this point so that they don't change as a result of the progressive updates. """ refcounts_before = self.query.alias_refcount.copy() # Ensure base table is in the query self.query.get_initial_alias() count = self.query.count_active_tables() if not self.query.related_updates and count == 1: return query = self.query.chain(klass=Query) query.select_related = False query.clear_ordering(True) query.extra = {} query.select = [] query.add_fields([query.get_meta().pk.name]) super().pre_sql_setup() must_pre_select = count > 1 and not self.connection.features.update_can_self_select # Now we adjust the current query: reset the where clause and get rid # of all the tables we don't need (since they're in the sub-select). self.query.where = self.query.where_class() if self.query.related_updates or must_pre_select: # Either we're using the idents in multiple update queries (so # don't want them to change), or the db backend doesn't support # selecting from the updating table (e.g. MySQL). idents = [] for rows in query.get_compiler(self.using).execute_sql(MULTI): idents.extend(r[0] for r in rows) self.query.add_filter(('pk__in', idents)) self.query.related_ids = idents else: # The fast path. Filters and updates in one query. self.query.add_filter(('pk__in', query)) self.query.reset_refcounts(refcounts_before) class SQLAggregateCompiler(SQLCompiler): def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. """ sql, params = [], [] for annotation in self.query.annotation_select.values(): ann_sql, ann_params = self.compile(annotation) ann_sql, ann_params = annotation.select_format(self, ann_sql, ann_params) sql.append(ann_sql) params.extend(ann_params) self.col_count = len(self.query.annotation_select) sql = ', '.join(sql) params = tuple(params) inner_query_sql, inner_query_params = self.query.inner_query.get_compiler( self.using ).as_sql(with_col_aliases=True) sql = 'SELECT %s FROM (%s) subquery' % (sql, inner_query_sql) params = params + inner_query_params return sql, params def cursor_iter(cursor, sentinel, col_count, itersize): """ Yield blocks of rows from a cursor and ensure the cursor is closed when done. """ try: for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel): yield rows if col_count is None else [r[:col_count] for r in rows] finally: cursor.close()
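
# A minimal usage sketch for cursor_iter() (values are illustrative; the real
# call site is SQLCompiler.execute_sql() above):
#
#     results = cursor_iter(
#         cursor,                                     # an open DB-API cursor
#         connection.features.empty_fetchmany_value,  # sentinel, usually []
#         col_count=None,                             # or truncate rows to N columns
#         itersize=GET_ITERATOR_CHUNK_SIZE,           # rows fetched per round trip
#     )
#     for chunk in results:
#         ...  # each chunk is a list of up to `itersize` row tuples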
034753ac0bc085a1b741ba49581cd54ee402f7e7022a232d3b6d4c0de4d01db5
import logging from datetime import datetime from django.db.backends.ddl_references import ( Columns, Expressions, ForeignKeyName, IndexName, Statement, Table, ) from django.db.backends.utils import names_digest, split_identifier from django.db.models import Deferrable, Index from django.db.models.sql import Query from django.db.transaction import TransactionManagementError, atomic from django.utils import timezone logger = logging.getLogger('django.db.backends.schema') def _is_relevant_relation(relation, altered_field): """ When altering the given field, must constraints on its model from the given relation be temporarily dropped? """ field = relation.field if field.many_to_many: # M2M reverse field return False if altered_field.primary_key and field.to_fields == [None]: # Foreign key constraint on the primary key, which is being altered. return True # Is the constraint targeting the field being altered? return altered_field.name in field.to_fields def _all_related_fields(model): return model._meta._get_fields(forward=False, reverse=True, include_hidden=True) def _related_non_m2m_objects(old_field, new_field): # Filter out m2m objects from reverse relations. # Return (old_relation, new_relation) tuples. return zip( (obj for obj in _all_related_fields(old_field.model) if _is_relevant_relation(obj, old_field)), (obj for obj in _all_related_fields(new_field.model) if _is_relevant_relation(obj, new_field)), ) class BaseDatabaseSchemaEditor: """ This class and its subclasses are responsible for emitting schema-changing statements to the databases - model creation/removal/alteration, field renaming, index fiddling, and so on. """ # Overrideable SQL templates sql_create_table = "CREATE TABLE %(table)s (%(definition)s)" sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s" sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s" sql_delete_table = "DROP TABLE %(table)s CASCADE" sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s" sql_alter_column = "ALTER TABLE %(table)s %(changes)s" sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s" sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL" sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL" sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s" sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT" sql_alter_column_no_default_null = sql_alter_column_no_default sql_alter_column_collate = "ALTER COLUMN %(column)s TYPE %(type)s%(collation)s" sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE" sql_rename_column = "ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s" sql_update_with_default = "UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL" sql_unique_constraint = "UNIQUE (%(columns)s)%(deferrable)s" sql_check_constraint = "CHECK (%(check)s)" sql_delete_constraint = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" sql_constraint = "CONSTRAINT %(name)s %(constraint)s" sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)" sql_delete_check = sql_delete_constraint sql_create_unique = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE (%(columns)s)%(deferrable)s" sql_delete_unique = sql_delete_constraint sql_create_fk = ( "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) " "REFERENCES %(to_table)s (%(to_column)s)%(deferrable)s" ) sql_create_inline_fk = None sql_create_column_inline_fk = None 
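    # Backends that declare foreign keys inline with the column definition
    # (e.g. SQLite) override the two templates above; by default they are None
    # and foreign keys are added as deferred ALTER TABLE statements instead.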
sql_delete_fk = sql_delete_constraint sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(include)s%(extra)s%(condition)s" sql_create_unique_index = "CREATE UNIQUE INDEX %(name)s ON %(table)s (%(columns)s)%(include)s%(condition)s" sql_delete_index = "DROP INDEX %(name)s" sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)" sql_delete_pk = sql_delete_constraint sql_delete_procedure = 'DROP PROCEDURE %(procedure)s' def __init__(self, connection, collect_sql=False, atomic=True): self.connection = connection self.collect_sql = collect_sql if self.collect_sql: self.collected_sql = [] self.atomic_migration = self.connection.features.can_rollback_ddl and atomic # State-managing methods def __enter__(self): self.deferred_sql = [] if self.atomic_migration: self.atomic = atomic(self.connection.alias) self.atomic.__enter__() return self def __exit__(self, exc_type, exc_value, traceback): if exc_type is None: for sql in self.deferred_sql: self.execute(sql) if self.atomic_migration: self.atomic.__exit__(exc_type, exc_value, traceback) # Core utility functions def execute(self, sql, params=()): """Execute the given SQL statement, with optional parameters.""" # Don't perform the transactional DDL check if SQL is being collected # as it's not going to be executed anyway. if not self.collect_sql and self.connection.in_atomic_block and not self.connection.features.can_rollback_ddl: raise TransactionManagementError( "Executing DDL statements while in a transaction on databases " "that can't perform a rollback is prohibited." ) # Account for non-string statement objects. sql = str(sql) # Log the command we're running, then run it logger.debug("%s; (params %r)", sql, params, extra={'params': params, 'sql': sql}) if self.collect_sql: ending = "" if sql.rstrip().endswith(";") else ";" if params is not None: self.collected_sql.append((sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) else: with self.connection.cursor() as cursor: cursor.execute(sql, params) def quote_name(self, name): return self.connection.ops.quote_name(name) def table_sql(self, model): """Take a model and return its table definition.""" # Add any unique_togethers (always deferred, as some fields might be # created afterwards, like geometry fields with some backends). for fields in model._meta.unique_together: columns = [model._meta.get_field(field).column for field in fields] self.deferred_sql.append(self._create_unique_sql(model, columns)) # Create column SQL, add FK deferreds if needed. column_sqls = [] params = [] for field in model._meta.local_fields: # SQL. definition, extra_params = self.column_sql(model, field) if definition is None: continue # Check constraints can go on the column SQL here. db_params = field.db_parameters(connection=self.connection) if db_params['check']: definition += ' ' + self.sql_check_constraint % db_params # Autoincrement SQL (for backends with inline variant). col_type_suffix = field.db_type_suffix(connection=self.connection) if col_type_suffix: definition += ' %s' % col_type_suffix params.extend(extra_params) # FK. 
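            # Emitted inline when the backend defines sql_create_inline_fk;
            # otherwise deferred as an ALTER TABLE ... ADD CONSTRAINT statement.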
if field.remote_field and field.db_constraint: to_table = field.remote_field.model._meta.db_table to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column if self.sql_create_inline_fk: definition += ' ' + self.sql_create_inline_fk % { 'to_table': self.quote_name(to_table), 'to_column': self.quote_name(to_column), } elif self.connection.features.supports_foreign_keys: self.deferred_sql.append(self._create_fk_sql(model, field, '_fk_%(to_table)s_%(to_column)s')) # Add the SQL to our big list. column_sqls.append('%s %s' % ( self.quote_name(field.column), definition, )) # Autoincrement SQL (for backends with post table definition # variant). if field.get_internal_type() in ('AutoField', 'BigAutoField', 'SmallAutoField'): autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column) if autoinc_sql: self.deferred_sql.extend(autoinc_sql) constraints = [constraint.constraint_sql(model, self) for constraint in model._meta.constraints] sql = self.sql_create_table % { 'table': self.quote_name(model._meta.db_table), 'definition': ', '.join(constraint for constraint in (*column_sqls, *constraints) if constraint), } if model._meta.db_tablespace: tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace) if tablespace_sql: sql += ' ' + tablespace_sql return sql, params # Field <-> database mapping functions def column_sql(self, model, field, include_default=False): """ Take a field and return its column definition. The field must already have had set_attributes_from_name() called. """ # Get the column's type and use that as the basis of the SQL db_params = field.db_parameters(connection=self.connection) sql = db_params['type'] params = [] # Check for fields that aren't actually columns (e.g. M2M) if sql is None: return None, None # Collation. collation = getattr(field, 'db_collation', None) if collation: sql += self._collate_sql(collation) # Work out nullability null = field.null # If we were told to include a default value, do so include_default = include_default and not self.skip_default(field) if include_default: default_value = self.effective_default(field) column_default = ' DEFAULT ' + self._column_default_sql(field) if default_value is not None: if self.connection.features.requires_literal_defaults: # Some databases can't take defaults as a parameter (oracle) # If this is the case, the individual schema backend should # implement prepare_default sql += column_default % self.prepare_default(default_value) else: sql += column_default params += [default_value] # Oracle treats the empty string ('') as null, so coerce the null # option whenever '' is a possible value. if (field.empty_strings_allowed and not field.primary_key and self.connection.features.interprets_empty_strings_as_nulls): null = True if null and not self.connection.features.implied_column_null: sql += " NULL" elif not null: sql += " NOT NULL" # Primary key/unique outputs if field.primary_key: sql += " PRIMARY KEY" elif field.unique: sql += " UNIQUE" # Optionally add the tablespace if it's an implicitly indexed column tablespace = field.db_tablespace or model._meta.db_tablespace if tablespace and self.connection.features.supports_tablespaces and field.unique: sql += " %s" % self.connection.ops.tablespace_sql(tablespace, inline=True) # Return the sql return sql, params def skip_default(self, field): """ Some backends don't accept default values for certain columns types (i.e. MySQL longtext and longblob). 
""" return False def prepare_default(self, value): """ Only used for backends which have requires_literal_defaults feature """ raise NotImplementedError( 'subclasses of BaseDatabaseSchemaEditor for backends which have ' 'requires_literal_defaults must provide a prepare_default() method' ) def _column_default_sql(self, field): """ Return the SQL to use in a DEFAULT clause. The resulting string should contain a '%s' placeholder for a default value. """ return '%s' @staticmethod def _effective_default(field): # This method allows testing its logic without a connection. if field.has_default(): default = field.get_default() elif not field.null and field.blank and field.empty_strings_allowed: if field.get_internal_type() == "BinaryField": default = b'' else: default = '' elif getattr(field, 'auto_now', False) or getattr(field, 'auto_now_add', False): default = datetime.now() internal_type = field.get_internal_type() if internal_type == 'DateField': default = default.date() elif internal_type == 'TimeField': default = default.time() elif internal_type == 'DateTimeField': default = timezone.now() else: default = None return default def effective_default(self, field): """Return a field's effective database default value.""" return field.get_db_prep_save(self._effective_default(field), self.connection) def quote_value(self, value): """ Return a quoted version of the value so it's safe to use in an SQL string. This is not safe against injection from user code; it is intended only for use in making SQL scripts or preparing default values for particularly tricky backends (defaults are not user-defined, though, so this is safe). """ raise NotImplementedError() # Actions def create_model(self, model): """ Create a table and any accompanying indexes or unique constraints for the given `model`. """ sql, params = self.table_sql(model) # Prevent using [] as params, in the case a literal '%' is used in the definition self.execute(sql, params or None) # Add any field index and index_together's (deferred as SQLite _remake_table needs it) self.deferred_sql.extend(self._model_indexes_sql(model)) # Make M2M tables for field in model._meta.local_many_to_many: if field.remote_field.through._meta.auto_created: self.create_model(field.remote_field.through) def delete_model(self, model): """Delete a model from the database.""" # Handle auto-created intermediary models for field in model._meta.local_many_to_many: if field.remote_field.through._meta.auto_created: self.delete_model(field.remote_field.through) # Delete the table self.execute(self.sql_delete_table % { "table": self.quote_name(model._meta.db_table), }) # Remove all deferred statements referencing the deleted table. for sql in list(self.deferred_sql): if isinstance(sql, Statement) and sql.references_table(model._meta.db_table): self.deferred_sql.remove(sql) def add_index(self, model, index): """Add an index on a model.""" if ( index.contains_expressions and not self.connection.features.supports_expression_indexes ): return None # Index.create_sql returns interpolated SQL which makes params=None a # necessity to avoid escaping attempts on execution. 
self.execute(index.create_sql(model, self), params=None) def remove_index(self, model, index): """Remove an index from a model.""" if ( index.contains_expressions and not self.connection.features.supports_expression_indexes ): return None self.execute(index.remove_sql(model, self)) def add_constraint(self, model, constraint): """Add a constraint to a model.""" sql = constraint.create_sql(model, self) if sql: # Constraint.create_sql returns interpolated SQL which makes # params=None a necessity to avoid escaping attempts on execution. self.execute(sql, params=None) def remove_constraint(self, model, constraint): """Remove a constraint from a model.""" sql = constraint.remove_sql(model, self) if sql: self.execute(sql) def alter_unique_together(self, model, old_unique_together, new_unique_together): """ Deal with a model changing its unique_together. The input unique_togethers must be doubly-nested, not the single-nested ["foo", "bar"] format. """ olds = {tuple(fields) for fields in old_unique_together} news = {tuple(fields) for fields in new_unique_together} # Deleted uniques for fields in olds.difference(news): self._delete_composed_index(model, fields, {'unique': True}, self.sql_delete_unique) # Created uniques for fields in news.difference(olds): columns = [model._meta.get_field(field).column for field in fields] self.execute(self._create_unique_sql(model, columns)) def alter_index_together(self, model, old_index_together, new_index_together): """ Deal with a model changing its index_together. The input index_togethers must be doubly-nested, not the single-nested ["foo", "bar"] format. """ olds = {tuple(fields) for fields in old_index_together} news = {tuple(fields) for fields in new_index_together} # Deleted indexes for fields in olds.difference(news): self._delete_composed_index( model, fields, {'index': True, 'unique': False}, self.sql_delete_index, ) # Created indexes for field_names in news.difference(olds): fields = [model._meta.get_field(field) for field in field_names] self.execute(self._create_index_sql(model, fields=fields, suffix='_idx')) def _delete_composed_index(self, model, fields, constraint_kwargs, sql): meta_constraint_names = {constraint.name for constraint in model._meta.constraints} meta_index_names = {constraint.name for constraint in model._meta.indexes} columns = [model._meta.get_field(field).column for field in fields] constraint_names = self._constraint_names( model, columns, exclude=meta_constraint_names | meta_index_names, **constraint_kwargs ) if len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of constraints for %s(%s)" % ( len(constraint_names), model._meta.db_table, ", ".join(columns), )) self.execute(self._delete_constraint_sql(sql, model, constraint_names[0])) def alter_db_table(self, model, old_db_table, new_db_table): """Rename the table a model points to.""" if (old_db_table == new_db_table or (self.connection.features.ignores_table_name_case and old_db_table.lower() == new_db_table.lower())): return self.execute(self.sql_rename_table % { "old_table": self.quote_name(old_db_table), "new_table": self.quote_name(new_db_table), }) # Rename all references to the old table name. 
for sql in self.deferred_sql: if isinstance(sql, Statement): sql.rename_table_references(old_db_table, new_db_table) def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace): """Move a model's table between tablespaces.""" self.execute(self.sql_retablespace_table % { "table": self.quote_name(model._meta.db_table), "old_tablespace": self.quote_name(old_db_tablespace), "new_tablespace": self.quote_name(new_db_tablespace), }) def add_field(self, model, field): """ Create a field on a model. Usually involves adding a column, but may involve adding a table instead (for M2M fields). """ # Special-case implicit M2M tables if field.many_to_many and field.remote_field.through._meta.auto_created: return self.create_model(field.remote_field.through) # Get the column's definition definition, params = self.column_sql(model, field, include_default=True) # It might not actually have a column behind it if definition is None: return # Check constraints can go on the column SQL here db_params = field.db_parameters(connection=self.connection) if db_params['check']: definition += " " + self.sql_check_constraint % db_params if field.remote_field and self.connection.features.supports_foreign_keys and field.db_constraint: constraint_suffix = '_fk_%(to_table)s_%(to_column)s' # Add FK constraint inline, if supported. if self.sql_create_column_inline_fk: to_table = field.remote_field.model._meta.db_table to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column namespace, _ = split_identifier(model._meta.db_table) definition += " " + self.sql_create_column_inline_fk % { 'name': self._fk_constraint_name(model, field, constraint_suffix), 'namespace': '%s.' % self.quote_name(namespace) if namespace else '', 'column': self.quote_name(field.column), 'to_table': self.quote_name(to_table), 'to_column': self.quote_name(to_column), 'deferrable': self.connection.ops.deferrable_sql() } # Otherwise, add FK constraints later. else: self.deferred_sql.append(self._create_fk_sql(model, field, constraint_suffix)) # Build the SQL and run it sql = self.sql_create_column % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(field.column), "definition": definition, } self.execute(sql, params) # Drop the default if we need to # (Django usually does not use in-database defaults) if not self.skip_default(field) and self.effective_default(field) is not None: changes_sql, params = self._alter_column_default_sql(model, None, field, drop=True) sql = self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": changes_sql, } self.execute(sql, params) # Add an index, if required self.deferred_sql.extend(self._field_indexes_sql(model, field)) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() def remove_field(self, model, field): """ Remove a field from a model. Usually involves deleting a column, but for M2Ms may involve deleting a table. 
""" # Special-case implicit M2M tables if field.many_to_many and field.remote_field.through._meta.auto_created: return self.delete_model(field.remote_field.through) # It might not actually have a column behind it if field.db_parameters(connection=self.connection)['type'] is None: return # Drop any FK constraints, MySQL requires explicit deletion if field.remote_field: fk_names = self._constraint_names(model, [field.column], foreign_key=True) for fk_name in fk_names: self.execute(self._delete_fk_sql(model, fk_name)) # Delete the column sql = self.sql_delete_column % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(field.column), } self.execute(sql) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() # Remove all deferred statements referencing the deleted column. for sql in list(self.deferred_sql): if isinstance(sql, Statement) and sql.references_column(model._meta.db_table, field.column): self.deferred_sql.remove(sql) def alter_field(self, model, old_field, new_field, strict=False): """ Allow a field's type, uniqueness, nullability, default, column, constraints, etc. to be modified. `old_field` is required to compute the necessary changes. If `strict` is True, raise errors if the old column does not match `old_field` precisely. """ if not self._field_should_be_altered(old_field, new_field): return # Ensure this field is even column-based old_db_params = old_field.db_parameters(connection=self.connection) old_type = old_db_params['type'] new_db_params = new_field.db_parameters(connection=self.connection) new_type = new_db_params['type'] if ((old_type is None and old_field.remote_field is None) or (new_type is None and new_field.remote_field is None)): raise ValueError( "Cannot alter field %s into %s - they do not properly define " "db_type (are you using a badly-written custom field?)" % (old_field, new_field), ) elif old_type is None and new_type is None and ( old_field.remote_field.through and new_field.remote_field.through and old_field.remote_field.through._meta.auto_created and new_field.remote_field.through._meta.auto_created): return self._alter_many_to_many(model, old_field, new_field, strict) elif old_type is None and new_type is None and ( old_field.remote_field.through and new_field.remote_field.through and not old_field.remote_field.through._meta.auto_created and not new_field.remote_field.through._meta.auto_created): # Both sides have through models; this is a no-op. 
return elif old_type is None or new_type is None: raise ValueError( "Cannot alter field %s into %s - they are not compatible types " "(you cannot alter to or from M2M fields, or add or remove " "through= on M2M fields)" % (old_field, new_field) ) self._alter_field(model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict) def _alter_field(self, model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict=False): """Perform a "physical" (non-ManyToMany) field update.""" # Drop any FK constraints, we'll remake them later fks_dropped = set() if ( self.connection.features.supports_foreign_keys and old_field.remote_field and old_field.db_constraint ): fk_names = self._constraint_names(model, [old_field.column], foreign_key=True) if strict and len(fk_names) != 1: raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % ( len(fk_names), model._meta.db_table, old_field.column, )) for fk_name in fk_names: fks_dropped.add((old_field.column,)) self.execute(self._delete_fk_sql(model, fk_name)) # Has unique been removed? if old_field.unique and (not new_field.unique or self._field_became_primary_key(old_field, new_field)): # Find the unique constraint for this field meta_constraint_names = {constraint.name for constraint in model._meta.constraints} constraint_names = self._constraint_names( model, [old_field.column], unique=True, primary_key=False, exclude=meta_constraint_names, ) if strict and len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % ( len(constraint_names), model._meta.db_table, old_field.column, )) for constraint_name in constraint_names: self.execute(self._delete_unique_sql(model, constraint_name)) # Drop incoming FK constraints if the field is a primary key or unique, # which might be a to_field target, and things are going to change. drop_foreign_keys = ( self.connection.features.supports_foreign_keys and ( (old_field.primary_key and new_field.primary_key) or (old_field.unique and new_field.unique) ) and old_type != new_type ) if drop_foreign_keys: # '_meta.related_field' also contains M2M reverse fields, these # will be filtered out for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field): rel_fk_names = self._constraint_names( new_rel.related_model, [new_rel.field.column], foreign_key=True ) for fk_name in rel_fk_names: self.execute(self._delete_fk_sql(new_rel.related_model, fk_name)) # Removed an index? (no strict check, as multiple indexes are possible) # Remove indexes if db_index switched to False or a unique constraint # will now be used in lieu of an index. The following lines from the # truth table show all True cases; the rest are False: # # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique # ------------------------------------------------------------------------------ # True | False | False | False # True | False | False | True # True | False | True | True if old_field.db_index and not old_field.unique and (not new_field.db_index or new_field.unique): # Find the index for this field meta_index_names = {index.name for index in model._meta.indexes} # Retrieve only BTREE indexes since this is what's created with # db_index=True. 
index_names = self._constraint_names( model, [old_field.column], index=True, type_=Index.suffix, exclude=meta_index_names, ) for index_name in index_names: # The only way to check if an index was created with # db_index=True or with Index(['field'], name='foo') # is to look at its name (refs #28053). self.execute(self._delete_index_sql(model, index_name)) # Change check constraints? if old_db_params['check'] != new_db_params['check'] and old_db_params['check']: meta_constraint_names = {constraint.name for constraint in model._meta.constraints} constraint_names = self._constraint_names( model, [old_field.column], check=True, exclude=meta_constraint_names, ) if strict and len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % ( len(constraint_names), model._meta.db_table, old_field.column, )) for constraint_name in constraint_names: self.execute(self._delete_check_sql(model, constraint_name)) # Have they renamed the column? if old_field.column != new_field.column: self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type)) # Rename all references to the renamed column. for sql in self.deferred_sql: if isinstance(sql, Statement): sql.rename_column_references(model._meta.db_table, old_field.column, new_field.column) # Next, start accumulating actions to do actions = [] null_actions = [] post_actions = [] # Collation change? old_collation = getattr(old_field, 'db_collation', None) new_collation = getattr(new_field, 'db_collation', None) if old_collation != new_collation: # Collation change handles also a type change. fragment = self._alter_column_collation_sql(model, new_field, new_type, new_collation) actions.append(fragment) # Type change? elif old_type != new_type: fragment, other_actions = self._alter_column_type_sql(model, old_field, new_field, new_type) actions.append(fragment) post_actions.extend(other_actions) # When changing a column NULL constraint to NOT NULL with a given # default value, we need to perform 4 steps: # 1. Add a default for new incoming writes # 2. Update existing NULL rows with new default # 3. Replace NULL constraint with NOT NULL # 4. Drop the default again. # Default change? needs_database_default = False if old_field.null and not new_field.null: old_default = self.effective_default(old_field) new_default = self.effective_default(new_field) if ( not self.skip_default(new_field) and old_default != new_default and new_default is not None ): needs_database_default = True actions.append(self._alter_column_default_sql(model, old_field, new_field)) # Nullability change? if old_field.null != new_field.null: fragment = self._alter_column_null_sql(model, old_field, new_field) if fragment: null_actions.append(fragment) # Only if we have a default and there is a change from NULL to NOT NULL four_way_default_alteration = ( new_field.has_default() and (old_field.null and not new_field.null) ) if actions or null_actions: if not four_way_default_alteration: # If we don't have to do a 4-way default alteration we can # directly run a (NOT) NULL alteration actions = actions + null_actions # Combine actions together if we can (e.g. 
postgres) if self.connection.features.supports_combined_alters and actions: sql, params = tuple(zip(*actions)) actions = [(", ".join(sql), sum(params, []))] # Apply those actions for sql, params in actions: self.execute( self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": sql, }, params, ) if four_way_default_alteration: # Update existing rows with default value self.execute( self.sql_update_with_default % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(new_field.column), "default": "%s", }, [new_default], ) # Since we didn't run a NOT NULL change before we need to do it # now for sql, params in null_actions: self.execute( self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": sql, }, params, ) if post_actions: for sql, params in post_actions: self.execute(sql, params) # If primary_key changed to False, delete the primary key constraint. if old_field.primary_key and not new_field.primary_key: self._delete_primary_key(model, strict) # Added a unique? if self._unique_should_be_added(old_field, new_field): self.execute(self._create_unique_sql(model, [new_field.column])) # Added an index? Add an index if db_index switched to True or a unique # constraint will no longer be used in lieu of an index. The following # lines from the truth table show all True cases; the rest are False: # # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique # ------------------------------------------------------------------------------ # False | False | True | False # False | True | True | False # True | True | True | False if (not old_field.db_index or old_field.unique) and new_field.db_index and not new_field.unique: self.execute(self._create_index_sql(model, fields=[new_field])) # Type alteration on primary key? Then we need to alter the column # referring to us. rels_to_update = [] if drop_foreign_keys: rels_to_update.extend(_related_non_m2m_objects(old_field, new_field)) # Changed to become primary key? if self._field_became_primary_key(old_field, new_field): # Make the new one self.execute(self._create_primary_key_sql(model, new_field)) # Update all referencing columns rels_to_update.extend(_related_non_m2m_objects(old_field, new_field)) # Handle our type alters on the other end of rels from the PK stuff above for old_rel, new_rel in rels_to_update: rel_db_params = new_rel.field.db_parameters(connection=self.connection) rel_type = rel_db_params['type'] fragment, other_actions = self._alter_column_type_sql( new_rel.related_model, old_rel.field, new_rel.field, rel_type ) self.execute( self.sql_alter_column % { "table": self.quote_name(new_rel.related_model._meta.db_table), "changes": fragment[0], }, fragment[1], ) for sql, params in other_actions: self.execute(sql, params) # Does it have a foreign key? if (self.connection.features.supports_foreign_keys and new_field.remote_field and (fks_dropped or not old_field.remote_field or not old_field.db_constraint) and new_field.db_constraint): self.execute(self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s")) # Rebuild FKs that pointed to us if we previously had to drop them if drop_foreign_keys: for rel in new_field.model._meta.related_objects: if _is_relevant_relation(rel, new_field) and rel.field.db_constraint: self.execute(self._create_fk_sql(rel.related_model, rel.field, "_fk")) # Does it have check constraints we need to add? 
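        # For example, altering a field into a PositiveIntegerField introduces
        # a check such as "col" >= 0 on backends that enforce it with a CHECK
        # constraint (illustrative; the expression comes from db_parameters()).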
if old_db_params['check'] != new_db_params['check'] and new_db_params['check']: constraint_name = self._create_index_name(model._meta.db_table, [new_field.column], suffix='_check') self.execute(self._create_check_sql(model, constraint_name, new_db_params['check'])) # Drop the default if we need to # (Django usually does not use in-database defaults) if needs_database_default: changes_sql, params = self._alter_column_default_sql(model, old_field, new_field, drop=True) sql = self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": changes_sql, } self.execute(sql, params) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() def _alter_column_null_sql(self, model, old_field, new_field): """ Hook to specialize column null alteration. Return a (sql, params) fragment to set a column to null or non-null as required by new_field, or None if no changes are required. """ if (self.connection.features.interprets_empty_strings_as_nulls and new_field.get_internal_type() in ("CharField", "TextField")): # The field is nullable in the database anyway, leave it alone. return else: new_db_params = new_field.db_parameters(connection=self.connection) sql = self.sql_alter_column_null if new_field.null else self.sql_alter_column_not_null return ( sql % { 'column': self.quote_name(new_field.column), 'type': new_db_params['type'], }, [], ) def _alter_column_default_sql(self, model, old_field, new_field, drop=False): """ Hook to specialize column default alteration. Return a (sql, params) fragment to add or drop (depending on the drop argument) a default to new_field's column. """ new_default = self.effective_default(new_field) default = self._column_default_sql(new_field) params = [new_default] if drop: params = [] elif self.connection.features.requires_literal_defaults: # Some databases (Oracle) can't take defaults as a parameter # If this is the case, the SchemaEditor for that database should # implement prepare_default(). default = self.prepare_default(new_default) params = [] new_db_params = new_field.db_parameters(connection=self.connection) if drop: if new_field.null: sql = self.sql_alter_column_no_default_null else: sql = self.sql_alter_column_no_default else: sql = self.sql_alter_column_default return ( sql % { 'column': self.quote_name(new_field.column), 'type': new_db_params['type'], 'default': default, }, params, ) def _alter_column_type_sql(self, model, old_field, new_field, new_type): """ Hook to specialize column type alteration for different backends, for cases when a creation type is different to an alteration type (e.g. SERIAL in PostgreSQL, PostGIS fields). Return a two-tuple of: an SQL fragment of (sql, params) to insert into an ALTER TABLE statement and a list of extra (sql, params) tuples to run once the field is altered. 
""" return ( ( self.sql_alter_column_type % { "column": self.quote_name(new_field.column), "type": new_type, }, [], ), [], ) def _alter_column_collation_sql(self, model, new_field, new_type, new_collation): return ( self.sql_alter_column_collate % { 'column': self.quote_name(new_field.column), 'type': new_type, 'collation': self._collate_sql(new_collation) if new_collation else '', }, [], ) def _alter_many_to_many(self, model, old_field, new_field, strict): """Alter M2Ms to repoint their to= endpoints.""" # Rename the through table if old_field.remote_field.through._meta.db_table != new_field.remote_field.through._meta.db_table: self.alter_db_table(old_field.remote_field.through, old_field.remote_field.through._meta.db_table, new_field.remote_field.through._meta.db_table) # Repoint the FK to the other side self.alter_field( new_field.remote_field.through, # We need the field that points to the target model, so we can tell alter_field to change it - # this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model) old_field.remote_field.through._meta.get_field(old_field.m2m_reverse_field_name()), new_field.remote_field.through._meta.get_field(new_field.m2m_reverse_field_name()), ) self.alter_field( new_field.remote_field.through, # for self-referential models we need to alter field from the other end too old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()), new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()), ) def _create_index_name(self, table_name, column_names, suffix=""): """ Generate a unique name for an index/unique constraint. The name is divided into 3 parts: the table name, the column names, and a unique digest and suffix. """ _, table_name = split_identifier(table_name) hash_suffix_part = '%s%s' % (names_digest(table_name, *column_names, length=8), suffix) max_length = self.connection.ops.max_name_length() or 200 # If everything fits into max_length, use that name. index_name = '%s_%s_%s' % (table_name, '_'.join(column_names), hash_suffix_part) if len(index_name) <= max_length: return index_name # Shorten a long suffix. if len(hash_suffix_part) > max_length / 3: hash_suffix_part = hash_suffix_part[:max_length // 3] other_length = (max_length - len(hash_suffix_part)) // 2 - 1 index_name = '%s_%s_%s' % ( table_name[:other_length], '_'.join(column_names)[:other_length], hash_suffix_part, ) # Prepend D if needed to prevent the name from starting with an # underscore or a number (not permitted on Oracle). 
if index_name[0] == "_" or index_name[0].isdigit(): index_name = "D%s" % index_name[:-1] return index_name def _get_index_tablespace_sql(self, model, fields, db_tablespace=None): if db_tablespace is None: if len(fields) == 1 and fields[0].db_tablespace: db_tablespace = fields[0].db_tablespace elif model._meta.db_tablespace: db_tablespace = model._meta.db_tablespace if db_tablespace is not None: return ' ' + self.connection.ops.tablespace_sql(db_tablespace) return '' def _index_condition_sql(self, condition): if condition: return ' WHERE ' + condition return '' def _index_include_sql(self, model, columns): if not columns or not self.connection.features.supports_covering_indexes: return '' return Statement( ' INCLUDE (%(columns)s)', columns=Columns(model._meta.db_table, columns, self.quote_name), ) def _create_index_sql(self, model, *, fields=None, name=None, suffix='', using='', db_tablespace=None, col_suffixes=(), sql=None, opclasses=(), condition=None, include=None, expressions=None): """ Return the SQL statement to create the index for one or several fields or expressions. `sql` can be specified if the syntax differs from the standard (GIS indexes, ...). """ fields = fields or [] expressions = expressions or [] compiler = Query(model, alias_cols=False).get_compiler( connection=self.connection, ) tablespace_sql = self._get_index_tablespace_sql(model, fields, db_tablespace=db_tablespace) columns = [field.column for field in fields] sql_create_index = sql or self.sql_create_index table = model._meta.db_table def create_index_name(*args, **kwargs): nonlocal name if name is None: name = self._create_index_name(*args, **kwargs) return self.quote_name(name) return Statement( sql_create_index, table=Table(table, self.quote_name), name=IndexName(table, columns, suffix, create_index_name), using=using, columns=( self._index_columns(table, columns, col_suffixes, opclasses) if columns else Expressions(table, expressions, compiler, self.quote_value) ), extra=tablespace_sql, condition=self._index_condition_sql(condition), include=self._index_include_sql(model, include), ) def _delete_index_sql(self, model, name, sql=None): return Statement( sql or self.sql_delete_index, table=Table(model._meta.db_table, self.quote_name), name=self.quote_name(name), ) def _index_columns(self, table, columns, col_suffixes, opclasses): return Columns(table, columns, self.quote_name, col_suffixes=col_suffixes) def _model_indexes_sql(self, model): """ Return a list of all index SQL statements (field indexes, index_together, Meta.indexes) for the specified model. """ if not model._meta.managed or model._meta.proxy or model._meta.swapped: return [] output = [] for field in model._meta.local_fields: output.extend(self._field_indexes_sql(model, field)) for field_names in model._meta.index_together: fields = [model._meta.get_field(field) for field in field_names] output.append(self._create_index_sql(model, fields=fields, suffix='_idx')) for index in model._meta.indexes: if ( not index.contains_expressions or self.connection.features.supports_expression_indexes ): output.append(index.create_sql(model, self)) return output def _field_indexes_sql(self, model, field): """ Return a list of all index SQL statements for the specified field. 
""" output = [] if self._field_should_be_indexed(model, field): output.append(self._create_index_sql(model, fields=[field])) return output def _field_should_be_altered(self, old_field, new_field): _, old_path, old_args, old_kwargs = old_field.deconstruct() _, new_path, new_args, new_kwargs = new_field.deconstruct() # Don't alter when: # - changing only a field name # - changing an attribute that doesn't affect the schema # - adding only a db_column and the column name is not changed non_database_attrs = [ 'blank', 'db_column', 'editable', 'error_messages', 'help_text', 'limit_choices_to', # Database-level options are not supported, see #21961. 'on_delete', 'related_name', 'related_query_name', 'validators', 'verbose_name', ] for attr in non_database_attrs: old_kwargs.pop(attr, None) new_kwargs.pop(attr, None) return ( self.quote_name(old_field.column) != self.quote_name(new_field.column) or (old_path, old_args, old_kwargs) != (new_path, new_args, new_kwargs) ) def _field_should_be_indexed(self, model, field): return field.db_index and not field.unique def _field_became_primary_key(self, old_field, new_field): return not old_field.primary_key and new_field.primary_key def _unique_should_be_added(self, old_field, new_field): return (not old_field.unique and new_field.unique) or ( old_field.primary_key and not new_field.primary_key and new_field.unique ) def _rename_field_sql(self, table, old_field, new_field, new_type): return self.sql_rename_column % { "table": self.quote_name(table), "old_column": self.quote_name(old_field.column), "new_column": self.quote_name(new_field.column), "type": new_type, } def _create_fk_sql(self, model, field, suffix): table = Table(model._meta.db_table, self.quote_name) name = self._fk_constraint_name(model, field, suffix) column = Columns(model._meta.db_table, [field.column], self.quote_name) to_table = Table(field.target_field.model._meta.db_table, self.quote_name) to_column = Columns(field.target_field.model._meta.db_table, [field.target_field.column], self.quote_name) deferrable = self.connection.ops.deferrable_sql() return Statement( self.sql_create_fk, table=table, name=name, column=column, to_table=to_table, to_column=to_column, deferrable=deferrable, ) def _fk_constraint_name(self, model, field, suffix): def create_fk_name(*args, **kwargs): return self.quote_name(self._create_index_name(*args, **kwargs)) return ForeignKeyName( model._meta.db_table, [field.column], split_identifier(field.target_field.model._meta.db_table)[1], [field.target_field.column], suffix, create_fk_name, ) def _delete_fk_sql(self, model, name): return self._delete_constraint_sql(self.sql_delete_fk, model, name) def _deferrable_constraint_sql(self, deferrable): if deferrable is None: return '' if deferrable == Deferrable.DEFERRED: return ' DEFERRABLE INITIALLY DEFERRED' if deferrable == Deferrable.IMMEDIATE: return ' DEFERRABLE INITIALLY IMMEDIATE' def _unique_sql( self, model, fields, name, condition=None, deferrable=None, include=None, opclasses=None, expressions=None, ): if ( deferrable and not self.connection.features.supports_deferrable_unique_constraints ): return None if condition or include or opclasses or expressions: # Databases support conditional, covering, and functional unique # constraints via a unique index. 
sql = self._create_unique_sql( model, fields, name=name, condition=condition, include=include, opclasses=opclasses, expressions=expressions, ) if sql: self.deferred_sql.append(sql) return None constraint = self.sql_unique_constraint % { 'columns': ', '.join(map(self.quote_name, fields)), 'deferrable': self._deferrable_constraint_sql(deferrable), } return self.sql_constraint % { 'name': self.quote_name(name), 'constraint': constraint, } def _create_unique_sql( self, model, columns, name=None, condition=None, deferrable=None, include=None, opclasses=None, expressions=None, ): if ( ( deferrable and not self.connection.features.supports_deferrable_unique_constraints ) or (condition and not self.connection.features.supports_partial_indexes) or (include and not self.connection.features.supports_covering_indexes) or (expressions and not self.connection.features.supports_expression_indexes) ): return None def create_unique_name(*args, **kwargs): return self.quote_name(self._create_index_name(*args, **kwargs)) compiler = Query(model, alias_cols=False).get_compiler(connection=self.connection) table = Table(model._meta.db_table, self.quote_name) if name is None: name = IndexName(model._meta.db_table, columns, '_uniq', create_unique_name) else: name = self.quote_name(name) if condition or include or opclasses or expressions: sql = self.sql_create_unique_index else: sql = self.sql_create_unique if columns: columns = self._index_columns(table, columns, col_suffixes=(), opclasses=opclasses) else: columns = Expressions(model._meta.db_table, expressions, compiler, self.quote_value) return Statement( sql, table=table, name=name, columns=columns, condition=self._index_condition_sql(condition), deferrable=self._deferrable_constraint_sql(deferrable), include=self._index_include_sql(model, include), ) def _delete_unique_sql( self, model, name, condition=None, deferrable=None, include=None, opclasses=None, expressions=None, ): if ( ( deferrable and not self.connection.features.supports_deferrable_unique_constraints ) or (condition and not self.connection.features.supports_partial_indexes) or (include and not self.connection.features.supports_covering_indexes) or (expressions and not self.connection.features.supports_expression_indexes) ): return None if condition or include or opclasses or expressions: sql = self.sql_delete_index else: sql = self.sql_delete_unique return self._delete_constraint_sql(sql, model, name) def _check_sql(self, name, check): return self.sql_constraint % { 'name': self.quote_name(name), 'constraint': self.sql_check_constraint % {'check': check}, } def _create_check_sql(self, model, name, check): return Statement( self.sql_create_check, table=Table(model._meta.db_table, self.quote_name), name=self.quote_name(name), check=check, ) def _delete_check_sql(self, model, name): return self._delete_constraint_sql(self.sql_delete_check, model, name) def _delete_constraint_sql(self, template, model, name): return Statement( template, table=Table(model._meta.db_table, self.quote_name), name=self.quote_name(name), ) def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None, foreign_key=None, check=None, type_=None, exclude=None): """Return all constraint names matching the columns and conditions.""" if column_names is not None: column_names = [ self.connection.introspection.identifier_converter(name) for name in column_names ] with self.connection.cursor() as cursor: constraints = self.connection.introspection.get_constraints(cursor, model._meta.db_table) result 
= [] for name, infodict in constraints.items(): if column_names is None or column_names == infodict['columns']: if unique is not None and infodict['unique'] != unique: continue if primary_key is not None and infodict['primary_key'] != primary_key: continue if index is not None and infodict['index'] != index: continue if check is not None and infodict['check'] != check: continue if foreign_key is not None and not infodict['foreign_key']: continue if type_ is not None and infodict['type'] != type_: continue if not exclude or name not in exclude: result.append(name) return result def _delete_primary_key(self, model, strict=False): constraint_names = self._constraint_names(model, primary_key=True) if strict and len(constraint_names) != 1: raise ValueError('Found wrong number (%s) of PK constraints for %s' % ( len(constraint_names), model._meta.db_table, )) for constraint_name in constraint_names: self.execute(self._delete_primary_key_sql(model, constraint_name)) def _create_primary_key_sql(self, model, field): return Statement( self.sql_create_pk, table=Table(model._meta.db_table, self.quote_name), name=self.quote_name( self._create_index_name(model._meta.db_table, [field.column], suffix="_pk") ), columns=Columns(model._meta.db_table, [field.column], self.quote_name), ) def _delete_primary_key_sql(self, model, name): return self._delete_constraint_sql(self.sql_delete_pk, model, name) def _collate_sql(self, collation): return ' COLLATE ' + self.quote_name(collation) def remove_procedure(self, procedure_name, param_types=()): sql = self.sql_delete_procedure % { 'procedure': self.quote_name(procedure_name), 'param_types': ','.join(param_types), } self.execute(sql)
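# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the original module. The
# 'example' app label and the Reservation model below are hypothetical. It
# shows the public route into the helpers above: UniqueConstraint.create_sql()
# ends up in _create_unique_sql(), which switches to sql_create_unique_index
# (a partial unique index) whenever a condition, include, opclasses, or
# expressions are given, and returns None (so add_constraint() becomes a
# no-op) when the backend's feature flags say the variant is unsupported.
from django.db import connection, models


class Reservation(models.Model):
    room_id = models.IntegerField()
    date = models.DateField()
    cancelled = models.BooleanField(default=False)

    class Meta:
        app_label = 'example'  # hypothetical app, for illustration only
        constraints = [
            models.UniqueConstraint(
                fields=['room_id', 'date'],
                condition=models.Q(cancelled=False),
                name='uniq_active_reservation',
            ),
        ]


def add_reservation_constraint():
    # On backends with partial-index support this emits
    # CREATE UNIQUE INDEX ... WHERE <condition>; elsewhere it is skipped.
    constraint = Reservation._meta.constraints[0]
    with connection.schema_editor() as editor:
        editor.add_constraint(Reservation, constraint)
# ---------------------------------------------------------------------------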
26280f798f0d1d5b558d9707336a7491f234fc8d28e0ba1796f8e9390e0d595e
from urllib.parse import urlencode from urllib.request import urlopen from django.apps import apps as django_apps from django.conf import settings from django.core import paginator from django.core.exceptions import ImproperlyConfigured from django.urls import NoReverseMatch, reverse from django.utils import translation PING_URL = "https://www.google.com/webmasters/tools/ping" class SitemapNotFound(Exception): pass def ping_google(sitemap_url=None, ping_url=PING_URL, sitemap_uses_https=True): """ Alert Google that the sitemap for the current site has been updated. If sitemap_url is provided, it should be an absolute path to the sitemap for this site -- e.g., '/sitemap.xml'. If sitemap_url is not provided, this function will attempt to deduce it by using urls.reverse(). """ sitemap_full_url = _get_sitemap_full_url(sitemap_url, sitemap_uses_https) params = urlencode({'sitemap': sitemap_full_url}) urlopen('%s?%s' % (ping_url, params)) def _get_sitemap_full_url(sitemap_url, sitemap_uses_https=True): if not django_apps.is_installed('django.contrib.sites'): raise ImproperlyConfigured("ping_google requires django.contrib.sites, which isn't installed.") if sitemap_url is None: try: # First, try to get the "index" sitemap URL. sitemap_url = reverse('django.contrib.sitemaps.views.index') except NoReverseMatch: try: # Next, try for the "global" sitemap URL. sitemap_url = reverse('django.contrib.sitemaps.views.sitemap') except NoReverseMatch: pass if sitemap_url is None: raise SitemapNotFound("You didn't provide a sitemap_url, and the sitemap URL couldn't be auto-detected.") Site = django_apps.get_model('sites.Site') current_site = Site.objects.get_current() scheme = 'https' if sitemap_uses_https else 'http' return '%s://%s%s' % (scheme, current_site.domain, sitemap_url) class Sitemap: # This limit is defined by Google. See the index documentation at # https://www.sitemaps.org/protocol.html#index. limit = 50000 # If protocol is None, the URLs in the sitemap will use the protocol # with which the sitemap was requested. protocol = None # Enables generating URLs for all languages. i18n = False # Override list of languages to use. languages = None # Enables generating alternate/hreflang links. alternates = False # Add an alternate/hreflang link with value 'x-default'. x_default = False def _get(self, name, item, default=None): try: attr = getattr(self, name) except AttributeError: return default if callable(attr): if self.i18n: # Split the (item, lang_code) tuples again for the location, # priority, lastmod and changefreq method calls. item, lang_code = item return attr(item) return attr def _languages(self): if self.languages is not None: return self.languages return [lang_code for lang_code, _ in settings.LANGUAGES] def _items(self): if self.i18n: # Create (item, lang_code) tuples for all items and languages. # This is necessary to paginate with all languages already considered. items = [ (item, lang_code) for lang_code in self._languages() for item in self.items() ] return items return self.items() def _location(self, item, force_lang_code=None): if self.i18n: obj, lang_code = item # Activate language from item-tuple or forced one before calling location. 
with translation.override(force_lang_code or lang_code): return self._get('location', item) return self._get('location', item) @property def paginator(self): return paginator.Paginator(self._items(), self.limit) def items(self): return [] def location(self, item): return item.get_absolute_url() def get_protocol(self, protocol=None): # Determine protocol return self.protocol or protocol or 'http' def get_domain(self, site=None): # Determine domain if site is None: if django_apps.is_installed('django.contrib.sites'): Site = django_apps.get_model('sites.Site') try: site = Site.objects.get_current() except Site.DoesNotExist: pass if site is None: raise ImproperlyConfigured( "To use sitemaps, either enable the sites framework or pass " "a Site/RequestSite object in your view." ) return site.domain def get_urls(self, page=1, site=None, protocol=None): protocol = self.get_protocol(protocol) domain = self.get_domain(site) return self._urls(page, protocol, domain) def _urls(self, page, protocol, domain): urls = [] latest_lastmod = None all_items_lastmod = True # track if all items have a lastmod paginator_page = self.paginator.page(page) for item in paginator_page.object_list: loc = f'{protocol}://{domain}{self._location(item)}' priority = self._get('priority', item) lastmod = self._get('lastmod', item) if all_items_lastmod: all_items_lastmod = lastmod is not None if (all_items_lastmod and (latest_lastmod is None or lastmod > latest_lastmod)): latest_lastmod = lastmod url_info = { 'item': item, 'location': loc, 'lastmod': lastmod, 'changefreq': self._get('changefreq', item), 'priority': str(priority if priority is not None else ''), 'alternates': [], } if self.i18n and self.alternates: for lang_code in self._languages(): loc = f'{protocol}://{domain}{self._location(item, lang_code)}' url_info['alternates'].append({ 'location': loc, 'lang_code': lang_code, }) if self.x_default: lang_code = settings.LANGUAGE_CODE loc = f'{protocol}://{domain}{self._location(item, lang_code)}' loc = loc.replace(f'/{lang_code}/', '/', 1) url_info['alternates'].append({ 'location': loc, 'lang_code': 'x-default', }) urls.append(url_info) if all_items_lastmod and latest_lastmod: self.latest_lastmod = latest_lastmod return urls class GenericSitemap(Sitemap): priority = None changefreq = None def __init__(self, info_dict, priority=None, changefreq=None, protocol=None): self.queryset = info_dict['queryset'] self.date_field = info_dict.get('date_field') self.priority = self.priority or priority self.changefreq = self.changefreq or changefreq self.protocol = self.protocol or protocol def items(self): # Make sure to return a clone; we don't want premature evaluation. return self.queryset.filter() def lastmod(self, item): if self.date_field is not None: return getattr(item, self.date_field) return None
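# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the original module. The
# 'blog' app, Entry model, and field names are hypothetical. It shows the
# intended subclassing surface: items(), lastmod(), and the default
# location() (which calls get_absolute_url()) feed _urls(), while
# GenericSitemap covers the plain "queryset plus date field" case. Registering
# the view under the name used by _get_sitemap_full_url() lets ping_google()
# auto-detect the sitemap URL.
from django.contrib.sitemaps import GenericSitemap, Sitemap
from django.contrib.sitemaps import views as sitemap_views
from django.urls import path

from blog.models import Entry  # hypothetical model with get_absolute_url()


class EntrySitemap(Sitemap):
    changefreq = 'weekly'
    priority = 0.6
    protocol = 'https'

    def items(self):
        return Entry.objects.filter(published=True)

    def lastmod(self, item):
        return item.updated_at


sitemaps = {
    'entries': EntrySitemap(),
    # The same data exposed without writing a subclass:
    'entries-generic': GenericSitemap(
        {'queryset': Entry.objects.all(), 'date_field': 'updated_at'},
        priority=0.5,
    ),
}

urlpatterns = [
    path(
        'sitemap.xml',
        sitemap_views.sitemap,
        {'sitemaps': sitemaps},
        name='django.contrib.sitemaps.views.sitemap',
    ),
]
# ---------------------------------------------------------------------------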
13dea5043c0694a35f2e3e1549aefe7bf6db1fe8666a0354d98717a6642d4167
import copy import json import operator import re from functools import partial, reduce, update_wrapper from urllib.parse import quote as urlquote from django import forms from django.conf import settings from django.contrib import messages from django.contrib.admin import helpers, widgets from django.contrib.admin.checks import ( BaseModelAdminChecks, InlineModelAdminChecks, ModelAdminChecks, ) from django.contrib.admin.decorators import display from django.contrib.admin.exceptions import DisallowedModelAdminToField from django.contrib.admin.templatetags.admin_urls import add_preserved_filters from django.contrib.admin.utils import ( NestedObjects, construct_change_message, flatten_fieldsets, get_deleted_objects, lookup_needs_distinct, model_format_dict, model_ngettext, quote, unquote, ) from django.contrib.admin.widgets import ( AutocompleteSelect, AutocompleteSelectMultiple, ) from django.contrib.auth import get_permission_codename from django.core.exceptions import ( FieldDoesNotExist, FieldError, PermissionDenied, ValidationError, ) from django.core.paginator import Paginator from django.db import models, router, transaction from django.db.models.constants import LOOKUP_SEP from django.forms.formsets import DELETION_FIELD_NAME, all_valid from django.forms.models import ( BaseInlineFormSet, inlineformset_factory, modelform_defines_fields, modelform_factory, modelformset_factory, ) from django.forms.widgets import CheckboxSelectMultiple, SelectMultiple from django.http import HttpResponseRedirect from django.http.response import HttpResponseBase from django.template.response import SimpleTemplateResponse, TemplateResponse from django.urls import reverse from django.utils.decorators import method_decorator from django.utils.html import format_html from django.utils.http import urlencode from django.utils.safestring import mark_safe from django.utils.text import ( capfirst, format_lazy, get_text_list, smart_split, unescape_string_literal, ) from django.utils.translation import gettext as _, ngettext from django.views.decorators.csrf import csrf_protect from django.views.generic import RedirectView IS_POPUP_VAR = '_popup' TO_FIELD_VAR = '_to_field' HORIZONTAL, VERTICAL = 1, 2 def get_content_type_for_model(obj): # Since this module gets imported in the application's root package, # it cannot import models from other applications at the module level. from django.contrib.contenttypes.models import ContentType return ContentType.objects.get_for_model(obj, for_concrete_model=False) def get_ul_class(radio_style): return 'radiolist' if radio_style == VERTICAL else 'radiolist inline' class IncorrectLookupParameters(Exception): pass # Defaults for formfield_overrides. ModelAdmin subclasses can change this # by adding to ModelAdmin.formfield_overrides. 
FORMFIELD_FOR_DBFIELD_DEFAULTS = { models.DateTimeField: { 'form_class': forms.SplitDateTimeField, 'widget': widgets.AdminSplitDateTime }, models.DateField: {'widget': widgets.AdminDateWidget}, models.TimeField: {'widget': widgets.AdminTimeWidget}, models.TextField: {'widget': widgets.AdminTextareaWidget}, models.URLField: {'widget': widgets.AdminURLFieldWidget}, models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget}, models.BigIntegerField: {'widget': widgets.AdminBigIntegerFieldWidget}, models.CharField: {'widget': widgets.AdminTextInputWidget}, models.ImageField: {'widget': widgets.AdminFileWidget}, models.FileField: {'widget': widgets.AdminFileWidget}, models.EmailField: {'widget': widgets.AdminEmailInputWidget}, models.UUIDField: {'widget': widgets.AdminUUIDInputWidget}, } csrf_protect_m = method_decorator(csrf_protect) class BaseModelAdmin(metaclass=forms.MediaDefiningClass): """Functionality common to both ModelAdmin and InlineAdmin.""" autocomplete_fields = () raw_id_fields = () fields = None exclude = None fieldsets = None form = forms.ModelForm filter_vertical = () filter_horizontal = () radio_fields = {} prepopulated_fields = {} formfield_overrides = {} readonly_fields = () ordering = None sortable_by = None view_on_site = True show_full_result_count = True checks_class = BaseModelAdminChecks def check(self, **kwargs): return self.checks_class().check(self, **kwargs) def __init__(self): # Merge FORMFIELD_FOR_DBFIELD_DEFAULTS with the formfield_overrides # rather than simply overwriting. overrides = copy.deepcopy(FORMFIELD_FOR_DBFIELD_DEFAULTS) for k, v in self.formfield_overrides.items(): overrides.setdefault(k, {}).update(v) self.formfield_overrides = overrides def formfield_for_dbfield(self, db_field, request, **kwargs): """ Hook for specifying the form Field instance for a given database Field instance. If kwargs are given, they're passed to the form Field's constructor. """ # If the field specifies choices, we don't need to look for special # admin widgets - we just need to use a select widget of some kind. if db_field.choices: return self.formfield_for_choice_field(db_field, request, **kwargs) # ForeignKey or ManyToManyFields if isinstance(db_field, (models.ForeignKey, models.ManyToManyField)): # Combine the field kwargs with any options for formfield_overrides. # Make sure the passed in **kwargs override anything in # formfield_overrides because **kwargs is more specific, and should # always win. if db_field.__class__ in self.formfield_overrides: kwargs = {**self.formfield_overrides[db_field.__class__], **kwargs} # Get the correct formfield. if isinstance(db_field, models.ForeignKey): formfield = self.formfield_for_foreignkey(db_field, request, **kwargs) elif isinstance(db_field, models.ManyToManyField): formfield = self.formfield_for_manytomany(db_field, request, **kwargs) # For non-raw_id fields, wrap the widget with a wrapper that adds # extra HTML -- the "add other" interface -- to the end of the # rendered output. formfield can be None if it came from a # OneToOneField with parent_link=True or a M2M intermediary. 
if formfield and db_field.name not in self.raw_id_fields: related_modeladmin = self.admin_site._registry.get(db_field.remote_field.model) wrapper_kwargs = {} if related_modeladmin: wrapper_kwargs.update( can_add_related=related_modeladmin.has_add_permission(request), can_change_related=related_modeladmin.has_change_permission(request), can_delete_related=related_modeladmin.has_delete_permission(request), can_view_related=related_modeladmin.has_view_permission(request), ) formfield.widget = widgets.RelatedFieldWidgetWrapper( formfield.widget, db_field.remote_field, self.admin_site, **wrapper_kwargs ) return formfield # If we've got overrides for the formfield defined, use 'em. **kwargs # passed to formfield_for_dbfield override the defaults. for klass in db_field.__class__.mro(): if klass in self.formfield_overrides: kwargs = {**copy.deepcopy(self.formfield_overrides[klass]), **kwargs} return db_field.formfield(**kwargs) # For any other type of field, just call its formfield() method. return db_field.formfield(**kwargs) def formfield_for_choice_field(self, db_field, request, **kwargs): """ Get a form Field for a database Field that has declared choices. """ # If the field is named as a radio_field, use a RadioSelect if db_field.name in self.radio_fields: # Avoid stomping on custom widget/choices arguments. if 'widget' not in kwargs: kwargs['widget'] = widgets.AdminRadioSelect(attrs={ 'class': get_ul_class(self.radio_fields[db_field.name]), }) if 'choices' not in kwargs: kwargs['choices'] = db_field.get_choices( include_blank=db_field.blank, blank_choice=[('', _('None'))] ) return db_field.formfield(**kwargs) def get_field_queryset(self, db, db_field, request): """ If the ModelAdmin specifies ordering, the queryset should respect that ordering. Otherwise don't specify the queryset, let the field decide (return None in that case). """ related_admin = self.admin_site._registry.get(db_field.remote_field.model) if related_admin is not None: ordering = related_admin.get_ordering(request) if ordering is not None and ordering != (): return db_field.remote_field.model._default_manager.using(db).order_by(*ordering) return None def formfield_for_foreignkey(self, db_field, request, **kwargs): """ Get a form Field for a ForeignKey. """ db = kwargs.get('using') if 'widget' not in kwargs: if db_field.name in self.get_autocomplete_fields(request): kwargs['widget'] = AutocompleteSelect(db_field, self.admin_site, using=db) elif db_field.name in self.raw_id_fields: kwargs['widget'] = widgets.ForeignKeyRawIdWidget(db_field.remote_field, self.admin_site, using=db) elif db_field.name in self.radio_fields: kwargs['widget'] = widgets.AdminRadioSelect(attrs={ 'class': get_ul_class(self.radio_fields[db_field.name]), }) kwargs['empty_label'] = _('None') if db_field.blank else None if 'queryset' not in kwargs: queryset = self.get_field_queryset(db, db_field, request) if queryset is not None: kwargs['queryset'] = queryset return db_field.formfield(**kwargs) def formfield_for_manytomany(self, db_field, request, **kwargs): """ Get a form Field for a ManyToManyField. """ # If it uses an intermediary model that isn't auto created, don't show # a field in admin. 
if not db_field.remote_field.through._meta.auto_created: return None db = kwargs.get('using') if 'widget' not in kwargs: autocomplete_fields = self.get_autocomplete_fields(request) if db_field.name in autocomplete_fields: kwargs['widget'] = AutocompleteSelectMultiple( db_field, self.admin_site, using=db, ) elif db_field.name in self.raw_id_fields: kwargs['widget'] = widgets.ManyToManyRawIdWidget( db_field.remote_field, self.admin_site, using=db, ) elif db_field.name in [*self.filter_vertical, *self.filter_horizontal]: kwargs['widget'] = widgets.FilteredSelectMultiple( db_field.verbose_name, db_field.name in self.filter_vertical ) if 'queryset' not in kwargs: queryset = self.get_field_queryset(db, db_field, request) if queryset is not None: kwargs['queryset'] = queryset form_field = db_field.formfield(**kwargs) if (isinstance(form_field.widget, SelectMultiple) and not isinstance(form_field.widget, (CheckboxSelectMultiple, AutocompleteSelectMultiple))): msg = _('Hold down “Control”, or “Command” on a Mac, to select more than one.') help_text = form_field.help_text form_field.help_text = format_lazy('{} {}', help_text, msg) if help_text else msg return form_field def get_autocomplete_fields(self, request): """ Return a list of ForeignKey and/or ManyToMany fields which should use an autocomplete widget. """ return self.autocomplete_fields def get_view_on_site_url(self, obj=None): if obj is None or not self.view_on_site: return None if callable(self.view_on_site): return self.view_on_site(obj) elif hasattr(obj, 'get_absolute_url'): # use the ContentType lookup if view_on_site is True return reverse('admin:view_on_site', kwargs={ 'content_type_id': get_content_type_for_model(obj).pk, 'object_id': obj.pk }) def get_empty_value_display(self): """ Return the empty_value_display set on ModelAdmin or AdminSite. """ try: return mark_safe(self.empty_value_display) except AttributeError: return mark_safe(self.admin_site.empty_value_display) def get_exclude(self, request, obj=None): """ Hook for specifying exclude. """ return self.exclude def get_fields(self, request, obj=None): """ Hook for specifying fields. """ if self.fields: return self.fields # _get_form_for_get_fields() is implemented in subclasses. form = self._get_form_for_get_fields(request, obj) return [*form.base_fields, *self.get_readonly_fields(request, obj)] def get_fieldsets(self, request, obj=None): """ Hook for specifying fieldsets. """ if self.fieldsets: return self.fieldsets return [(None, {'fields': self.get_fields(request, obj)})] def get_inlines(self, request, obj): """Hook for specifying custom inlines.""" return self.inlines def get_ordering(self, request): """ Hook for specifying field ordering. """ return self.ordering or () # otherwise we might try to *None, which is bad ;) def get_readonly_fields(self, request, obj=None): """ Hook for specifying custom readonly fields. """ return self.readonly_fields def get_prepopulated_fields(self, request, obj=None): """ Hook for specifying custom prepopulated fields. """ return self.prepopulated_fields def get_queryset(self, request): """ Return a QuerySet of all model instances that can be edited by the admin site. This is used by changelist_view. """ qs = self.model._default_manager.get_queryset() # TODO: this should be handled by some parameter to the ChangeList. 
ordering = self.get_ordering(request) if ordering: qs = qs.order_by(*ordering) return qs def get_sortable_by(self, request): """Hook for specifying which fields can be sorted in the changelist.""" return self.sortable_by if self.sortable_by is not None else self.get_list_display(request) def lookup_allowed(self, lookup, value): from django.contrib.admin.filters import SimpleListFilter model = self.model # Check FKey lookups that are allowed, so that popups produced by # ForeignKeyRawIdWidget, on the basis of ForeignKey.limit_choices_to, # are allowed to work. for fk_lookup in model._meta.related_fkey_lookups: # As ``limit_choices_to`` can be a callable, invoke it here. if callable(fk_lookup): fk_lookup = fk_lookup() if (lookup, value) in widgets.url_params_from_lookup_dict(fk_lookup).items(): return True relation_parts = [] prev_field = None for part in lookup.split(LOOKUP_SEP): try: field = model._meta.get_field(part) except FieldDoesNotExist: # Lookups on nonexistent fields are ok, since they're ignored # later. break # It is allowed to filter on values that would be found from local # model anyways. For example, if you filter on employee__department__id, # then the id value would be found already from employee__department_id. if not prev_field or (prev_field.is_relation and field not in prev_field.get_path_info()[-1].target_fields): relation_parts.append(part) if not getattr(field, 'get_path_info', None): # This is not a relational field, so further parts # must be transforms. break prev_field = field model = field.get_path_info()[-1].to_opts.model if len(relation_parts) <= 1: # Either a local field filter, or no fields at all. return True valid_lookups = {self.date_hierarchy} for filter_item in self.list_filter: if isinstance(filter_item, type) and issubclass(filter_item, SimpleListFilter): valid_lookups.add(filter_item.parameter_name) elif isinstance(filter_item, (list, tuple)): valid_lookups.add(filter_item[0]) else: valid_lookups.add(filter_item) # Is it a valid relational lookup? return not { LOOKUP_SEP.join(relation_parts), LOOKUP_SEP.join(relation_parts + [part]) }.isdisjoint(valid_lookups) def to_field_allowed(self, request, to_field): """ Return True if the model associated with this admin should be allowed to be referenced by the specified field. """ opts = self.model._meta try: field = opts.get_field(to_field) except FieldDoesNotExist: return False # Always allow referencing the primary key since it's already possible # to get this information from the change view URL. if field.primary_key: return True # Allow reverse relationships to models defining m2m fields if they # target the specified field. for many_to_many in opts.many_to_many: if many_to_many.m2m_target_field_name() == to_field: return True # Make sure at least one of the models registered for this site # references this field through a FK or a M2M relationship. 
registered_models = set() for model, admin in self.admin_site._registry.items(): registered_models.add(model) for inline in admin.inlines: registered_models.add(inline.model) related_objects = ( f for f in opts.get_fields(include_hidden=True) if (f.auto_created and not f.concrete) ) for related_object in related_objects: related_model = related_object.related_model remote_field = related_object.field.remote_field if (any(issubclass(model, related_model) for model in registered_models) and hasattr(remote_field, 'get_related_field') and remote_field.get_related_field() == field): return True return False def has_add_permission(self, request): """ Return True if the given request has permission to add an object. Can be overridden by the user in subclasses. """ opts = self.opts codename = get_permission_codename('add', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_change_permission(self, request, obj=None): """ Return True if the given request has permission to change the given Django model instance, the default implementation doesn't examine the `obj` parameter. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to change the `obj` model instance. If `obj` is None, this should return True if the given request has permission to change *any* object of the given type. """ opts = self.opts codename = get_permission_codename('change', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_delete_permission(self, request, obj=None): """ Return True if the given request has permission to change the given Django model instance, the default implementation doesn't examine the `obj` parameter. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to delete the `obj` model instance. If `obj` is None, this should return True if the given request has permission to delete *any* object of the given type. """ opts = self.opts codename = get_permission_codename('delete', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_view_permission(self, request, obj=None): """ Return True if the given request has permission to view the given Django model instance. The default implementation doesn't examine the `obj` parameter. If overridden by the user in subclasses, it should return True if the given request has permission to view the `obj` model instance. If `obj` is None, it should return True if the request has permission to view any object of the given type. """ opts = self.opts codename_view = get_permission_codename('view', opts) codename_change = get_permission_codename('change', opts) return ( request.user.has_perm('%s.%s' % (opts.app_label, codename_view)) or request.user.has_perm('%s.%s' % (opts.app_label, codename_change)) ) def has_view_or_change_permission(self, request, obj=None): return self.has_view_permission(request, obj) or self.has_change_permission(request, obj) def has_module_permission(self, request): """ Return True if the given request has any permission in the given app label. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to view the module on the admin index page and access the module's index page. Overriding it does not restrict access to the add, change or delete views. Use `ModelAdmin.has_(add|change|delete)_permission` for that. 
""" return request.user.has_module_perms(self.opts.app_label) class ModelAdmin(BaseModelAdmin): """Encapsulate all admin options and functionality for a given model.""" list_display = ('__str__',) list_display_links = () list_filter = () list_select_related = False list_per_page = 100 list_max_show_all = 200 list_editable = () search_fields = () date_hierarchy = None save_as = False save_as_continue = True save_on_top = False paginator = Paginator preserve_filters = True inlines = [] # Custom templates (designed to be over-ridden in subclasses) add_form_template = None change_form_template = None change_list_template = None delete_confirmation_template = None delete_selected_confirmation_template = None object_history_template = None popup_response_template = None # Actions actions = [] action_form = helpers.ActionForm actions_on_top = True actions_on_bottom = False actions_selection_counter = True checks_class = ModelAdminChecks def __init__(self, model, admin_site): self.model = model self.opts = model._meta self.admin_site = admin_site super().__init__() def __str__(self): return "%s.%s" % (self.model._meta.app_label, self.__class__.__name__) def get_inline_instances(self, request, obj=None): inline_instances = [] for inline_class in self.get_inlines(request, obj): inline = inline_class(self.model, self.admin_site) if request: if not (inline.has_view_or_change_permission(request, obj) or inline.has_add_permission(request, obj) or inline.has_delete_permission(request, obj)): continue if not inline.has_add_permission(request, obj): inline.max_num = 0 inline_instances.append(inline) return inline_instances def get_urls(self): from django.urls import path def wrap(view): def wrapper(*args, **kwargs): return self.admin_site.admin_view(view)(*args, **kwargs) wrapper.model_admin = self return update_wrapper(wrapper, view) info = self.model._meta.app_label, self.model._meta.model_name return [ path('', wrap(self.changelist_view), name='%s_%s_changelist' % info), path('add/', wrap(self.add_view), name='%s_%s_add' % info), path('<path:object_id>/history/', wrap(self.history_view), name='%s_%s_history' % info), path('<path:object_id>/delete/', wrap(self.delete_view), name='%s_%s_delete' % info), path('<path:object_id>/change/', wrap(self.change_view), name='%s_%s_change' % info), # For backwards compatibility (was the change url before 1.9) path('<path:object_id>/', wrap(RedirectView.as_view( pattern_name='%s:%s_%s_change' % ((self.admin_site.name,) + info) ))), ] @property def urls(self): return self.get_urls() @property def media(self): extra = '' if settings.DEBUG else '.min' js = [ 'vendor/jquery/jquery%s.js' % extra, 'jquery.init.js', 'core.js', 'admin/RelatedObjectLookups.js', 'actions.js', 'urlify.js', 'prepopulate.js', 'vendor/xregexp/xregexp%s.js' % extra, ] return forms.Media(js=['admin/js/%s' % url for url in js]) def get_model_perms(self, request): """ Return a dict of all perms for this model. This dict has the keys ``add``, ``change``, ``delete``, and ``view`` mapping to the True/False for each of those actions. """ return { 'add': self.has_add_permission(request), 'change': self.has_change_permission(request), 'delete': self.has_delete_permission(request), 'view': self.has_view_permission(request), } def _get_form_for_get_fields(self, request, obj): return self.get_form(request, obj, fields=None) def get_form(self, request, obj=None, change=False, **kwargs): """ Return a Form class for use in the admin add view. This is used by add_view and change_view. 
""" if 'fields' in kwargs: fields = kwargs.pop('fields') else: fields = flatten_fieldsets(self.get_fieldsets(request, obj)) excluded = self.get_exclude(request, obj) exclude = [] if excluded is None else list(excluded) readonly_fields = self.get_readonly_fields(request, obj) exclude.extend(readonly_fields) # Exclude all fields if it's a change form and the user doesn't have # the change permission. if change and hasattr(request, 'user') and not self.has_change_permission(request, obj): exclude.extend(fields) if excluded is None and hasattr(self.form, '_meta') and self.form._meta.exclude: # Take the custom ModelForm's Meta.exclude into account only if the # ModelAdmin doesn't define its own. exclude.extend(self.form._meta.exclude) # if exclude is an empty list we pass None to be consistent with the # default on modelform_factory exclude = exclude or None # Remove declared form fields which are in readonly_fields. new_attrs = dict.fromkeys(f for f in readonly_fields if f in self.form.declared_fields) form = type(self.form.__name__, (self.form,), new_attrs) defaults = { 'form': form, 'fields': fields, 'exclude': exclude, 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } if defaults['fields'] is None and not modelform_defines_fields(defaults['form']): defaults['fields'] = forms.ALL_FIELDS try: return modelform_factory(self.model, **defaults) except FieldError as e: raise FieldError( '%s. Check fields/fieldsets/exclude attributes of class %s.' % (e, self.__class__.__name__) ) def get_changelist(self, request, **kwargs): """ Return the ChangeList class for use on the changelist page. """ from django.contrib.admin.views.main import ChangeList return ChangeList def get_changelist_instance(self, request): """ Return a `ChangeList` instance based on `request`. May raise `IncorrectLookupParameters`. """ list_display = self.get_list_display(request) list_display_links = self.get_list_display_links(request, list_display) # Add the action checkboxes if any actions are available. if self.get_actions(request): list_display = ['action_checkbox', *list_display] sortable_by = self.get_sortable_by(request) ChangeList = self.get_changelist(request) return ChangeList( request, self.model, list_display, list_display_links, self.get_list_filter(request), self.date_hierarchy, self.get_search_fields(request), self.get_list_select_related(request), self.list_per_page, self.list_max_show_all, self.list_editable, self, sortable_by, ) def get_object(self, request, object_id, from_field=None): """ Return an instance matching the field and value provided, the primary key is used if no field is provided. Return ``None`` if no match is found or the object_id fails validation. """ queryset = self.get_queryset(request) model = queryset.model field = model._meta.pk if from_field is None else model._meta.get_field(from_field) try: object_id = field.to_python(object_id) return queryset.get(**{field.name: object_id}) except (model.DoesNotExist, ValidationError, ValueError): return None def get_changelist_form(self, request, **kwargs): """ Return a Form class for use in the Formset on the changelist page. 
""" defaults = { 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } if defaults.get('fields') is None and not modelform_defines_fields(defaults.get('form')): defaults['fields'] = forms.ALL_FIELDS return modelform_factory(self.model, **defaults) def get_changelist_formset(self, request, **kwargs): """ Return a FormSet class for use on the changelist page if list_editable is used. """ defaults = { 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } return modelformset_factory( self.model, self.get_changelist_form(request), extra=0, fields=self.list_editable, **defaults ) def get_formsets_with_inlines(self, request, obj=None): """ Yield formsets and the corresponding inlines. """ for inline in self.get_inline_instances(request, obj): yield inline.get_formset(request, obj), inline def get_paginator(self, request, queryset, per_page, orphans=0, allow_empty_first_page=True): return self.paginator(queryset, per_page, orphans, allow_empty_first_page) def log_addition(self, request, object, message): """ Log that an object has been successfully added. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import ADDITION, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(object).pk, object_id=object.pk, object_repr=str(object), action_flag=ADDITION, change_message=message, ) def log_change(self, request, object, message): """ Log that an object has been successfully changed. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import CHANGE, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(object).pk, object_id=object.pk, object_repr=str(object), action_flag=CHANGE, change_message=message, ) def log_deletion(self, request, object, object_repr): """ Log that an object will be deleted. Note that this method must be called before the deletion. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import DELETION, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(object).pk, object_id=object.pk, object_repr=object_repr, action_flag=DELETION, ) @display(description=mark_safe('<input type="checkbox" id="action-toggle">')) def action_checkbox(self, obj): """ A list_display column containing a checkbox widget. """ return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME, str(obj.pk)) @staticmethod def _get_action_description(func, name): return getattr(func, 'short_description', capfirst(name.replace('_', ' '))) def _get_base_actions(self): """Return the list of actions, prior to any request-based filtering.""" actions = [] base_actions = (self.get_action(action) for action in self.actions or []) # get_action might have returned None, so filter any of those out. base_actions = [action for action in base_actions if action] base_action_names = {name for _, name, _ in base_actions} # Gather actions from the admin site first for (name, func) in self.admin_site.actions: if name in base_action_names: continue description = self._get_action_description(func, name) actions.append((func, name, description)) # Add actions from this ModelAdmin. 
actions.extend(base_actions) return actions def _filter_actions_by_permissions(self, request, actions): """Filter out any actions that the user doesn't have access to.""" filtered_actions = [] for action in actions: callable = action[0] if not hasattr(callable, 'allowed_permissions'): filtered_actions.append(action) continue permission_checks = ( getattr(self, 'has_%s_permission' % permission) for permission in callable.allowed_permissions ) if any(has_permission(request) for has_permission in permission_checks): filtered_actions.append(action) return filtered_actions def get_actions(self, request): """ Return a dictionary mapping the names of all actions for this ModelAdmin to a tuple of (callable, name, description) for each action. """ # If self.actions is set to None that means actions are disabled on # this page. if self.actions is None or IS_POPUP_VAR in request.GET: return {} actions = self._filter_actions_by_permissions(request, self._get_base_actions()) return {name: (func, name, desc) for func, name, desc in actions} def get_action_choices(self, request, default_choices=models.BLANK_CHOICE_DASH): """ Return a list of choices for use in a form object. Each choice is a tuple (name, description). """ choices = [] + default_choices for func, name, description in self.get_actions(request).values(): choice = (name, description % model_format_dict(self.opts)) choices.append(choice) return choices def get_action(self, action): """ Return a given action from a parameter, which can either be a callable, or the name of a method on the ModelAdmin. Return is a tuple of (callable, name, description). """ # If the action is a callable, just use it. if callable(action): func = action action = action.__name__ # Next, look for a method. Grab it off self.__class__ to get an unbound # method instead of a bound one; this ensures that the calling # conventions are the same for functions and methods. elif hasattr(self.__class__, action): func = getattr(self.__class__, action) # Finally, look for a named method on the admin site else: try: func = self.admin_site.get_action(action) except KeyError: return None description = self._get_action_description(func, action) return func, action, description def get_list_display(self, request): """ Return a sequence containing the fields to be displayed on the changelist. """ return self.list_display def get_list_display_links(self, request, list_display): """ Return a sequence containing the fields to be displayed as links on the changelist. The list_display parameter is the list of fields returned by get_list_display(). """ if self.list_display_links or self.list_display_links is None or not list_display: return self.list_display_links else: # Use only the first item in list_display as link return list(list_display)[:1] def get_list_filter(self, request): """ Return a sequence containing the fields to be displayed as filters in the right sidebar of the changelist page. """ return self.list_filter def get_list_select_related(self, request): """ Return a list of fields to add to the select_related() part of the changelist items query. """ return self.list_select_related def get_search_fields(self, request): """ Return a sequence containing the fields to be searched whenever somebody submits a search query. """ return self.search_fields def get_search_results(self, request, queryset, search_term): """ Return a tuple containing a queryset to implement the search and a boolean indicating if the results may contain duplicates. """ # Apply keyword searches. 
def construct_search(field_name): if field_name.startswith('^'): return "%s__istartswith" % field_name[1:] elif field_name.startswith('='): return "%s__iexact" % field_name[1:] elif field_name.startswith('@'): return "%s__search" % field_name[1:] # Use field_name if it includes a lookup. opts = queryset.model._meta lookup_fields = field_name.split(LOOKUP_SEP) # Go through the fields, following all relations. prev_field = None for path_part in lookup_fields: if path_part == 'pk': path_part = opts.pk.name try: field = opts.get_field(path_part) except FieldDoesNotExist: # Use valid query lookups. if prev_field and prev_field.get_lookup(path_part): return field_name else: prev_field = field if hasattr(field, 'get_path_info'): # Update opts to follow the relation. opts = field.get_path_info()[-1].to_opts # Otherwise, use the field with icontains. return "%s__icontains" % field_name may_have_duplicates = False search_fields = self.get_search_fields(request) if search_fields and search_term: orm_lookups = [construct_search(str(search_field)) for search_field in search_fields] for bit in smart_split(search_term): if bit.startswith(('"', "'")) and bit[0] == bit[-1]: bit = unescape_string_literal(bit) or_queries = [models.Q(**{orm_lookup: bit}) for orm_lookup in orm_lookups] queryset = queryset.filter(reduce(operator.or_, or_queries)) may_have_duplicates |= any( lookup_needs_distinct(self.opts, search_spec) for search_spec in orm_lookups ) return queryset, may_have_duplicates def get_preserved_filters(self, request): """ Return the preserved filters querystring. """ match = request.resolver_match if self.preserve_filters and match: opts = self.model._meta current_url = '%s:%s' % (match.app_name, match.url_name) changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name) if current_url == changelist_url: preserved_filters = request.GET.urlencode() else: preserved_filters = request.GET.get('_changelist_filters') if preserved_filters: return urlencode({'_changelist_filters': preserved_filters}) return '' def construct_change_message(self, request, form, formsets, add=False): """ Construct a JSON structure describing changes from a changed object. """ return construct_change_message(form, formsets, add) def message_user(self, request, message, level=messages.INFO, extra_tags='', fail_silently=False): """ Send a message to the user. The default implementation posts a message using the django.contrib.messages backend. Exposes almost the same API as messages.add_message(), but accepts the positional arguments in a different order to maintain backwards compatibility. For convenience, it accepts the `level` argument as a string rather than the usual level number. """ if not isinstance(level, int): # attempt to get the level if passed a string try: level = getattr(messages.constants, level.upper()) except AttributeError: levels = messages.constants.DEFAULT_TAGS.values() levels_repr = ', '.join('`%s`' % level for level in levels) raise ValueError( 'Bad message level string: `%s`. Possible values are: %s' % (level, levels_repr) ) messages.add_message(request, level, message, extra_tags=extra_tags, fail_silently=fail_silently) def save_form(self, request, form, change): """ Given a ModelForm return an unsaved instance. ``change`` is True if the object is being changed, and False if it's being added. """ return form.save(commit=False) def save_model(self, request, obj, form, change): """ Given a model instance save it to the database. 
""" obj.save() def delete_model(self, request, obj): """ Given a model instance delete it from the database. """ obj.delete() def delete_queryset(self, request, queryset): """Given a queryset, delete it from the database.""" queryset.delete() def save_formset(self, request, form, formset, change): """ Given an inline formset save it to the database. """ formset.save() def save_related(self, request, form, formsets, change): """ Given the ``HttpRequest``, the parent ``ModelForm`` instance, the list of inline formsets and a boolean value based on whether the parent is being added or changed, save the related objects to the database. Note that at this point save_form() and save_model() have already been called. """ form.save_m2m() for formset in formsets: self.save_formset(request, form, formset, change=change) def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None): opts = self.model._meta app_label = opts.app_label preserved_filters = self.get_preserved_filters(request) form_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, form_url) view_on_site_url = self.get_view_on_site_url(obj) has_editable_inline_admin_formsets = False for inline in context['inline_admin_formsets']: if inline.has_add_permission or inline.has_change_permission or inline.has_delete_permission: has_editable_inline_admin_formsets = True break context.update({ 'add': add, 'change': change, 'has_view_permission': self.has_view_permission(request, obj), 'has_add_permission': self.has_add_permission(request), 'has_change_permission': self.has_change_permission(request, obj), 'has_delete_permission': self.has_delete_permission(request, obj), 'has_editable_inline_admin_formsets': has_editable_inline_admin_formsets, 'has_file_field': context['adminform'].form.is_multipart() or any( admin_formset.formset.is_multipart() for admin_formset in context['inline_admin_formsets'] ), 'has_absolute_url': view_on_site_url is not None, 'absolute_url': view_on_site_url, 'form_url': form_url, 'opts': opts, 'content_type_id': get_content_type_for_model(self.model).pk, 'save_as': self.save_as, 'save_on_top': self.save_on_top, 'to_field_var': TO_FIELD_VAR, 'is_popup_var': IS_POPUP_VAR, 'app_label': app_label, }) if add and self.add_form_template is not None: form_template = self.add_form_template else: form_template = self.change_form_template request.current_app = self.admin_site.name return TemplateResponse(request, form_template or [ "admin/%s/%s/change_form.html" % (app_label, opts.model_name), "admin/%s/change_form.html" % app_label, "admin/change_form.html" ], context) def response_add(self, request, obj, post_url_continue=None): """ Determine the HttpResponse for the add_view stage. """ opts = obj._meta preserved_filters = self.get_preserved_filters(request) obj_url = reverse( 'admin:%s_%s_change' % (opts.app_label, opts.model_name), args=(quote(obj.pk),), current_app=self.admin_site.name, ) # Add a link to the object's change form if the user can edit the obj. if self.has_change_permission(request, obj): obj_repr = format_html('<a href="{}">{}</a>', urlquote(obj_url), obj) else: obj_repr = str(obj) msg_dict = { 'name': opts.verbose_name, 'obj': obj_repr, } # Here, we distinguish between different save types by checking for # the presence of keys in request.POST. 
if IS_POPUP_VAR in request.POST: to_field = request.POST.get(TO_FIELD_VAR) if to_field: attr = str(to_field) else: attr = obj._meta.pk.attname value = obj.serializable_value(attr) popup_response_data = json.dumps({ 'value': str(value), 'obj': str(obj), }) return TemplateResponse(request, self.popup_response_template or [ 'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name), 'admin/%s/popup_response.html' % opts.app_label, 'admin/popup_response.html', ], { 'popup_response_data': popup_response_data, }) elif "_continue" in request.POST or ( # Redirecting after "Save as new". "_saveasnew" in request.POST and self.save_as_continue and self.has_change_permission(request, obj) ): msg = _('The {name} “{obj}” was added successfully.') if self.has_change_permission(request, obj): msg += ' ' + _('You may edit it again below.') self.message_user(request, format_html(msg, **msg_dict), messages.SUCCESS) if post_url_continue is None: post_url_continue = obj_url post_url_continue = add_preserved_filters( {'preserved_filters': preserved_filters, 'opts': opts}, post_url_continue ) return HttpResponseRedirect(post_url_continue) elif "_addanother" in request.POST: msg = format_html( _('The {name} “{obj}” was added successfully. You may add another {name} below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = request.path redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) else: msg = format_html( _('The {name} “{obj}” was added successfully.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) return self.response_post_save_add(request, obj) def response_change(self, request, obj): """ Determine the HttpResponse for the change_view stage. """ if IS_POPUP_VAR in request.POST: opts = obj._meta to_field = request.POST.get(TO_FIELD_VAR) attr = str(to_field) if to_field else opts.pk.attname value = request.resolver_match.kwargs['object_id'] new_value = obj.serializable_value(attr) popup_response_data = json.dumps({ 'action': 'change', 'value': str(value), 'obj': str(obj), 'new_value': str(new_value), }) return TemplateResponse(request, self.popup_response_template or [ 'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name), 'admin/%s/popup_response.html' % opts.app_label, 'admin/popup_response.html', ], { 'popup_response_data': popup_response_data, }) opts = self.model._meta preserved_filters = self.get_preserved_filters(request) msg_dict = { 'name': opts.verbose_name, 'obj': format_html('<a href="{}">{}</a>', urlquote(request.path), obj), } if "_continue" in request.POST: msg = format_html( _('The {name} “{obj}” was changed successfully. You may edit it again below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = request.path redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) elif "_saveasnew" in request.POST: msg = format_html( _('The {name} “{obj}” was added successfully. 
You may edit it again below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = reverse('admin:%s_%s_change' % (opts.app_label, opts.model_name), args=(obj.pk,), current_app=self.admin_site.name) redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) elif "_addanother" in request.POST: msg = format_html( _('The {name} “{obj}” was changed successfully. You may add another {name} below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = reverse('admin:%s_%s_add' % (opts.app_label, opts.model_name), current_app=self.admin_site.name) redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) else: msg = format_html( _('The {name} “{obj}” was changed successfully.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) return self.response_post_save_change(request, obj) def _response_post_save(self, request, obj): opts = self.model._meta if self.has_view_or_change_permission(request): post_url = reverse('admin:%s_%s_changelist' % (opts.app_label, opts.model_name), current_app=self.admin_site.name) preserved_filters = self.get_preserved_filters(request) post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url) else: post_url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(post_url) def response_post_save_add(self, request, obj): """ Figure out where to redirect after the 'Save' button has been pressed when adding a new object. """ return self._response_post_save(request, obj) def response_post_save_change(self, request, obj): """ Figure out where to redirect after the 'Save' button has been pressed when editing an existing object. """ return self._response_post_save(request, obj) def response_action(self, request, queryset): """ Handle an admin action. This is called if a request is POSTed to the changelist; it returns an HttpResponse if the action was handled, and None otherwise. """ # There can be multiple action forms on the page (at the top # and bottom of the change list, for example). Get the action # whose button was pushed. try: action_index = int(request.POST.get('index', 0)) except ValueError: action_index = 0 # Construct the action form. data = request.POST.copy() data.pop(helpers.ACTION_CHECKBOX_NAME, None) data.pop("index", None) # Use the action whose button was pushed try: data.update({'action': data.getlist('action')[action_index]}) except IndexError: # If we didn't get an action from the chosen form that's invalid # POST data, so by deleting action it'll fail the validation check # below. So no need to do anything here pass action_form = self.action_form(data, auto_id=None) action_form.fields['action'].choices = self.get_action_choices(request) # If the form's valid we can handle the action. if action_form.is_valid(): action = action_form.cleaned_data['action'] select_across = action_form.cleaned_data['select_across'] func = self.get_actions(request)[action][0] # Get the list of selected PKs. If nothing's selected, we can't # perform an action on it, so bail. Except we want to perform # the action explicitly on all objects. 
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME) if not selected and not select_across: # Reminder that something needs to be selected or nothing will happen msg = _("Items must be selected in order to perform " "actions on them. No items have been changed.") self.message_user(request, msg, messages.WARNING) return None if not select_across: # Perform the action only on the selected objects queryset = queryset.filter(pk__in=selected) response = func(self, request, queryset) # Actions may return an HttpResponse-like object, which will be # used as the response from the POST. If not, we'll be a good # little HTTP citizen and redirect back to the changelist page. if isinstance(response, HttpResponseBase): return response else: return HttpResponseRedirect(request.get_full_path()) else: msg = _("No action selected.") self.message_user(request, msg, messages.WARNING) return None def response_delete(self, request, obj_display, obj_id): """ Determine the HttpResponse for the delete_view stage. """ opts = self.model._meta if IS_POPUP_VAR in request.POST: popup_response_data = json.dumps({ 'action': 'delete', 'value': str(obj_id), }) return TemplateResponse(request, self.popup_response_template or [ 'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name), 'admin/%s/popup_response.html' % opts.app_label, 'admin/popup_response.html', ], { 'popup_response_data': popup_response_data, }) self.message_user( request, _('The %(name)s “%(obj)s” was deleted successfully.') % { 'name': opts.verbose_name, 'obj': obj_display, }, messages.SUCCESS, ) if self.has_change_permission(request, None): post_url = reverse( 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name), current_app=self.admin_site.name, ) preserved_filters = self.get_preserved_filters(request) post_url = add_preserved_filters( {'preserved_filters': preserved_filters, 'opts': opts}, post_url ) else: post_url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(post_url) def render_delete_form(self, request, context): opts = self.model._meta app_label = opts.app_label request.current_app = self.admin_site.name context.update( to_field_var=TO_FIELD_VAR, is_popup_var=IS_POPUP_VAR, media=self.media, ) return TemplateResponse( request, self.delete_confirmation_template or [ "admin/{}/{}/delete_confirmation.html".format(app_label, opts.model_name), "admin/{}/delete_confirmation.html".format(app_label), "admin/delete_confirmation.html", ], context, ) def get_inline_formsets(self, request, formsets, inline_instances, obj=None): # Edit permissions on parent model are required for editable inlines. can_edit_parent = self.has_change_permission(request, obj) if obj else self.has_add_permission(request) inline_admin_formsets = [] for inline, formset in zip(inline_instances, formsets): fieldsets = list(inline.get_fieldsets(request, obj)) readonly = list(inline.get_readonly_fields(request, obj)) if can_edit_parent: has_add_permission = inline.has_add_permission(request, obj) has_change_permission = inline.has_change_permission(request, obj) has_delete_permission = inline.has_delete_permission(request, obj) else: # Disable all edit-permissions, and overide formset settings. 
has_add_permission = has_change_permission = has_delete_permission = False formset.extra = formset.max_num = 0 has_view_permission = inline.has_view_permission(request, obj) prepopulated = dict(inline.get_prepopulated_fields(request, obj)) inline_admin_formset = helpers.InlineAdminFormSet( inline, formset, fieldsets, prepopulated, readonly, model_admin=self, has_add_permission=has_add_permission, has_change_permission=has_change_permission, has_delete_permission=has_delete_permission, has_view_permission=has_view_permission, ) inline_admin_formsets.append(inline_admin_formset) return inline_admin_formsets def get_changeform_initial_data(self, request): """ Get the initial form data from the request's GET params. """ initial = dict(request.GET.items()) for k in initial: try: f = self.model._meta.get_field(k) except FieldDoesNotExist: continue # We have to special-case M2Ms as a list of comma-separated PKs. if isinstance(f, models.ManyToManyField): initial[k] = initial[k].split(",") return initial def _get_obj_does_not_exist_redirect(self, request, opts, object_id): """ Create a message informing the user that the object doesn't exist and return a redirect to the admin index page. """ msg = _('%(name)s with ID “%(key)s” doesn’t exist. Perhaps it was deleted?') % { 'name': opts.verbose_name, 'key': unquote(object_id), } self.message_user(request, msg, messages.WARNING) url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(url) @csrf_protect_m def changeform_view(self, request, object_id=None, form_url='', extra_context=None): with transaction.atomic(using=router.db_for_write(self.model)): return self._changeform_view(request, object_id, form_url, extra_context) def _changeform_view(self, request, object_id, form_url, extra_context): to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR)) if to_field and not self.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." 
% to_field) model = self.model opts = model._meta if request.method == 'POST' and '_saveasnew' in request.POST: object_id = None add = object_id is None if add: if not self.has_add_permission(request): raise PermissionDenied obj = None else: obj = self.get_object(request, unquote(object_id), to_field) if request.method == 'POST': if not self.has_change_permission(request, obj): raise PermissionDenied else: if not self.has_view_or_change_permission(request, obj): raise PermissionDenied if obj is None: return self._get_obj_does_not_exist_redirect(request, opts, object_id) fieldsets = self.get_fieldsets(request, obj) ModelForm = self.get_form( request, obj, change=not add, fields=flatten_fieldsets(fieldsets) ) if request.method == 'POST': form = ModelForm(request.POST, request.FILES, instance=obj) form_validated = form.is_valid() if form_validated: new_object = self.save_form(request, form, change=not add) else: new_object = form.instance formsets, inline_instances = self._create_formsets(request, new_object, change=not add) if all_valid(formsets) and form_validated: self.save_model(request, new_object, form, not add) self.save_related(request, form, formsets, not add) change_message = self.construct_change_message(request, form, formsets, add) if add: self.log_addition(request, new_object, change_message) return self.response_add(request, new_object) else: self.log_change(request, new_object, change_message) return self.response_change(request, new_object) else: form_validated = False else: if add: initial = self.get_changeform_initial_data(request) form = ModelForm(initial=initial) formsets, inline_instances = self._create_formsets(request, form.instance, change=False) else: form = ModelForm(instance=obj) formsets, inline_instances = self._create_formsets(request, obj, change=True) if not add and not self.has_change_permission(request, obj): readonly_fields = flatten_fieldsets(fieldsets) else: readonly_fields = self.get_readonly_fields(request, obj) adminForm = helpers.AdminForm( form, list(fieldsets), # Clear prepopulated fields on a view-only form to avoid a crash. self.get_prepopulated_fields(request, obj) if add or self.has_change_permission(request, obj) else {}, readonly_fields, model_admin=self) media = self.media + adminForm.media inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj) for inline_formset in inline_formsets: media = media + inline_formset.media if add: title = _('Add %s') elif self.has_change_permission(request, obj): title = _('Change %s') else: title = _('View %s') context = { **self.admin_site.each_context(request), 'title': title % opts.verbose_name, 'subtitle': str(obj) if obj else None, 'adminform': adminForm, 'object_id': object_id, 'original': obj, 'is_popup': IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET, 'to_field': to_field, 'media': media, 'inline_admin_formsets': inline_formsets, 'errors': helpers.AdminErrorList(form, formsets), 'preserved_filters': self.get_preserved_filters(request), } # Hide the "Save" and "Save and continue" buttons if "Save as New" was # previously chosen to prevent the interface from getting confusing. if request.method == 'POST' and not form_validated and "_saveasnew" in request.POST: context['show_save'] = False context['show_save_and_continue'] = False # Use the change template instead of the add template. 
add = False context.update(extra_context or {}) return self.render_change_form(request, context, add=add, change=not add, obj=obj, form_url=form_url) def add_view(self, request, form_url='', extra_context=None): return self.changeform_view(request, None, form_url, extra_context) def change_view(self, request, object_id, form_url='', extra_context=None): return self.changeform_view(request, object_id, form_url, extra_context) def _get_edited_object_pks(self, request, prefix): """Return POST data values of list_editable primary keys.""" pk_pattern = re.compile( r'{}-\d+-{}$'.format(re.escape(prefix), self.model._meta.pk.name) ) return [value for key, value in request.POST.items() if pk_pattern.match(key)] def _get_list_editable_queryset(self, request, prefix): """ Based on POST data, return a queryset of the objects that were edited via list_editable. """ object_pks = self._get_edited_object_pks(request, prefix) queryset = self.get_queryset(request) validate = queryset.model._meta.pk.to_python try: for pk in object_pks: validate(pk) except ValidationError: # Disable the optimization if the POST data was tampered with. return queryset return queryset.filter(pk__in=object_pks) @csrf_protect_m def changelist_view(self, request, extra_context=None): """ The 'change list' admin view for this model. """ from django.contrib.admin.views.main import ERROR_FLAG opts = self.model._meta app_label = opts.app_label if not self.has_view_or_change_permission(request): raise PermissionDenied try: cl = self.get_changelist_instance(request) except IncorrectLookupParameters: # Wacky lookup parameters were given, so redirect to the main # changelist page, without parameters, and pass an 'invalid=1' # parameter via the query string. If wacky parameters were given # and the 'invalid=1' parameter was already in the query string, # something is screwed up with the database, so display an error # page. if ERROR_FLAG in request.GET: return SimpleTemplateResponse('admin/invalid_setup.html', { 'title': _('Database error'), }) return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1') # If the request was POSTed, this might be a bulk action or a bulk # edit. Try to look up an action or confirmation first, but if this # isn't an action the POST will fall through to the bulk edit check, # below. action_failed = False selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME) actions = self.get_actions(request) # Actions with no confirmation if (actions and request.method == 'POST' and 'index' in request.POST and '_save' not in request.POST): if selected: response = self.response_action(request, queryset=cl.get_queryset(request)) if response: return response else: action_failed = True else: msg = _("Items must be selected in order to perform " "actions on them. No items have been changed.") self.message_user(request, msg, messages.WARNING) action_failed = True # Actions with confirmation if (actions and request.method == 'POST' and helpers.ACTION_CHECKBOX_NAME in request.POST and 'index' not in request.POST and '_save' not in request.POST): if selected: response = self.response_action(request, queryset=cl.get_queryset(request)) if response: return response else: action_failed = True if action_failed: # Redirect back to the changelist page to avoid resubmitting the # form if the user refreshes the browser or uses the "No, take # me back" button on the action confirmation page. 
return HttpResponseRedirect(request.get_full_path()) # If we're allowing changelist editing, we need to construct a formset # for the changelist given all the fields to be edited. Then we'll # use the formset to validate/process POSTed data. formset = cl.formset = None # Handle POSTed bulk-edit data. if request.method == 'POST' and cl.list_editable and '_save' in request.POST: if not self.has_change_permission(request): raise PermissionDenied FormSet = self.get_changelist_formset(request) modified_objects = self._get_list_editable_queryset(request, FormSet.get_default_prefix()) formset = cl.formset = FormSet(request.POST, request.FILES, queryset=modified_objects) if formset.is_valid(): changecount = 0 for form in formset.forms: if form.has_changed(): obj = self.save_form(request, form, change=True) self.save_model(request, obj, form, change=True) self.save_related(request, form, formsets=[], change=True) change_msg = self.construct_change_message(request, form, None) self.log_change(request, obj, change_msg) changecount += 1 if changecount: msg = ngettext( "%(count)s %(name)s was changed successfully.", "%(count)s %(name)s were changed successfully.", changecount ) % { 'count': changecount, 'name': model_ngettext(opts, changecount), } self.message_user(request, msg, messages.SUCCESS) return HttpResponseRedirect(request.get_full_path()) # Handle GET -- construct a formset for display. elif cl.list_editable and self.has_change_permission(request): FormSet = self.get_changelist_formset(request) formset = cl.formset = FormSet(queryset=cl.result_list) # Build the list of media to be used by the formset. if formset: media = self.media + formset.media else: media = self.media # Build the action form and populate it with available actions. if actions: action_form = self.action_form(auto_id=None) action_form.fields['action'].choices = self.get_action_choices(request) media += action_form.media else: action_form = None selection_note_all = ngettext( '%(total_count)s selected', 'All %(total_count)s selected', cl.result_count ) context = { **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'selection_note': _('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)}, 'selection_note_all': selection_note_all % {'total_count': cl.result_count}, 'title': cl.title, 'subtitle': None, 'is_popup': cl.is_popup, 'to_field': cl.to_field, 'cl': cl, 'media': media, 'has_add_permission': self.has_add_permission(request), 'opts': cl.opts, 'action_form': action_form, 'actions_on_top': self.actions_on_top, 'actions_on_bottom': self.actions_on_bottom, 'actions_selection_counter': self.actions_selection_counter, 'preserved_filters': self.get_preserved_filters(request), **(extra_context or {}), } request.current_app = self.admin_site.name return TemplateResponse(request, self.change_list_template or [ 'admin/%s/%s/change_list.html' % (app_label, opts.model_name), 'admin/%s/change_list.html' % app_label, 'admin/change_list.html' ], context) def get_deleted_objects(self, objs, request): """ Hook for customizing the delete process for the delete view and the "delete selected" action. """ return get_deleted_objects(objs, request, self.admin_site) @csrf_protect_m def delete_view(self, request, object_id, extra_context=None): with transaction.atomic(using=router.db_for_write(self.model)): return self._delete_view(request, object_id, extra_context) def _delete_view(self, request, object_id, extra_context): "The 'delete' admin view for this model." 
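        # Flow (descriptive summary of the method below): look up the object,
        # collect everything that would be deleted along with it via
        # get_deleted_objects(), then either perform the deletion on a
        # confirmed POST or render the confirmation template with the
        # collected objects, missing permissions, and protected relations.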
opts = self.model._meta app_label = opts.app_label to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR)) if to_field and not self.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field) obj = self.get_object(request, unquote(object_id), to_field) if not self.has_delete_permission(request, obj): raise PermissionDenied if obj is None: return self._get_obj_does_not_exist_redirect(request, opts, object_id) # Populate deleted_objects, a data structure of all related objects that # will also be deleted. deleted_objects, model_count, perms_needed, protected = self.get_deleted_objects([obj], request) if request.POST and not protected: # The user has confirmed the deletion. if perms_needed: raise PermissionDenied obj_display = str(obj) attr = str(to_field) if to_field else opts.pk.attname obj_id = obj.serializable_value(attr) self.log_deletion(request, obj, obj_display) self.delete_model(request, obj) return self.response_delete(request, obj_display, obj_id) object_name = str(opts.verbose_name) if perms_needed or protected: title = _("Cannot delete %(name)s") % {"name": object_name} else: title = _("Are you sure?") context = { **self.admin_site.each_context(request), 'title': title, 'subtitle': None, 'object_name': object_name, 'object': obj, 'deleted_objects': deleted_objects, 'model_count': dict(model_count).items(), 'perms_lacking': perms_needed, 'protected': protected, 'opts': opts, 'app_label': app_label, 'preserved_filters': self.get_preserved_filters(request), 'is_popup': IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET, 'to_field': to_field, **(extra_context or {}), } return self.render_delete_form(request, context) def history_view(self, request, object_id, extra_context=None): "The 'history' admin view for this model." from django.contrib.admin.models import LogEntry # First check if the user can see this history. model = self.model obj = self.get_object(request, unquote(object_id)) if obj is None: return self._get_obj_does_not_exist_redirect(request, model._meta, object_id) if not self.has_view_or_change_permission(request, obj): raise PermissionDenied # Then get the history for this object. opts = model._meta app_label = opts.app_label action_list = LogEntry.objects.filter( object_id=unquote(object_id), content_type=get_content_type_for_model(model) ).select_related().order_by('action_time') context = { **self.admin_site.each_context(request), 'title': _('Change history: %s') % obj, 'subtitle': None, 'action_list': action_list, 'module_name': str(capfirst(opts.verbose_name_plural)), 'object': obj, 'opts': opts, 'preserved_filters': self.get_preserved_filters(request), **(extra_context or {}), } request.current_app = self.admin_site.name return TemplateResponse(request, self.object_history_template or [ "admin/%s/%s/object_history.html" % (app_label, opts.model_name), "admin/%s/object_history.html" % app_label, "admin/object_history.html" ], context) def get_formset_kwargs(self, request, obj, inline, prefix): formset_params = { 'instance': obj, 'prefix': prefix, 'queryset': inline.get_queryset(request), } if request.method == 'POST': formset_params.update({ 'data': request.POST.copy(), 'files': request.FILES, 'save_as_new': '_saveasnew' in request.POST }) return formset_params def _create_formsets(self, request, obj, change): "Helper function to generate formsets for add/change_view." 
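        # Build one inline formset per inline returned by
        # get_formsets_with_inlines(), giving repeated prefixes a numeric
        # suffix so several inlines for the same model don't collide, and
        # skipping validation for inlines the user may only view.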
formsets = [] inline_instances = [] prefixes = {} get_formsets_args = [request] if change: get_formsets_args.append(obj) for FormSet, inline in self.get_formsets_with_inlines(*get_formsets_args): prefix = FormSet.get_default_prefix() prefixes[prefix] = prefixes.get(prefix, 0) + 1 if prefixes[prefix] != 1 or not prefix: prefix = "%s-%s" % (prefix, prefixes[prefix]) formset_params = self.get_formset_kwargs(request, obj, inline, prefix) formset = FormSet(**formset_params) def user_deleted_form(request, obj, formset, index): """Return whether or not the user deleted the form.""" return ( inline.has_delete_permission(request, obj) and '{}-{}-DELETE'.format(formset.prefix, index) in request.POST ) # Bypass validation of each view-only inline form (since the form's # data won't be in request.POST), unless the form was deleted. if not inline.has_change_permission(request, obj if change else None): for index, form in enumerate(formset.initial_forms): if user_deleted_form(request, obj, formset, index): continue form._errors = {} form.cleaned_data = form.initial formsets.append(formset) inline_instances.append(inline) return formsets, inline_instances class InlineModelAdmin(BaseModelAdmin): """ Options for inline editing of ``model`` instances. Provide ``fk_name`` to specify the attribute name of the ``ForeignKey`` from ``model`` to its parent. This is required if ``model`` has more than one ``ForeignKey`` to its parent. """ model = None fk_name = None formset = BaseInlineFormSet extra = 3 min_num = None max_num = None template = None verbose_name = None verbose_name_plural = None can_delete = True show_change_link = False checks_class = InlineModelAdminChecks classes = None def __init__(self, parent_model, admin_site): self.admin_site = admin_site self.parent_model = parent_model self.opts = self.model._meta self.has_registered_model = admin_site.is_registered(self.model) super().__init__() if self.verbose_name is None: self.verbose_name = self.model._meta.verbose_name if self.verbose_name_plural is None: self.verbose_name_plural = self.model._meta.verbose_name_plural @property def media(self): extra = '' if settings.DEBUG else '.min' js = ['vendor/jquery/jquery%s.js' % extra, 'jquery.init.js', 'inlines.js'] if self.filter_vertical or self.filter_horizontal: js.extend(['SelectBox.js', 'SelectFilter2.js']) if self.classes and 'collapse' in self.classes: js.append('collapse.js') return forms.Media(js=['admin/js/%s' % url for url in js]) def get_extra(self, request, obj=None, **kwargs): """Hook for customizing the number of extra inline forms.""" return self.extra def get_min_num(self, request, obj=None, **kwargs): """Hook for customizing the min number of inline forms.""" return self.min_num def get_max_num(self, request, obj=None, **kwargs): """Hook for customizing the max number of extra inline forms.""" return self.max_num def get_formset(self, request, obj=None, **kwargs): """Return a BaseInlineFormSet class for use in admin add/change views.""" if 'fields' in kwargs: fields = kwargs.pop('fields') else: fields = flatten_fieldsets(self.get_fieldsets(request, obj)) excluded = self.get_exclude(request, obj) exclude = [] if excluded is None else list(excluded) exclude.extend(self.get_readonly_fields(request, obj)) if excluded is None and hasattr(self.form, '_meta') and self.form._meta.exclude: # Take the custom ModelForm's Meta.exclude into account only if the # InlineModelAdmin doesn't define its own. 
exclude.extend(self.form._meta.exclude) # If exclude is an empty list we use None, since that's the actual # default. exclude = exclude or None can_delete = self.can_delete and self.has_delete_permission(request, obj) defaults = { 'form': self.form, 'formset': self.formset, 'fk_name': self.fk_name, 'fields': fields, 'exclude': exclude, 'formfield_callback': partial(self.formfield_for_dbfield, request=request), 'extra': self.get_extra(request, obj, **kwargs), 'min_num': self.get_min_num(request, obj, **kwargs), 'max_num': self.get_max_num(request, obj, **kwargs), 'can_delete': can_delete, **kwargs, } base_model_form = defaults['form'] can_change = self.has_change_permission(request, obj) if request else True can_add = self.has_add_permission(request, obj) if request else True class DeleteProtectedModelForm(base_model_form): def hand_clean_DELETE(self): """ We don't validate the 'DELETE' field itself because on templates it's not rendered using the field information, but just using a generic "deletion_field" of the InlineModelAdmin. """ if self.cleaned_data.get(DELETION_FIELD_NAME, False): using = router.db_for_write(self._meta.model) collector = NestedObjects(using=using) if self.instance._state.adding: return collector.collect([self.instance]) if collector.protected: objs = [] for p in collector.protected: objs.append( # Translators: Model verbose name and instance representation, # suitable to be an item in a list. _('%(class_name)s %(instance)s') % { 'class_name': p._meta.verbose_name, 'instance': p} ) params = { 'class_name': self._meta.model._meta.verbose_name, 'instance': self.instance, 'related_objects': get_text_list(objs, _('and')), } msg = _("Deleting %(class_name)s %(instance)s would require " "deleting the following protected related objects: " "%(related_objects)s") raise ValidationError(msg, code='deleting_protected', params=params) def is_valid(self): result = super().is_valid() self.hand_clean_DELETE() return result def has_changed(self): # Protect against unauthorized edits. if not can_change and not self.instance._state.adding: return False if not can_add and self.instance._state.adding: return False return super().has_changed() defaults['form'] = DeleteProtectedModelForm if defaults['fields'] is None and not modelform_defines_fields(defaults['form']): defaults['fields'] = forms.ALL_FIELDS return inlineformset_factory(self.parent_model, self.model, **defaults) def _get_form_for_get_fields(self, request, obj=None): return self.get_formset(request, obj, fields=None).form def get_queryset(self, request): queryset = super().get_queryset(request) if not self.has_view_or_change_permission(request): queryset = queryset.none() return queryset def _has_any_perms_for_target_model(self, request, perms): """ This method is called only when the ModelAdmin's model is for an ManyToManyField's implicit through model (if self.opts.auto_created). Return True if the user has any of the given permissions ('add', 'change', etc.) for the model that points to the through model. """ opts = self.opts # Find the target model of an auto-created many-to-many relationship. for field in opts.fields: if field.remote_field and field.remote_field.model != self.parent_model: opts = field.remote_field.model._meta break return any( request.user.has_perm('%s.%s' % (opts.app_label, get_permission_codename(perm, opts))) for perm in perms ) def has_add_permission(self, request, obj): if self.opts.auto_created: # Auto-created intermediate models don't have their own # permissions. 
The user needs to have the change permission for the # related model in order to be able to do anything with the # intermediate model. return self._has_any_perms_for_target_model(request, ['change']) return super().has_add_permission(request) def has_change_permission(self, request, obj=None): if self.opts.auto_created: # Same comment as has_add_permission(). return self._has_any_perms_for_target_model(request, ['change']) return super().has_change_permission(request) def has_delete_permission(self, request, obj=None): if self.opts.auto_created: # Same comment as has_add_permission(). return self._has_any_perms_for_target_model(request, ['change']) return super().has_delete_permission(request, obj) def has_view_permission(self, request, obj=None): if self.opts.auto_created: # Same comment as has_add_permission(). The 'change' permission # also implies the 'view' permission. return self._has_any_perms_for_target_model(request, ['view', 'change']) return super().has_view_permission(request) class StackedInline(InlineModelAdmin): template = 'admin/edit_inline/stacked.html' class TabularInline(InlineModelAdmin): template = 'admin/edit_inline/tabular.html'
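# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Django itself): how the classes above are
# typically combined in a project's admin.py. ``Author`` and ``Book`` are
# hypothetical models (``Book`` has a ForeignKey to ``Author``); everything
# else uses the public API defined in this module.
#
#     from django.contrib import admin
#     from myapp.models import Author, Book   # hypothetical app and models
#
#     class BookInline(admin.TabularInline):
#         model = Book
#         extra = 1                  # one empty form instead of the default 3
#         show_change_link = True    # link each row to its own change form
#
#     @admin.register(Author)
#     class AuthorAdmin(admin.ModelAdmin):
#         inlines = [BookInline]
#
# ModelAdmin._create_formsets() and get_inline_formsets() (above) then build
# and render one BookInline formset on every Author add/change page.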
023314e8cbbf3a982bf0513747157007db04f354d0f38ae8ff560ebd5e25fd6b
import re from functools import update_wrapper from weakref import WeakSet from django.apps import apps from django.conf import settings from django.contrib.admin import ModelAdmin, actions from django.contrib.admin.views.autocomplete import AutocompleteJsonView from django.contrib.auth import REDIRECT_FIELD_NAME from django.core.exceptions import ImproperlyConfigured from django.db.models.base import ModelBase from django.http import ( Http404, HttpResponsePermanentRedirect, HttpResponseRedirect, ) from django.template.response import TemplateResponse from django.urls import NoReverseMatch, Resolver404, resolve, reverse from django.utils.decorators import method_decorator from django.utils.functional import LazyObject from django.utils.module_loading import import_string from django.utils.text import capfirst from django.utils.translation import gettext as _, gettext_lazy from django.views.decorators.cache import never_cache from django.views.decorators.common import no_append_slash from django.views.decorators.csrf import csrf_protect from django.views.i18n import JavaScriptCatalog all_sites = WeakSet() class AlreadyRegistered(Exception): pass class NotRegistered(Exception): pass class AdminSite: """ An AdminSite object encapsulates an instance of the Django admin application, ready to be hooked in to your URLconf. Models are registered with the AdminSite using the register() method, and the get_urls() method can then be used to access Django view functions that present a full admin interface for the collection of registered models. """ # Text to put at the end of each page's <title>. site_title = gettext_lazy('Django site admin') # Text to put in each page's <h1>. site_header = gettext_lazy('Django administration') # Text to put at the top of the admin index page. index_title = gettext_lazy('Site administration') # URL for the "View site" link at the top of each admin page. site_url = '/' enable_nav_sidebar = True empty_value_display = '-' login_form = None index_template = None app_index_template = None login_template = None logout_template = None password_change_template = None password_change_done_template = None final_catch_all_view = True def __init__(self, name='admin'): self._registry = {} # model_class class -> admin_class instance self.name = name self._actions = {'delete_selected': actions.delete_selected} self._global_actions = self._actions.copy() all_sites.add(self) def check(self, app_configs): """ Run the system checks on all ModelAdmins, except if they aren't customized at all. """ if app_configs is None: app_configs = apps.get_app_configs() app_configs = set(app_configs) # Speed up lookups below errors = [] modeladmins = (o for o in self._registry.values() if o.__class__ is not ModelAdmin) for modeladmin in modeladmins: if modeladmin.model._meta.app_config in app_configs: errors.extend(modeladmin.check()) return errors def register(self, model_or_iterable, admin_class=None, **options): """ Register the given model(s) with the given admin class. The model(s) should be Model classes, not instances. If an admin class isn't given, use ModelAdmin (the default admin options). If keyword arguments are given -- e.g., list_display -- apply them as options to the admin class. If a model is already registered, raise AlreadyRegistered. If a model is abstract, raise ImproperlyConfigured. 
""" admin_class = admin_class or ModelAdmin if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model._meta.abstract: raise ImproperlyConfigured( 'The model %s is abstract, so it cannot be registered with admin.' % model.__name__ ) if model in self._registry: registered_admin = str(self._registry[model]) msg = 'The model %s is already registered ' % model.__name__ if registered_admin.endswith('.ModelAdmin'): # Most likely registered without a ModelAdmin subclass. msg += 'in app %r.' % re.sub(r'\.ModelAdmin$', '', registered_admin) else: msg += 'with %r.' % registered_admin raise AlreadyRegistered(msg) # Ignore the registration if the model has been # swapped out. if not model._meta.swapped: # If we got **options then dynamically construct a subclass of # admin_class with those **options. if options: # For reasons I don't quite understand, without a __module__ # the created class appears to "live" in the wrong place, # which causes issues later on. options['__module__'] = __name__ admin_class = type("%sAdmin" % model.__name__, (admin_class,), options) # Instantiate the admin class to save in the registry self._registry[model] = admin_class(model, self) def unregister(self, model_or_iterable): """ Unregister the given model(s). If a model isn't already registered, raise NotRegistered. """ if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model not in self._registry: raise NotRegistered('The model %s is not registered' % model.__name__) del self._registry[model] def is_registered(self, model): """ Check if a model class is registered with this `AdminSite`. """ return model in self._registry def add_action(self, action, name=None): """ Register an action to be available globally. """ name = name or action.__name__ self._actions[name] = action self._global_actions[name] = action def disable_action(self, name): """ Disable a globally-registered action. Raise KeyError for invalid names. """ del self._actions[name] def get_action(self, name): """ Explicitly get a registered global action whether it's enabled or not. Raise KeyError for invalid names. """ return self._global_actions[name] @property def actions(self): """ Get all the enabled actions as an iterable of (name, func). """ return self._actions.items() def has_permission(self, request): """ Return True if the given HttpRequest has permission to view *at least one* page in the admin site. """ return request.user.is_active and request.user.is_staff def admin_view(self, view, cacheable=False): """ Decorator to create an admin view attached to this ``AdminSite``. This wraps the view and provides permission checking by calling ``self.has_permission``. You'll want to use this from within ``AdminSite.get_urls()``: class MyAdminSite(AdminSite): def get_urls(self): from django.urls import path urls = super().get_urls() urls += [ path('my_view/', self.admin_view(some_view)) ] return urls By default, admin_views are marked non-cacheable using the ``never_cache`` decorator. If the view can be safely cached, set cacheable=True. """ def inner(request, *args, **kwargs): if not self.has_permission(request): if request.path == reverse('admin:logout', current_app=self.name): index_path = reverse('admin:index', current_app=self.name) return HttpResponseRedirect(index_path) # Inner import to prevent django.contrib.admin (app) from # importing django.contrib.auth.models.User (unrelated model). 
from django.contrib.auth.views import redirect_to_login return redirect_to_login( request.get_full_path(), reverse('admin:login', current_app=self.name) ) return view(request, *args, **kwargs) if not cacheable: inner = never_cache(inner) # We add csrf_protect here so this function can be used as a utility # function for any view, without having to repeat 'csrf_protect'. if not getattr(view, 'csrf_exempt', False): inner = csrf_protect(inner) return update_wrapper(inner, view) def get_urls(self): # Since this module gets imported in the application's root package, # it cannot import models from other applications at the module level, # and django.contrib.contenttypes.views imports ContentType. from django.contrib.contenttypes import views as contenttype_views from django.urls import include, path, re_path def wrap(view, cacheable=False): def wrapper(*args, **kwargs): return self.admin_view(view, cacheable)(*args, **kwargs) wrapper.admin_site = self return update_wrapper(wrapper, view) # Admin-site-wide views. urlpatterns = [ path('', wrap(self.index), name='index'), path('login/', self.login, name='login'), path('logout/', wrap(self.logout), name='logout'), path('password_change/', wrap(self.password_change, cacheable=True), name='password_change'), path( 'password_change/done/', wrap(self.password_change_done, cacheable=True), name='password_change_done', ), path('autocomplete/', wrap(self.autocomplete_view), name='autocomplete'), path('jsi18n/', wrap(self.i18n_javascript, cacheable=True), name='jsi18n'), path( 'r/<int:content_type_id>/<path:object_id>/', wrap(contenttype_views.shortcut), name='view_on_site', ), ] # Add in each model's views, and create a list of valid URLS for the # app_index valid_app_labels = [] for model, model_admin in self._registry.items(): urlpatterns += [ path('%s/%s/' % (model._meta.app_label, model._meta.model_name), include(model_admin.urls)), ] if model._meta.app_label not in valid_app_labels: valid_app_labels.append(model._meta.app_label) # If there were ModelAdmins registered, we should have a list of app # labels for which we need to allow access to the app_index view, if valid_app_labels: regex = r'^(?P<app_label>' + '|'.join(valid_app_labels) + ')/$' urlpatterns += [ re_path(regex, wrap(self.app_index), name='app_list'), ] if self.final_catch_all_view: urlpatterns.append(re_path(r'(?P<url>.*)$', wrap(self.catch_all_view))) return urlpatterns @property def urls(self): return self.get_urls(), 'admin', self.name def each_context(self, request): """ Return a dictionary of variables to put in the template context for *every* page in the admin site. For sites running on a subpath, use the SCRIPT_NAME value if site_url hasn't been customized. """ script_name = request.META['SCRIPT_NAME'] site_url = script_name if self.site_url == '/' and script_name else self.site_url return { 'site_title': self.site_title, 'site_header': self.site_header, 'site_url': site_url, 'has_permission': self.has_permission(request), 'available_apps': self.get_app_list(request), 'is_popup': False, 'is_nav_sidebar_enabled': self.enable_nav_sidebar, } def password_change(self, request, extra_context=None): """ Handle the "change password" task -- both form display and validation. 
""" from django.contrib.admin.forms import AdminPasswordChangeForm from django.contrib.auth.views import PasswordChangeView url = reverse('admin:password_change_done', current_app=self.name) defaults = { 'form_class': AdminPasswordChangeForm, 'success_url': url, 'extra_context': {**self.each_context(request), **(extra_context or {})}, } if self.password_change_template is not None: defaults['template_name'] = self.password_change_template request.current_app = self.name return PasswordChangeView.as_view(**defaults)(request) def password_change_done(self, request, extra_context=None): """ Display the "success" page after a password change. """ from django.contrib.auth.views import PasswordChangeDoneView defaults = { 'extra_context': {**self.each_context(request), **(extra_context or {})}, } if self.password_change_done_template is not None: defaults['template_name'] = self.password_change_done_template request.current_app = self.name return PasswordChangeDoneView.as_view(**defaults)(request) def i18n_javascript(self, request, extra_context=None): """ Display the i18n JavaScript that the Django admin requires. `extra_context` is unused but present for consistency with the other admin views. """ return JavaScriptCatalog.as_view(packages=['django.contrib.admin'])(request) def logout(self, request, extra_context=None): """ Log out the user for the given HttpRequest. This should *not* assume the user is already logged in. """ from django.contrib.auth.views import LogoutView defaults = { 'extra_context': { **self.each_context(request), # Since the user isn't logged out at this point, the value of # has_permission must be overridden. 'has_permission': False, **(extra_context or {}) }, } if self.logout_template is not None: defaults['template_name'] = self.logout_template request.current_app = self.name return LogoutView.as_view(**defaults)(request) @method_decorator(never_cache) def login(self, request, extra_context=None): """ Display the login form for the given HttpRequest. """ if request.method == 'GET' and self.has_permission(request): # Already logged-in, redirect to admin index index_path = reverse('admin:index', current_app=self.name) return HttpResponseRedirect(index_path) # Since this module gets imported in the application's root package, # it cannot import models from other applications at the module level, # and django.contrib.admin.forms eventually imports User. 
from django.contrib.admin.forms import AdminAuthenticationForm from django.contrib.auth.views import LoginView context = { **self.each_context(request), 'title': _('Log in'), 'app_path': request.get_full_path(), 'username': request.user.get_username(), } if (REDIRECT_FIELD_NAME not in request.GET and REDIRECT_FIELD_NAME not in request.POST): context[REDIRECT_FIELD_NAME] = reverse('admin:index', current_app=self.name) context.update(extra_context or {}) defaults = { 'extra_context': context, 'authentication_form': self.login_form or AdminAuthenticationForm, 'template_name': self.login_template or 'admin/login.html', } request.current_app = self.name return LoginView.as_view(**defaults)(request) def autocomplete_view(self, request): return AutocompleteJsonView.as_view(admin_site=self)(request) @no_append_slash def catch_all_view(self, request, url): if settings.APPEND_SLASH and not url.endswith('/'): urlconf = getattr(request, 'urlconf', None) path = '%s/' % request.path_info try: match = resolve(path, urlconf) except Resolver404: pass else: if getattr(match.func, 'should_append_slash', True): return HttpResponsePermanentRedirect(path) raise Http404 def _build_app_dict(self, request, label=None): """ Build the app dictionary. The optional `label` parameter filters models of a specific app. """ app_dict = {} if label: models = { m: m_a for m, m_a in self._registry.items() if m._meta.app_label == label } else: models = self._registry for model, model_admin in models.items(): app_label = model._meta.app_label has_module_perms = model_admin.has_module_permission(request) if not has_module_perms: continue perms = model_admin.get_model_perms(request) # Check whether user has any perm for this module. # If so, add the module to the model_list. if True not in perms.values(): continue info = (app_label, model._meta.model_name) model_dict = { 'name': capfirst(model._meta.verbose_name_plural), 'object_name': model._meta.object_name, 'perms': perms, 'admin_url': None, 'add_url': None, } if perms.get('change') or perms.get('view'): model_dict['view_only'] = not perms.get('change') try: model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name) except NoReverseMatch: pass if perms.get('add'): try: model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name) except NoReverseMatch: pass if app_label in app_dict: app_dict[app_label]['models'].append(model_dict) else: app_dict[app_label] = { 'name': apps.get_app_config(app_label).verbose_name, 'app_label': app_label, 'app_url': reverse( 'admin:app_list', kwargs={'app_label': app_label}, current_app=self.name, ), 'has_module_perms': has_module_perms, 'models': [model_dict], } if label: return app_dict.get(label) return app_dict def get_app_list(self, request): """ Return a sorted list of all the installed apps that have been registered in this site. """ app_dict = self._build_app_dict(request) # Sort the apps alphabetically. app_list = sorted(app_dict.values(), key=lambda x: x['name'].lower()) # Sort the models alphabetically within each app. for app in app_list: app['models'].sort(key=lambda x: x['name']) return app_list def index(self, request, extra_context=None): """ Display the main admin index page, which lists all of the installed apps that have been registered in this site. 
""" app_list = self.get_app_list(request) context = { **self.each_context(request), 'title': self.index_title, 'subtitle': None, 'app_list': app_list, **(extra_context or {}), } request.current_app = self.name return TemplateResponse(request, self.index_template or 'admin/index.html', context) def app_index(self, request, app_label, extra_context=None): app_dict = self._build_app_dict(request, app_label) if not app_dict: raise Http404('The requested admin page does not exist.') # Sort the models alphabetically within each app. app_dict['models'].sort(key=lambda x: x['name']) context = { **self.each_context(request), 'title': _('%(app)s administration') % {'app': app_dict['name']}, 'subtitle': None, 'app_list': [app_dict], 'app_label': app_label, **(extra_context or {}), } request.current_app = self.name return TemplateResponse(request, self.app_index_template or [ 'admin/%s/app_index.html' % app_label, 'admin/app_index.html' ], context) class DefaultAdminSite(LazyObject): def _setup(self): AdminSiteClass = import_string(apps.get_app_config('admin').default_site) self._wrapped = AdminSiteClass() # This global object represents the default admin site, for the common case. # You can provide your own AdminSite using the (Simple)AdminConfig.default_site # attribute. You can also instantiate AdminSite in your own code to create a # custom admin site. site = DefaultAdminSite()
4ffb6ae8d55877127a7e3684ff725f6371bf8f0103f2072992475ebaab09603f
from datetime import datetime, timedelta from django import forms from django.conf import settings from django.contrib import messages from django.contrib.admin import FieldListFilter from django.contrib.admin.exceptions import ( DisallowedModelAdminLookup, DisallowedModelAdminToField, ) from django.contrib.admin.options import ( IS_POPUP_VAR, TO_FIELD_VAR, IncorrectLookupParameters, ) from django.contrib.admin.utils import ( get_fields_from_path, lookup_needs_distinct, prepare_lookup_value, quote, ) from django.core.exceptions import ( FieldDoesNotExist, ImproperlyConfigured, SuspiciousOperation, ) from django.core.paginator import InvalidPage from django.db.models import Exists, F, Field, ManyToOneRel, OrderBy, OuterRef from django.db.models.expressions import Combinable from django.urls import reverse from django.utils.http import urlencode from django.utils.timezone import make_aware from django.utils.translation import gettext # Changelist settings ALL_VAR = 'all' ORDER_VAR = 'o' ORDER_TYPE_VAR = 'ot' PAGE_VAR = 'p' SEARCH_VAR = 'q' ERROR_FLAG = 'e' IGNORED_PARAMS = ( ALL_VAR, ORDER_VAR, ORDER_TYPE_VAR, SEARCH_VAR, IS_POPUP_VAR, TO_FIELD_VAR) class ChangeListSearchForm(forms.Form): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # Populate "fields" dynamically because SEARCH_VAR is a variable: self.fields = { SEARCH_VAR: forms.CharField(required=False, strip=False), } class ChangeList: search_form_class = ChangeListSearchForm def __init__(self, request, model, list_display, list_display_links, list_filter, date_hierarchy, search_fields, list_select_related, list_per_page, list_max_show_all, list_editable, model_admin, sortable_by): self.model = model self.opts = model._meta self.lookup_opts = self.opts self.root_queryset = model_admin.get_queryset(request) self.list_display = list_display self.list_display_links = list_display_links self.list_filter = list_filter self.has_filters = None self.has_active_filters = None self.clear_all_filters_qs = None self.date_hierarchy = date_hierarchy self.search_fields = search_fields self.list_select_related = list_select_related self.list_per_page = list_per_page self.list_max_show_all = list_max_show_all self.model_admin = model_admin self.preserved_filters = model_admin.get_preserved_filters(request) self.sortable_by = sortable_by # Get search parameters from the query string. _search_form = self.search_form_class(request.GET) if not _search_form.is_valid(): for error in _search_form.errors.values(): messages.error(request, ', '.join(error)) self.query = _search_form.cleaned_data.get(SEARCH_VAR) or '' try: self.page_num = int(request.GET.get(PAGE_VAR, 1)) except ValueError: self.page_num = 1 self.show_all = ALL_VAR in request.GET self.is_popup = IS_POPUP_VAR in request.GET to_field = request.GET.get(TO_FIELD_VAR) if to_field and not model_admin.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." 
% to_field) self.to_field = to_field self.params = dict(request.GET.items()) if PAGE_VAR in self.params: del self.params[PAGE_VAR] if ERROR_FLAG in self.params: del self.params[ERROR_FLAG] if self.is_popup: self.list_editable = () else: self.list_editable = list_editable self.queryset = self.get_queryset(request) self.get_results(request) if self.is_popup: title = gettext('Select %s') elif self.model_admin.has_change_permission(request): title = gettext('Select %s to change') else: title = gettext('Select %s to view') self.title = title % self.opts.verbose_name self.pk_attname = self.lookup_opts.pk.attname def get_filters_params(self, params=None): """ Return all params except IGNORED_PARAMS. """ params = params or self.params lookup_params = params.copy() # a dictionary of the query string # Remove all the parameters that are globally and systematically # ignored. for ignored in IGNORED_PARAMS: if ignored in lookup_params: del lookup_params[ignored] return lookup_params def get_filters(self, request): lookup_params = self.get_filters_params() may_have_duplicates = False has_active_filters = False for key, value in lookup_params.items(): if not self.model_admin.lookup_allowed(key, value): raise DisallowedModelAdminLookup("Filtering by %s not allowed" % key) filter_specs = [] for list_filter in self.list_filter: lookup_params_count = len(lookup_params) if callable(list_filter): # This is simply a custom list filter class. spec = list_filter(request, lookup_params, self.model, self.model_admin) else: field_path = None if isinstance(list_filter, (tuple, list)): # This is a custom FieldListFilter class for a given field. field, field_list_filter_class = list_filter else: # This is simply a field name, so use the default # FieldListFilter class that has been registered for the # type of the given field. field, field_list_filter_class = list_filter, FieldListFilter.create if not isinstance(field, Field): field_path = field field = get_fields_from_path(self.model, field_path)[-1] spec = field_list_filter_class( field, request, lookup_params, self.model, self.model_admin, field_path=field_path, ) # field_list_filter_class removes any lookup_params it # processes. If that happened, check if distinct() is needed to # remove duplicate results. if lookup_params_count > len(lookup_params): may_have_duplicates |= lookup_needs_distinct(self.lookup_opts, field_path) if spec and spec.has_output(): filter_specs.append(spec) if lookup_params_count > len(lookup_params): has_active_filters = True if self.date_hierarchy: # Create bounded lookup parameters so that the query is more # efficient. year = lookup_params.pop('%s__year' % self.date_hierarchy, None) if year is not None: month = lookup_params.pop('%s__month' % self.date_hierarchy, None) day = lookup_params.pop('%s__day' % self.date_hierarchy, None) try: from_date = datetime( int(year), int(month if month is not None else 1), int(day if day is not None else 1), ) except ValueError as e: raise IncorrectLookupParameters(e) from e if day: to_date = from_date + timedelta(days=1) elif month: # In this branch, from_date will always be the first of a # month, so advancing 32 days gives the next month. 
to_date = (from_date + timedelta(days=32)).replace(day=1) else: to_date = from_date.replace(year=from_date.year + 1) if settings.USE_TZ: from_date = make_aware(from_date) to_date = make_aware(to_date) lookup_params.update({ '%s__gte' % self.date_hierarchy: from_date, '%s__lt' % self.date_hierarchy: to_date, }) # At this point, all the parameters used by the various ListFilters # have been removed from lookup_params, which now only contains other # parameters passed via the query string. We now loop through the # remaining parameters both to ensure that all the parameters are valid # fields and to determine if at least one of them needs distinct(). If # the lookup parameters aren't real fields, then bail out. try: for key, value in lookup_params.items(): lookup_params[key] = prepare_lookup_value(key, value) may_have_duplicates |= lookup_needs_distinct(self.lookup_opts, key) return ( filter_specs, bool(filter_specs), lookup_params, may_have_duplicates, has_active_filters, ) except FieldDoesNotExist as e: raise IncorrectLookupParameters(e) from e def get_query_string(self, new_params=None, remove=None): if new_params is None: new_params = {} if remove is None: remove = [] p = self.params.copy() for r in remove: for k in list(p): if k.startswith(r): del p[k] for k, v in new_params.items(): if v is None: if k in p: del p[k] else: p[k] = v return '?%s' % urlencode(sorted(p.items())) def get_results(self, request): paginator = self.model_admin.get_paginator(request, self.queryset, self.list_per_page) # Get the number of objects, with admin filters applied. result_count = paginator.count # Get the total number of objects, with no admin filters applied. if self.model_admin.show_full_result_count: full_result_count = self.root_queryset.count() else: full_result_count = None can_show_all = result_count <= self.list_max_show_all multi_page = result_count > self.list_per_page # Get the list of objects to display on this page. if (self.show_all and can_show_all) or not multi_page: result_list = self.queryset._clone() else: try: result_list = paginator.page(self.page_num).object_list except InvalidPage: raise IncorrectLookupParameters self.result_count = result_count self.show_full_result_count = self.model_admin.show_full_result_count # Admin actions are shown if there is at least one entry # or if entries are not counted because show_full_result_count is disabled self.show_admin_actions = not self.show_full_result_count or bool(full_result_count) self.full_result_count = full_result_count self.result_list = result_list self.can_show_all = can_show_all self.multi_page = multi_page self.paginator = paginator def _get_default_ordering(self): ordering = [] if self.model_admin.ordering: ordering = self.model_admin.ordering elif self.lookup_opts.ordering: ordering = self.lookup_opts.ordering return ordering def get_ordering_field(self, field_name): """ Return the proper model field name corresponding to the given field_name to use for ordering. field_name may either be the name of a proper model field or the name of a method (on the admin or model) or a callable with the 'admin_order_field' attribute. Return None if no proper model field name can be matched. """ try: field = self.lookup_opts.get_field(field_name) return field.name except FieldDoesNotExist: # See whether field_name is a name of a non-field # that allows sorting. 
if callable(field_name): attr = field_name elif hasattr(self.model_admin, field_name): attr = getattr(self.model_admin, field_name) else: attr = getattr(self.model, field_name) if isinstance(attr, property) and hasattr(attr, 'fget'): attr = attr.fget return getattr(attr, 'admin_order_field', None) def get_ordering(self, request, queryset): """ Return the list of ordering fields for the change list. First check the get_ordering() method in model admin, then check the object's default ordering. Then, any manually-specified ordering from the query string overrides anything. Finally, a deterministic order is guaranteed by calling _get_deterministic_ordering() with the constructed ordering. """ params = self.params ordering = list(self.model_admin.get_ordering(request) or self._get_default_ordering()) if ORDER_VAR in params: # Clear ordering and used params ordering = [] order_params = params[ORDER_VAR].split('.') for p in order_params: try: none, pfx, idx = p.rpartition('-') field_name = self.list_display[int(idx)] order_field = self.get_ordering_field(field_name) if not order_field: continue # No 'admin_order_field', skip it if isinstance(order_field, OrderBy): if pfx == '-': order_field = order_field.copy() order_field.reverse_ordering() ordering.append(order_field) elif hasattr(order_field, 'resolve_expression'): # order_field is an expression. ordering.append(order_field.desc() if pfx == '-' else order_field.asc()) # reverse order if order_field has already "-" as prefix elif order_field.startswith('-') and pfx == '-': ordering.append(order_field[1:]) else: ordering.append(pfx + order_field) except (IndexError, ValueError): continue # Invalid ordering specified, skip it. # Add the given query's ordering fields, if any. ordering.extend(queryset.query.order_by) return self._get_deterministic_ordering(ordering) def _get_deterministic_ordering(self, ordering): """ Ensure a deterministic order across all database backends. Search for a single field or unique together set of fields providing a total ordering. If these are missing, augment the ordering with a descendant primary key. """ ordering = list(ordering) ordering_fields = set() total_ordering_fields = {'pk'} | { field.attname for field in self.lookup_opts.fields if field.unique and not field.null } for part in ordering: # Search for single field providing a total ordering. field_name = None if isinstance(part, str): field_name = part.lstrip('-') elif isinstance(part, F): field_name = part.name elif isinstance(part, OrderBy) and isinstance(part.expression, F): field_name = part.expression.name if field_name: # Normalize attname references by using get_field(). try: field = self.lookup_opts.get_field(field_name) except FieldDoesNotExist: # Could be "?" for random ordering or a related field # lookup. Skip this part of introspection for now. continue # Ordering by a related field name orders by the referenced # model's ordering. Skip this part of introspection for now. if field.remote_field and field_name == field.name: continue if field.attname in total_ordering_fields: break ordering_fields.add(field.attname) else: # No single total ordering field, try unique_together and total # unique constraints. constraint_field_names = ( *self.lookup_opts.unique_together, *( constraint.fields for constraint in self.lookup_opts.total_unique_constraints ), ) for field_names in constraint_field_names: # Normalize attname references by using get_field(). 
fields = [self.lookup_opts.get_field(field_name) for field_name in field_names] # Composite unique constraints containing a nullable column # cannot ensure total ordering. if any(field.null for field in fields): continue if ordering_fields.issuperset(field.attname for field in fields): break else: # If no set of unique fields is present in the ordering, rely # on the primary key to provide total ordering. ordering.append('-pk') return ordering def get_ordering_field_columns(self): """ Return a dictionary of ordering field column numbers and asc/desc. """ # We must cope with more than one column having the same underlying sort # field, so we base things on column numbers. ordering = self._get_default_ordering() ordering_fields = {} if ORDER_VAR not in self.params: # for ordering specified on ModelAdmin or model Meta, we don't know # the right column numbers absolutely, because there might be more # than one column associated with that ordering, so we guess. for field in ordering: if isinstance(field, (Combinable, OrderBy)): if not isinstance(field, OrderBy): field = field.asc() if isinstance(field.expression, F): order_type = 'desc' if field.descending else 'asc' field = field.expression.name else: continue elif field.startswith('-'): field = field[1:] order_type = 'desc' else: order_type = 'asc' for index, attr in enumerate(self.list_display): if self.get_ordering_field(attr) == field: ordering_fields[index] = order_type break else: for p in self.params[ORDER_VAR].split('.'): none, pfx, idx = p.rpartition('-') try: idx = int(idx) except ValueError: continue # skip it ordering_fields[idx] = 'desc' if pfx == '-' else 'asc' return ordering_fields def get_queryset(self, request): # First, we collect all the declared list filters. ( self.filter_specs, self.has_filters, remaining_lookup_params, filters_may_have_duplicates, self.has_active_filters, ) = self.get_filters(request) # Then, we let every list filter modify the queryset to its liking. qs = self.root_queryset for filter_spec in self.filter_specs: new_qs = filter_spec.queryset(request, qs) if new_qs is not None: qs = new_qs try: # Finally, we apply the remaining lookup parameters from the query # string (i.e. those that haven't already been processed by the # filters). qs = qs.filter(**remaining_lookup_params) except (SuspiciousOperation, ImproperlyConfigured): # Allow certain types of errors to be re-raised as-is so that the # caller can treat them in a special way. raise except Exception as e: # Every other error is caught with a naked except, because we don't # have any other way of validating lookup parameters. They might be # invalid if the keyword arguments are incorrect, or if the values # are not in the correct type, so we might get FieldError, # ValueError, ValidationError, or ?. raise IncorrectLookupParameters(e) # Apply search results qs, search_may_have_duplicates = self.model_admin.get_search_results( request, qs, self.query, ) # Set query string for clearing all filters. self.clear_all_filters_qs = self.get_query_string( new_params=remaining_lookup_params, remove=self.get_filters_params(), ) # Remove duplicates from results, if necessary if filters_may_have_duplicates | search_may_have_duplicates: qs = qs.filter(pk=OuterRef('pk')) qs = self.root_queryset.filter(Exists(qs)) # Set ordering. 
ordering = self.get_ordering(request, qs) qs = qs.order_by(*ordering) if not qs.query.select_related: qs = self.apply_select_related(qs) return qs def apply_select_related(self, qs): if self.list_select_related is True: return qs.select_related() if self.list_select_related is False: if self.has_related_field_in_list_display(): return qs.select_related() if self.list_select_related: return qs.select_related(*self.list_select_related) return qs def has_related_field_in_list_display(self): for field_name in self.list_display: try: field = self.lookup_opts.get_field(field_name) except FieldDoesNotExist: pass else: if isinstance(field.remote_field, ManyToOneRel): # <FK>_id field names don't require a join. if field_name != field.get_attname(): return True return False def url_for_result(self, result): pk = getattr(result, self.pk_attname) return reverse('admin:%s_%s_change' % (self.opts.app_label, self.opts.model_name), args=(quote(pk),), current_app=self.model_admin.admin_site.name)
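# ---------------------------------------------------------------------------
# Illustrative sketch, not part of Django itself: a ChangeList subclass that
# pins a hypothetical 'priority' field to the front of whatever ordering the
# standard logic above computes. A ModelAdmin opts into it by overriding
# get_changelist(); the model is assumed to actually have a 'priority' field.
class PriorityFirstChangeList(ChangeList):
    def get_ordering(self, request, queryset):
        # super() already merges admin/model ordering, the ORDER_VAR query
        # parameter, and the deterministic fallback (e.g. '-pk').
        return ['priority', *super().get_ordering(request, queryset)]


# Usage, in an app's admin.py (``Task`` is a hypothetical model):
#
#     from django.contrib import admin
#
#     @admin.register(Task)
#     class TaskAdmin(admin.ModelAdmin):
#         def get_changelist(self, request, **kwargs):
#             return PriorityFirstChangeList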
4456b957ecd7419b41bf689154773b1d274d8a6d745bb2b5b2fff22aeb871138
from django.core.exceptions import FieldDoesNotExist from django.db import ( IntegrityError, connection, migrations, models, transaction, ) from django.db.migrations.migration import Migration from django.db.migrations.operations.fields import FieldOperation from django.db.migrations.state import ModelState, ProjectState from django.db.models.functions import Abs from django.db.transaction import atomic from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature from .models import FoodManager, FoodQuerySet, UnicodeModel from .test_base import OperationTestBase class Mixin: pass class OperationTests(OperationTestBase): """ Tests running the operations and making sure they do what they say they do. Each test looks at their state changing, and then their database operation - both forwards and backwards. """ def test_create_model(self): """ Tests the CreateModel operation. Most other tests use this operation as part of setup, so check failures here first. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) self.assertEqual(operation.describe(), "Create model Pony") self.assertEqual(operation.migration_name_fragment, 'pony') # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["fields", "name"]) # And default manager not in set operation = migrations.CreateModel("Foo", fields=[], managers=[("objects", models.Manager())]) definition = operation.deconstruct() self.assertNotIn('managers', definition[2]) def test_create_model_with_duplicate_field_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value pink in CreateModel fields argument.'): migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.TextField()), ("pink", models.IntegerField(default=1)), ], ) def test_create_model_with_duplicate_base(self): message = 'Found duplicate value test_crmo.pony in CreateModel bases argument.' with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.Pony",), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.pony",), ) message = 'Found duplicate value migrations.unicodemodel in CreateModel bases argument.' 
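        # A base given as a model class and the same base given as a string
        # reference (in any letter case) count as the same value here.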
with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, UnicodeModel,), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.unicodemodel',), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.UnicodeModel',), ) message = "Found duplicate value <class 'django.db.models.base.Model'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(models.Model, models.Model,), ) message = "Found duplicate value <class 'migrations.test_operations.Mixin'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(Mixin, Mixin,), ) def test_create_model_with_duplicate_manager_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value objects in CreateModel managers argument.'): migrations.CreateModel( "Pony", fields=[], managers=[ ("objects", models.Manager()), ("objects", models.Manager()), ], ) def test_create_model_with_unique_after(self): """ Tests the CreateModel operation directly followed by an AlterUniqueTogether (bug #22844 - sqlite remake issues) """ operation1 = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) operation2 = migrations.CreateModel( "Rider", [ ("id", models.AutoField(primary_key=True)), ("number", models.IntegerField(default=1)), ("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)), ], ) operation3 = migrations.AlterUniqueTogether( "Rider", [ ("number", "pony"), ], ) # Test the database alteration project_state = ProjectState() self.assertTableNotExists("test_crmoua_pony") self.assertTableNotExists("test_crmoua_rider") with connection.schema_editor() as editor: new_state = project_state.clone() operation1.state_forwards("test_crmoua", new_state) operation1.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation2.state_forwards("test_crmoua", new_state) operation2.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation3.state_forwards("test_crmoua", new_state) operation3.database_forwards("test_crmoua", editor, project_state, new_state) self.assertTableExists("test_crmoua_pony") self.assertTableExists("test_crmoua_rider") def test_create_model_m2m(self): """ Test the creation of a model with a ManyToMany field and the auto-created "through" model. 
""" project_state = self.set_up_test_model("test_crmomm") operation = migrations.CreateModel( "Stable", [ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("Pony", related_name="stables")) ] ) # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_crmomm", new_state) # Test the database alteration self.assertTableNotExists("test_crmomm_stable_ponies") with connection.schema_editor() as editor: operation.database_forwards("test_crmomm", editor, project_state, new_state) self.assertTableExists("test_crmomm_stable") self.assertTableExists("test_crmomm_stable_ponies") self.assertColumnNotExists("test_crmomm_stable", "ponies") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_crmomm", "Pony") Stable = new_state.apps.get_model("test_crmomm", "Stable") stable = Stable.objects.create() p1 = Pony.objects.create(pink=False, weight=4.55) p2 = Pony.objects.create(pink=True, weight=5.43) stable.ponies.add(p1, p2) self.assertEqual(stable.ponies.count(), 2) stable.ponies.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmomm", editor, new_state, project_state) self.assertTableNotExists("test_crmomm_stable") self.assertTableNotExists("test_crmomm_stable_ponies") def test_create_model_inheritance(self): """ Tests the CreateModel operation on a multi-table inheritance setup. """ project_state = self.set_up_test_model("test_crmoih") # Test the state alteration operation = migrations.CreateModel( "ShetlandPony", [ ('pony_ptr', models.OneToOneField( 'test_crmoih.Pony', models.CASCADE, auto_created=True, primary_key=True, to_field='id', serialize=False, )), ("cuteness", models.IntegerField(default=1)), ], ) new_state = project_state.clone() operation.state_forwards("test_crmoih", new_state) self.assertIn(("test_crmoih", "shetlandpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crmoih_shetlandpony") with connection.schema_editor() as editor: operation.database_forwards("test_crmoih", editor, project_state, new_state) self.assertTableExists("test_crmoih_shetlandpony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmoih", editor, new_state, project_state) self.assertTableNotExists("test_crmoih_shetlandpony") def test_create_proxy_model(self): """ CreateModel ignores proxy models. 
""" project_state = self.set_up_test_model("test_crprmo") # Test the state alteration operation = migrations.CreateModel( "ProxyPony", [], options={"proxy": True}, bases=("test_crprmo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model ProxyPony") new_state = project_state.clone() operation.state_forwards("test_crprmo", new_state) self.assertIn(("test_crprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crprmo", editor, project_state, new_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crprmo", editor, new_state, project_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["bases", "fields", "name", "options"]) def test_create_unmanaged_model(self): """ CreateModel ignores unmanaged models. """ project_state = self.set_up_test_model("test_crummo") # Test the state alteration operation = migrations.CreateModel( "UnmanagedPony", [], options={"proxy": True}, bases=("test_crummo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony") new_state = project_state.clone() operation.state_forwards("test_crummo", new_state) self.assertIn(("test_crummo", "unmanagedpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crummo", editor, project_state, new_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crummo", editor, new_state, project_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") @skipUnlessDBFeature('supports_table_check_constraints') def test_create_model_with_constraint(self): where = models.Q(pink__gt=2) check_constraint = models.CheckConstraint(check=where, name='test_constraint_pony_pink_gt_2') operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=3)), ], options={'constraints': [check_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") with connection.cursor() as cursor: with self.assertRaises(IntegrityError): cursor.execute("INSERT INTO test_crmo_pony (id, pink) VALUES (1, 1)") # Test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # Test deconstruction definition = operation.deconstruct() 
self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [check_constraint]) def test_create_model_with_partial_unique_constraint(self): partial_unique_constraint = models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ('weight', models.FloatField()), ], options={'constraints': [partial_unique_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists('test_crmo_pony') with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') # Test constraint works Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test reversal with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Test deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [partial_unique_constraint]) def test_create_model_with_deferred_unique_constraint(self): deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferrable_pink_constraint', deferrable=models.Deferrable.DEFERRED, ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ], options={'constraints': [deferred_unique_constraint]}, ) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) self.assertTableNotExists('test_crmo_pony') # Create table. with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1) obj.pink = 3 obj.save() else: Pony.objects.create(pink=1) # Reversal. with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual( definition[2]['options']['constraints'], [deferred_unique_constraint], ) @skipUnlessDBFeature('supports_covering_indexes') def test_create_model_with_covering_unique_constraint(self): covering_unique_constraint = models.UniqueConstraint( fields=['pink'], include=['weight'], name='test_constraint_pony_pink_covering_weight', ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ('weight', models.FloatField()), ], options={'constraints': [covering_unique_constraint]}, ) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) self.assertTableNotExists('test_crmo_pony') # Create table. with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1, weight=4.0) with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=7.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual( definition[2]['options']['constraints'], [covering_unique_constraint], ) def test_create_model_managers(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_cmoma") # Test the state alteration operation = migrations.CreateModel( "Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Create model Food") new_state = project_state.clone() operation.state_forwards("test_cmoma", new_state) self.assertIn(("test_cmoma", "food"), new_state.models) managers = new_state.models["test_cmoma", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) def test_delete_model(self): """ Tests the DeleteModel operation. 
""" project_state = self.set_up_test_model("test_dlmo") # Test the state alteration operation = migrations.DeleteModel("Pony") self.assertEqual(operation.describe(), "Delete model Pony") self.assertEqual(operation.migration_name_fragment, 'delete_pony') new_state = project_state.clone() operation.state_forwards("test_dlmo", new_state) self.assertNotIn(("test_dlmo", "pony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dlmo", editor, project_state, new_state) self.assertTableNotExists("test_dlmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlmo", editor, new_state, project_state) self.assertTableExists("test_dlmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "DeleteModel") self.assertEqual(definition[1], []) self.assertEqual(list(definition[2]), ["name"]) def test_delete_proxy_model(self): """ Tests the DeleteModel operation ignores proxy models. """ project_state = self.set_up_test_model("test_dlprmo", proxy_model=True) # Test the state alteration operation = migrations.DeleteModel("ProxyPony") new_state = project_state.clone() operation.state_forwards("test_dlprmo", new_state) self.assertIn(("test_dlprmo", "proxypony"), project_state.models) self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") with connection.schema_editor() as editor: operation.database_forwards("test_dlprmo", editor, project_state, new_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlprmo", editor, new_state, project_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") def test_delete_mti_model(self): project_state = self.set_up_test_model('test_dlmtimo', mti_model=True) # Test the state alteration operation = migrations.DeleteModel('ShetlandPony') new_state = project_state.clone() operation.state_forwards('test_dlmtimo', new_state) self.assertIn(('test_dlmtimo', 'shetlandpony'), project_state.models) self.assertNotIn(('test_dlmtimo', 'shetlandpony'), new_state.models) # Test the database alteration self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') with connection.schema_editor() as editor: operation.database_forwards('test_dlmtimo', editor, project_state, new_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableNotExists('test_dlmtimo_shetlandpony') # And test reversal with connection.schema_editor() as editor: operation.database_backwards('test_dlmtimo', editor, new_state, project_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') def test_rename_model(self): """ Tests the RenameModel operation. 
""" project_state = self.set_up_test_model("test_rnmo", related_model=True) # Test the state alteration operation = migrations.RenameModel("Pony", "Horse") self.assertEqual(operation.describe(), "Rename model Pony to Horse") self.assertEqual(operation.migration_name_fragment, 'rename_pony_horse') # Test initial state and database self.assertIn(("test_rnmo", "pony"), project_state.models) self.assertNotIn(("test_rnmo", "horse"), project_state.models) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate forwards new_state = project_state.clone() atomic_rename = connection.features.supports_atomic_references_rename new_state = self.apply_operations("test_rnmo", new_state, [operation], atomic=atomic_rename) # Test new state and database self.assertNotIn(("test_rnmo", "pony"), new_state.models) self.assertIn(("test_rnmo", "horse"), new_state.models) # RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( new_state.models['test_rnmo', 'rider'].fields['pony'].remote_field.model, 'test_rnmo.Horse', ) self.assertTableNotExists("test_rnmo_pony") self.assertTableExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate backwards original_state = self.unapply_operations("test_rnmo", project_state, [operation], atomic=atomic_rename) # Test original state and database self.assertIn(("test_rnmo", "pony"), original_state.models) self.assertNotIn(("test_rnmo", "horse"), original_state.models) self.assertEqual( original_state.models['test_rnmo', 'rider'].fields['pony'].remote_field.model, 'Pony', ) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'old_name': "Pony", 'new_name': "Horse"}) def test_rename_model_state_forwards(self): """ RenameModel operations shouldn't trigger the caching of rendered apps on state without prior apps. """ state = ProjectState() state.add_model(ModelState('migrations', 'Foo', [])) operation = migrations.RenameModel('Foo', 'Bar') operation.state_forwards('migrations', state) self.assertNotIn('apps', state.__dict__) self.assertNotIn(('migrations', 'foo'), state.models) self.assertIn(('migrations', 'bar'), state.models) # Now with apps cached. apps = state.apps operation = migrations.RenameModel('Bar', 'Foo') operation.state_forwards('migrations', state) self.assertIs(state.apps, apps) self.assertNotIn(('migrations', 'bar'), state.models) self.assertIn(('migrations', 'foo'), state.models) def test_rename_model_with_self_referential_fk(self): """ Tests the RenameModel operation on model with self referential FK. 
""" project_state = self.set_up_test_model("test_rmwsrf", related_model=True) # Test the state alteration operation = migrations.RenameModel("Rider", "HorseRider") self.assertEqual(operation.describe(), "Rename model Rider to HorseRider") new_state = project_state.clone() operation.state_forwards("test_rmwsrf", new_state) self.assertNotIn(("test_rmwsrf", "rider"), new_state.models) self.assertIn(("test_rmwsrf", "horserider"), new_state.models) # Remember, RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( 'self', new_state.models["test_rmwsrf", "horserider"].fields['friend'].remote_field.model ) HorseRider = new_state.apps.get_model('test_rmwsrf', 'horserider') self.assertIs(HorseRider._meta.get_field('horserider').remote_field.model, HorseRider) # Test the database alteration self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_forwards("test_rmwsrf", editor, project_state, new_state) self.assertTableNotExists("test_rmwsrf_rider") self.assertTableExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_horserider", "id")) # And test reversal with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_backwards("test_rmwsrf", editor, new_state, project_state) self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) def test_rename_model_with_superclass_fk(self): """ Tests the RenameModel operation on a model which has a superclass that has a foreign key. 
""" project_state = self.set_up_test_model("test_rmwsc", related_model=True, mti_model=True) # Test the state alteration operation = migrations.RenameModel("ShetlandPony", "LittleHorse") self.assertEqual(operation.describe(), "Rename model ShetlandPony to LittleHorse") new_state = project_state.clone() operation.state_forwards("test_rmwsc", new_state) self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models) self.assertIn(("test_rmwsc", "littlehorse"), new_state.models) # RenameModel shouldn't repoint the superclass's relations, only local ones self.assertEqual( project_state.models['test_rmwsc', 'rider'].fields['pony'].remote_field.model, new_state.models['test_rmwsc', 'rider'].fields['pony'].remote_field.model, ) # Before running the migration we have a table for Shetland Pony, not Little Horse self.assertTableExists("test_rmwsc_shetlandpony") self.assertTableNotExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # and the foreign key on rider points to pony, not shetland pony self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id")) with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: operation.database_forwards("test_rmwsc", editor, project_state, new_state) # Now we have a little horse table, not shetland pony self.assertTableNotExists("test_rmwsc_shetlandpony") self.assertTableExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # but the Foreign keys still point at pony, not little horse self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id")) def test_rename_model_with_self_referential_m2m(self): app_label = "test_rename_model_with_self_referential_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("ReflexivePony", fields=[ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("self")), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("ReflexivePony", "ReflexivePony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "ReflexivePony2") pony = Pony.objects.create() pony.ponies.add(pony) def test_rename_model_with_m2m(self): app_label = "test_rename_model_with_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Pony", "Pony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "Pony2") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) 
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_m2m_target_model(self): app_label = "test_rename_m2m_target_model" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Rider", "Rider2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider2") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_m2m_through_model(self): app_label = "test_rename_through" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("PonyRider", fields=[ ("id", models.AutoField(primary_key=True)), ("rider", models.ForeignKey("test_rename_through.Rider", models.CASCADE)), ("pony", models.ForeignKey("test_rename_through.Pony", models.CASCADE)), ]), migrations.AddField( "Pony", "riders", models.ManyToManyField("test_rename_through.Rider", through="test_rename_through.PonyRider"), ), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider") pony = Pony.objects.create() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("PonyRider", "PonyRider2"), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider2") pony = Pony.objects.first() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) self.assertEqual(Pony.objects.count(), 1) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(PonyRider.objects.count(), 2) self.assertEqual(pony.riders.count(), 2) def test_rename_m2m_model_after_rename_field(self): """RenameModel renames a many-to-many column after a RenameField.""" app_label = 'test_rename_multiple' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=20)), ]), migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('test_rename_multiple.Pony', models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('riders', models.ManyToManyField('Rider')), ]), migrations.RenameField(model_name='pony', old_name='name', 
new_name='fancy_name'), migrations.RenameModel(old_name='Rider', new_name='Jockey'), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, 'Pony') Jockey = project_state.apps.get_model(app_label, 'Jockey') PonyRider = project_state.apps.get_model(app_label, 'PonyRider') # No "no such column" error means the column was renamed correctly. pony = Pony.objects.create(fancy_name='a good name') jockey = Jockey.objects.create(pony=pony) ponyrider = PonyRider.objects.create() ponyrider.riders.add(jockey) def test_add_field(self): """ Tests the AddField operation. """ # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=5), ) self.assertEqual(operation.describe(), "Add field height to Pony") self.assertEqual(operation.migration_name_fragment, 'pony_height') project_state, new_state = self.make_test_state("test_adfl", operation) self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4) field = new_state.models['test_adfl', 'pony'].fields['height'] self.assertEqual(field.default, 5) # Test the database alteration self.assertColumnNotExists("test_adfl_pony", "height") with connection.schema_editor() as editor: operation.database_forwards("test_adfl", editor, project_state, new_state) self.assertColumnExists("test_adfl_pony", "height") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adfl", editor, new_state, project_state) self.assertColumnNotExists("test_adfl_pony", "height") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"]) def test_add_charfield(self): """ Tests the AddField operation on TextField. """ project_state = self.set_up_test_model("test_adchfl") Pony = project_state.apps.get_model("test_adchfl", "Pony") pony = Pony.objects.create(weight=42) new_state = self.apply_operations("test_adchfl", project_state, [ migrations.AddField( "Pony", "text", models.CharField(max_length=10, default="some text"), ), migrations.AddField( "Pony", "empty", models.CharField(max_length=10, default=""), ), # If not properly quoted digits would be interpreted as an int. migrations.AddField( "Pony", "digits", models.CharField(max_length=10, default="42"), ), # Manual quoting is fragile and could trip on quotes. Refs #xyz. migrations.AddField( "Pony", "quotes", models.CharField(max_length=10, default='"\'"'), ), ]) Pony = new_state.apps.get_model("test_adchfl", "Pony") pony = Pony.objects.get(pk=pony.pk) self.assertEqual(pony.text, "some text") self.assertEqual(pony.empty, "") self.assertEqual(pony.digits, "42") self.assertEqual(pony.quotes, '"\'"') def test_add_textfield(self): """ Tests the AddField operation on TextField. """ project_state = self.set_up_test_model("test_adtxtfl") Pony = project_state.apps.get_model("test_adtxtfl", "Pony") pony = Pony.objects.create(weight=42) new_state = self.apply_operations("test_adtxtfl", project_state, [ migrations.AddField( "Pony", "text", models.TextField(default="some text"), ), migrations.AddField( "Pony", "empty", models.TextField(default=""), ), # If not properly quoted digits would be interpreted as an int. migrations.AddField( "Pony", "digits", models.TextField(default="42"), ), # Manual quoting is fragile and could trip on quotes. Refs #xyz. 
migrations.AddField( "Pony", "quotes", models.TextField(default='"\'"'), ), ]) Pony = new_state.apps.get_model("test_adtxtfl", "Pony") pony = Pony.objects.get(pk=pony.pk) self.assertEqual(pony.text, "some text") self.assertEqual(pony.empty, "") self.assertEqual(pony.digits, "42") self.assertEqual(pony.quotes, '"\'"') def test_add_binaryfield(self): """ Tests the AddField operation on TextField/BinaryField. """ project_state = self.set_up_test_model("test_adbinfl") Pony = project_state.apps.get_model("test_adbinfl", "Pony") pony = Pony.objects.create(weight=42) new_state = self.apply_operations("test_adbinfl", project_state, [ migrations.AddField( "Pony", "blob", models.BinaryField(default=b"some text"), ), migrations.AddField( "Pony", "empty", models.BinaryField(default=b""), ), # If not properly quoted digits would be interpreted as an int. migrations.AddField( "Pony", "digits", models.BinaryField(default=b"42"), ), # Manual quoting is fragile and could trip on quotes. Refs #xyz. migrations.AddField( "Pony", "quotes", models.BinaryField(default=b'"\'"'), ), ]) Pony = new_state.apps.get_model("test_adbinfl", "Pony") pony = Pony.objects.get(pk=pony.pk) # SQLite returns buffer/memoryview, cast to bytes for checking. self.assertEqual(bytes(pony.blob), b"some text") self.assertEqual(bytes(pony.empty), b"") self.assertEqual(bytes(pony.digits), b"42") self.assertEqual(bytes(pony.quotes), b'"\'"') def test_column_name_quoting(self): """ Column names that are SQL keywords shouldn't cause problems when used in migrations (#22168). """ project_state = self.set_up_test_model("test_regr22168") operation = migrations.AddField( "Pony", "order", models.IntegerField(default=0), ) new_state = project_state.clone() operation.state_forwards("test_regr22168", new_state) with connection.schema_editor() as editor: operation.database_forwards("test_regr22168", editor, project_state, new_state) self.assertColumnExists("test_regr22168_pony", "order") def test_add_field_preserve_default(self): """ Tests the AddField operation's state alteration when preserve_default = False. """ project_state = self.set_up_test_model("test_adflpd") # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=4), preserve_default=False, ) new_state = project_state.clone() operation.state_forwards("test_adflpd", new_state) self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4) field = new_state.models['test_adflpd', 'pony'].fields['height'] self.assertEqual(field.default, models.NOT_PROVIDED) # Test the database alteration project_state.apps.get_model("test_adflpd", "pony").objects.create( weight=4, ) self.assertColumnNotExists("test_adflpd_pony", "height") with connection.schema_editor() as editor: operation.database_forwards("test_adflpd", editor, project_state, new_state) self.assertColumnExists("test_adflpd_pony", "height") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name", "preserve_default"]) def test_add_field_m2m(self): """ Tests the AddField operation with a ManyToManyField. 
""" project_state = self.set_up_test_model("test_adflmm", second_model=True) # Test the state alteration operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) new_state = project_state.clone() operation.state_forwards("test_adflmm", new_state) self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4) # Test the database alteration self.assertTableNotExists("test_adflmm_pony_stables") with connection.schema_editor() as editor: operation.database_forwards("test_adflmm", editor, project_state, new_state) self.assertTableExists("test_adflmm_pony_stables") self.assertColumnNotExists("test_adflmm_pony", "stables") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_adflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.stables.create() self.assertEqual(p.stables.count(), 1) p.stables.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adflmm", editor, new_state, project_state) self.assertTableNotExists("test_adflmm_pony_stables") def test_alter_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertFalse(Pony._meta.get_field('stables').blank) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField( "Pony", "stables", models.ManyToManyField(to="Stable", related_name="ponies", blank=True) ) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertTrue(Pony._meta.get_field('stables').blank) def test_repoint_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True, third_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "places", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField("Pony", "places", models.ManyToManyField(to="Van", related_name="ponies")) ]) # Ensure the new field actually works Pony = project_state.apps.get_model("test_alflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.places.create() self.assertEqual(p.places.count(), 1) p.places.all().delete() def test_remove_field_m2m(self): project_state = self.set_up_test_model("test_rmflmm", second_model=True) project_state = self.apply_operations("test_rmflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) self.assertTableExists("test_rmflmm_pony_stables") with_field_state = project_state.clone() operations = [migrations.RemoveField("Pony", "stables")] project_state = self.apply_operations("test_rmflmm", project_state, operations=operations) self.assertTableNotExists("test_rmflmm_pony_stables") # And test reversal self.unapply_operations("test_rmflmm", with_field_state, operations=operations) self.assertTableExists("test_rmflmm_pony_stables") def test_remove_field_m2m_with_through(self): project_state = self.set_up_test_model("test_rmflmmwt", second_model=True) self.assertTableNotExists("test_rmflmmwt_ponystables") project_state = 
self.apply_operations("test_rmflmmwt", project_state, operations=[ migrations.CreateModel("PonyStables", fields=[ ("pony", models.ForeignKey('test_rmflmmwt.Pony', models.CASCADE)), ("stable", models.ForeignKey('test_rmflmmwt.Stable', models.CASCADE)), ]), migrations.AddField( "Pony", "stables", models.ManyToManyField("Stable", related_name="ponies", through='test_rmflmmwt.PonyStables') ) ]) self.assertTableExists("test_rmflmmwt_ponystables") operations = [migrations.RemoveField("Pony", "stables"), migrations.DeleteModel("PonyStables")] self.apply_operations("test_rmflmmwt", project_state, operations=operations) def test_remove_field(self): """ Tests the RemoveField operation. """ project_state = self.set_up_test_model("test_rmfl") # Test the state alteration operation = migrations.RemoveField("Pony", "pink") self.assertEqual(operation.describe(), "Remove field pink from Pony") self.assertEqual(operation.migration_name_fragment, 'remove_pony_pink') new_state = project_state.clone() operation.state_forwards("test_rmfl", new_state) self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2) # Test the database alteration self.assertColumnExists("test_rmfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_rmfl", editor, project_state, new_state) self.assertColumnNotExists("test_rmfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmfl", editor, new_state, project_state) self.assertColumnExists("test_rmfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RemoveField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': 'pink'}) def test_remove_fk(self): """ Tests the RemoveField operation on a foreign key. """ project_state = self.set_up_test_model("test_rfk", related_model=True) self.assertColumnExists("test_rfk_rider", "pony_id") operation = migrations.RemoveField("Rider", "pony") new_state = project_state.clone() operation.state_forwards("test_rfk", new_state) with connection.schema_editor() as editor: operation.database_forwards("test_rfk", editor, project_state, new_state) self.assertColumnNotExists("test_rfk_rider", "pony_id") with connection.schema_editor() as editor: operation.database_backwards("test_rfk", editor, new_state, project_state) self.assertColumnExists("test_rfk_rider", "pony_id") def test_alter_model_table(self): """ Tests the AlterModelTable operation. 
""" project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony_2") self.assertEqual(operation.describe(), "Rename table for Pony to test_almota_pony_2") self.assertEqual(operation.migration_name_fragment, 'alter_pony_table') new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2") # Test the database alteration self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableNotExists("test_almota_pony") self.assertTableExists("test_almota_pony_2") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelTable") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'table': "test_almota_pony_2"}) def test_alter_model_table_none(self): """ Tests the AlterModelTable operation if the table name is set to None. """ operation = migrations.AlterModelTable("Pony", None) self.assertEqual(operation.describe(), "Rename table for Pony to (default)") def test_alter_model_table_noop(self): """ Tests the AlterModelTable operation if the table name is not changed. """ project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony") new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony") # Test the database alteration self.assertTableExists("test_almota_pony") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableExists("test_almota_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") def test_alter_model_table_m2m(self): """ AlterModelTable should rename auto-generated M2M tables. """ app_label = "test_talflmltlm2m" pony_db_table = 'pony_foo' project_state = self.set_up_test_model(app_label, second_model=True, db_table=pony_db_table) # Add the M2M field first_state = project_state.clone() operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable")) operation.state_forwards(app_label, first_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, first_state) original_m2m_table = "%s_%s" % (pony_db_table, "stables") new_m2m_table = "%s_%s" % (app_label, "pony_stables") self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) # Rename the Pony db_table which should also rename the m2m table. 
second_state = first_state.clone() operation = migrations.AlterModelTable(name='pony', table=None) operation.state_forwards(app_label, second_state) atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_forwards(app_label, editor, first_state, second_state) self.assertTableExists(new_m2m_table) self.assertTableNotExists(original_m2m_table) # And test reversal with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_backwards(app_label, editor, second_state, first_state) self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) def test_alter_field(self): """ Tests the AlterField operation. """ project_state = self.set_up_test_model("test_alfl") # Test the state alteration operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) self.assertEqual(operation.describe(), "Alter field pink on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_pink') new_state = project_state.clone() operation.state_forwards("test_alfl", new_state) self.assertIs(project_state.models['test_alfl', 'pony'].fields['pink'].null, False) self.assertIs(new_state.models['test_alfl', 'pony'].fields['pink'].null, True) # Test the database alteration self.assertColumnNotNull("test_alfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_alfl", editor, project_state, new_state) self.assertColumnNull("test_alfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alfl", editor, new_state, project_state) self.assertColumnNotNull("test_alfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"]) def test_alter_field_add_db_column_noop(self): """ AlterField operation is a noop when adding only a db_column and the column name is not changed. 
""" app_label = 'test_afadbn' project_state = self.set_up_test_model(app_label, related_model=True) pony_table = '%s_pony' % app_label new_state = project_state.clone() operation = migrations.AlterField('Pony', 'weight', models.FloatField(db_column='weight')) operation.state_forwards(app_label, new_state) self.assertIsNone( project_state.models[app_label, 'pony'].fields['weight'].db_column, ) self.assertEqual( new_state.models[app_label, 'pony'].fields['weight'].db_column, 'weight', ) self.assertColumnExists(pony_table, 'weight') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards(app_label, editor, project_state, new_state) self.assertColumnExists(pony_table, 'weight') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_backwards(app_label, editor, new_state, project_state) self.assertColumnExists(pony_table, 'weight') rider_table = '%s_rider' % app_label new_state = project_state.clone() operation = migrations.AlterField( 'Rider', 'pony', models.ForeignKey('Pony', models.CASCADE, db_column='pony_id'), ) operation.state_forwards(app_label, new_state) self.assertIsNone( project_state.models[app_label, 'rider'].fields['pony'].db_column, ) self.assertIs( new_state.models[app_label, 'rider'].fields['pony'].db_column, 'pony_id', ) self.assertColumnExists(rider_table, 'pony_id') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards(app_label, editor, project_state, new_state) self.assertColumnExists(rider_table, 'pony_id') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards(app_label, editor, new_state, project_state) self.assertColumnExists(rider_table, 'pony_id') def test_alter_field_pk(self): """ Tests the AlterField operation on primary keys (for things like PostgreSQL's SERIAL weirdness) """ project_state = self.set_up_test_model("test_alflpk") # Test the state alteration operation = migrations.AlterField("Pony", "id", models.IntegerField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpk", new_state) self.assertIsInstance( project_state.models['test_alflpk', 'pony'].fields['id'], models.AutoField, ) self.assertIsInstance( new_state.models['test_alflpk', 'pony'].fields['id'], models.IntegerField, ) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpk", editor, project_state, new_state) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpk", editor, new_state, project_state) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_pk_fk(self): """ Tests the AlterField operation on primary keys changes any FKs pointing to it. 
""" project_state = self.set_up_test_model("test_alflpkfk", related_model=True) project_state = self.apply_operations('test_alflpkfk', project_state, [ migrations.CreateModel('Stable', fields=[ ('ponies', models.ManyToManyField('Pony')), ]), migrations.AddField( 'Pony', 'stables', models.ManyToManyField('Stable'), ), ]) # Test the state alteration operation = migrations.AlterField("Pony", "id", models.FloatField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpkfk", new_state) self.assertIsInstance( project_state.models['test_alflpkfk', 'pony'].fields['id'], models.AutoField, ) self.assertIsInstance( new_state.models['test_alflpkfk', 'pony'].fields['id'], models.FloatField, ) def assertIdTypeEqualsFkType(): with connection.cursor() as cursor: id_type, id_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_pony") if c.name == "id" ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_rider") if c.name == "pony_id" ][0] m2m_fk_type, m2m_fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description( cursor, 'test_alflpkfk_pony_stables', ) if c.name == 'pony_id' ][0] remote_m2m_fk_type, remote_m2m_fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description( cursor, 'test_alflpkfk_stable_ponies', ) if c.name == 'pony_id' ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_type, m2m_fk_type) self.assertEqual(id_type, remote_m2m_fk_type) self.assertEqual(id_null, fk_null) self.assertEqual(id_null, m2m_fk_null) self.assertEqual(id_null, remote_m2m_fk_null) assertIdTypeEqualsFkType() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpkfk", editor, project_state, new_state) assertIdTypeEqualsFkType() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpkfk", editor, new_state, project_state) assertIdTypeEqualsFkType() @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_reloads_state_on_fk_with_to_field_target_type_change(self): app_label = 'test_alflrsfkwtflttc' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('code', models.IntegerField(unique=True)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='code')), ]), ]) operation = migrations.AlterField( 'Rider', 'code', models.CharField(max_length=100, unique=True), ) self.apply_operations(app_label, project_state, operations=[operation]) id_type, id_null = [ (c.type_code, c.null_ok) for c in self.get_table_description('%s_rider' % app_label) if c.name == 'code' ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in self.get_table_description('%s_pony' % app_label) if c.name == 'rider_id' ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_null, fk_null) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_reloads_state_on_fk_with_to_field_related_name_target_type_change(self): app_label = 'test_alflrsfkwtflrnttc' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('code', models.PositiveIntegerField(unique=True)), ]), 
migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('rider', models.ForeignKey( '%s.Rider' % app_label, models.CASCADE, to_field='code', related_name='+', )), ]), ]) operation = migrations.AlterField( 'Rider', 'code', models.CharField(max_length=100, unique=True), ) self.apply_operations(app_label, project_state, operations=[operation]) def test_alter_field_reloads_state_on_fk_target_changes(self): """ If AlterField doesn't reload state appropriately, the second AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. """ app_label = 'alter_alter_field_reloads_state_on_fk_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.AlterField('Rider', 'id', models.CharField(primary_key=True, max_length=99)), migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)), ]) def test_alter_field_reloads_state_on_fk_with_to_field_target_changes(self): """ If AlterField doesn't reload state appropriately, the second AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. """ app_label = 'alter_alter_field_reloads_state_on_fk_with_to_field_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('slug', models.CharField(unique=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='slug')), ('slug', models.CharField(unique=True, max_length=100)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE, to_field='slug')), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.AlterField('Rider', 'slug', models.CharField(unique=True, max_length=99)), migrations.AlterField('Pony', 'slug', models.CharField(unique=True, max_length=99)), ]) def test_rename_field_reloads_state_on_fk_target_changes(self): """ If RenameField doesn't reload state appropriately, the AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. 
""" app_label = 'alter_rename_field_reloads_state_on_fk_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameField('Rider', 'id', 'id2'), migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)), ], atomic=connection.features.supports_atomic_references_rename) def test_rename_field(self): """ Tests the RenameField operation. """ project_state = self.set_up_test_model("test_rnfl", unique_together=True, index_together=True) # Test the state alteration operation = migrations.RenameField("Pony", "pink", "blue") self.assertEqual(operation.describe(), "Rename field pink on Pony to blue") self.assertEqual(operation.migration_name_fragment, 'rename_pink_pony_blue') new_state = project_state.clone() operation.state_forwards("test_rnfl", new_state) self.assertIn("blue", new_state.models["test_rnfl", "pony"].fields) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].fields) # Make sure the unique_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) # Make sure the index_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['index_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['index_together'][0]) # Test the database alteration self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") with connection.schema_editor() as editor: operation.database_forwards("test_rnfl", editor, project_state, new_state) self.assertColumnExists("test_rnfl_pony", "blue") self.assertColumnNotExists("test_rnfl_pony", "pink") # Ensure the unique constraint has been ported over with connection.cursor() as cursor: cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_rnfl_pony") # Ensure the index constraint has been ported over self.assertIndexExists("test_rnfl_pony", ["weight", "blue"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rnfl", editor, new_state, project_state) self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") # Ensure the index constraint has been reset self.assertIndexExists("test_rnfl_pony", ["weight", "pink"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'old_name': "pink", 'new_name': "blue"}) def test_rename_field_with_db_column(self): project_state = self.apply_operations('test_rfwdbc', ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', 
models.AutoField(primary_key=True)), ('field', models.IntegerField(db_column='db_field')), ('fk_field', models.ForeignKey( 'Pony', models.CASCADE, db_column='db_fk_field', )), ]), ]) new_state = project_state.clone() operation = migrations.RenameField('Pony', 'field', 'renamed_field') operation.state_forwards('test_rfwdbc', new_state) self.assertIn('renamed_field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertNotIn('field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertColumnExists('test_rfwdbc_pony', 'db_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards('test_rfwdbc', editor, project_state, new_state) self.assertColumnExists('test_rfwdbc_pony', 'db_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_backwards('test_rfwdbc', editor, new_state, project_state) self.assertColumnExists('test_rfwdbc_pony', 'db_field') new_state = project_state.clone() operation = migrations.RenameField('Pony', 'fk_field', 'renamed_fk_field') operation.state_forwards('test_rfwdbc', new_state) self.assertIn('renamed_fk_field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertNotIn('fk_field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertColumnExists('test_rfwdbc_pony', 'db_fk_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards('test_rfwdbc', editor, project_state, new_state) self.assertColumnExists('test_rfwdbc_pony', 'db_fk_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_backwards('test_rfwdbc', editor, new_state, project_state) self.assertColumnExists('test_rfwdbc_pony', 'db_fk_field') def test_rename_field_case(self): project_state = self.apply_operations('test_rfmx', ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField()), ]), ]) new_state = project_state.clone() operation = migrations.RenameField('Pony', 'field', 'FiElD') operation.state_forwards('test_rfmx', new_state) self.assertIn('FiElD', new_state.models['test_rfmx', 'pony'].fields) self.assertColumnExists('test_rfmx_pony', 'field') with connection.schema_editor() as editor: operation.database_forwards('test_rfmx', editor, project_state, new_state) self.assertColumnExists( 'test_rfmx_pony', connection.introspection.identifier_converter('FiElD'), ) with connection.schema_editor() as editor: operation.database_backwards('test_rfmx', editor, new_state, project_state) self.assertColumnExists('test_rfmx_pony', 'field') def test_rename_missing_field(self): state = ProjectState() state.add_model(ModelState('app', 'model', [])) with self.assertRaisesMessage(FieldDoesNotExist, "app.model has no field named 'field'"): migrations.RenameField('model', 'field', 'new_field').state_forwards('app', state) def test_rename_referenced_field_state_forward(self): state = ProjectState() state.add_model(ModelState('app', 'Model', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField(unique=True)), ])) state.add_model(ModelState('app', 'OtherModel', [ ('id', models.AutoField(primary_key=True)), ('fk', models.ForeignKey('Model', models.CASCADE, to_field='field')), ('fo', models.ForeignObject('Model', models.CASCADE, from_fields=('fk',), to_fields=('field',))), ])) operation = migrations.RenameField('Model', 'field', 'renamed') new_state = state.clone() operation.state_forwards('app', new_state) 
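# Renaming the referenced field must rewrite both the FK's remote field_name and the to_fields of the FK and the ForeignObject that point at it.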
self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].remote_field.field_name, 'renamed') self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].from_fields, ['self']) self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].to_fields, ('renamed',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].from_fields, ('fk',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].to_fields, ('renamed',)) operation = migrations.RenameField('OtherModel', 'fk', 'renamed_fk') new_state = state.clone() operation.state_forwards('app', new_state) self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].remote_field.field_name, 'renamed') self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].from_fields, ('self',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].to_fields, ('renamed',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].from_fields, ('renamed_fk',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].to_fields, ('renamed',)) def test_alter_unique_together(self): """ Tests the AlterUniqueTogether operation. """ project_state = self.set_up_test_model("test_alunto") # Test the state alteration operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")]) self.assertEqual(operation.describe(), "Alter unique_together for Pony (1 constraint(s))") self.assertEqual( operation.migration_name_fragment, 'alter_pony_unique_together', ) new_state = project_state.clone() operation.state_forwards("test_alunto", new_state) self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # Make sure we can insert duplicate rows with connection.cursor() as cursor: cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alunto", editor, project_state, new_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alunto", editor, new_state, project_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test flat unique_together operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight")) operation.state_forwards("test_alunto", new_state) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterUniqueTogether") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'unique_together': {("pink", "weight")}}) def test_alter_unique_together_remove(self): operation = migrations.AlterUniqueTogether("Pony", None) self.assertEqual(operation.describe(), "Alter unique_together for Pony (0 constraint(s))") def 
test_add_index(self): """ Test the AddIndex operation. """ project_state = self.set_up_test_model("test_adin") msg = ( "Indexes passed to AddIndex operations require a name argument. " "<Index: fields=['pink']> doesn't have one." ) with self.assertRaisesMessage(ValueError, msg): migrations.AddIndex("Pony", models.Index(fields=["pink"])) index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx") operation = migrations.AddIndex("Pony", index) self.assertEqual(operation.describe(), "Create index test_adin_pony_pink_idx on field(s) pink of model Pony") self.assertEqual( operation.migration_name_fragment, 'pony_test_adin_pony_pink_idx', ) new_state = project_state.clone() operation.state_forwards("test_adin", new_state) # Test the database alteration self.assertEqual(len(new_state.models["test_adin", "pony"].options['indexes']), 1) self.assertIndexNotExists("test_adin_pony", ["pink"]) with connection.schema_editor() as editor: operation.database_forwards("test_adin", editor, project_state, new_state) self.assertIndexExists("test_adin_pony", ["pink"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adin", editor, new_state, project_state) self.assertIndexNotExists("test_adin_pony", ["pink"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddIndex") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'index': index}) def test_remove_index(self): """ Test the RemoveIndex operation. """ project_state = self.set_up_test_model("test_rmin", multicol_index=True) self.assertTableExists("test_rmin_pony") self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) operation = migrations.RemoveIndex("Pony", "pony_test_idx") self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony") self.assertEqual( operation.migration_name_fragment, 'remove_pony_pony_test_idx', ) new_state = project_state.clone() operation.state_forwards("test_rmin", new_state) # Test the state alteration self.assertEqual(len(new_state.models["test_rmin", "pony"].options['indexes']), 0) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_rmin", editor, project_state, new_state) self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmin", editor, new_state, project_state) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RemoveIndex") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': "pony_test_idx"}) # Also test a field dropped with index - sqlite remake issue operations = [ migrations.RemoveIndex("Pony", "pony_test_idx"), migrations.RemoveField("Pony", "pink"), ] self.assertColumnExists("test_rmin_pony", "pink") self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # Test database alteration new_state = project_state.clone() self.apply_operations('test_rmin', new_state, operations=operations) self.assertColumnNotExists("test_rmin_pony", "pink") self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"]) # And test reversal self.unapply_operations("test_rmin", project_state, operations=operations) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) def test_add_index_state_forwards(self): 
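# AddIndex.state_forwards() should re-render the model so the new state doesn't reuse the cached model class from the old state.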
project_state = self.set_up_test_model('test_adinsf') index = models.Index(fields=['pink'], name='test_adinsf_pony_pink_idx') old_model = project_state.apps.get_model('test_adinsf', 'Pony') new_state = project_state.clone() operation = migrations.AddIndex('Pony', index) operation.state_forwards('test_adinsf', new_state) new_model = new_state.apps.get_model('test_adinsf', 'Pony') self.assertIsNot(old_model, new_model) def test_remove_index_state_forwards(self): project_state = self.set_up_test_model('test_rminsf') index = models.Index(fields=['pink'], name='test_rminsf_pony_pink_idx') migrations.AddIndex('Pony', index).state_forwards('test_rminsf', project_state) old_model = project_state.apps.get_model('test_rminsf', 'Pony') new_state = project_state.clone() operation = migrations.RemoveIndex('Pony', 'test_rminsf_pony_pink_idx') operation.state_forwards('test_rminsf', new_state) new_model = new_state.apps.get_model('test_rminsf', 'Pony') self.assertIsNot(old_model, new_model) @skipUnlessDBFeature('supports_expression_indexes') def test_add_func_index(self): app_label = 'test_addfuncin' index_name = f'{app_label}_pony_abs_idx' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label) index = models.Index(Abs('weight'), name=index_name) operation = migrations.AddIndex('Pony', index) self.assertEqual( operation.describe(), 'Create index test_addfuncin_pony_abs_idx on Abs(F(weight)) on model Pony', ) self.assertEqual( operation.migration_name_fragment, 'pony_test_addfuncin_pony_abs_idx', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['indexes']), 1) self.assertIndexNameNotExists(table_name, index_name) # Add index. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNameExists(table_name, index_name) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertIndexNameNotExists(table_name, index_name) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddIndex') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'index': index}) @skipUnlessDBFeature('supports_expression_indexes') def test_remove_func_index(self): app_label = 'test_rmfuncin' index_name = f'{app_label}_pony_abs_idx' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label, indexes=[ models.Index(Abs('weight'), name=index_name), ]) self.assertTableExists(table_name) self.assertIndexNameExists(table_name, index_name) operation = migrations.RemoveIndex('Pony', index_name) self.assertEqual( operation.describe(), 'Remove index test_rmfuncin_pony_abs_idx from Pony', ) self.assertEqual( operation.migration_name_fragment, 'remove_pony_test_rmfuncin_pony_abs_idx', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['indexes']), 0) # Remove index. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNameNotExists(table_name, index_name) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertIndexNameExists(table_name, index_name) # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveIndex') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'name': index_name}) def test_alter_field_with_index(self): """ Test AlterField operation with an index to ensure indexes created via Meta.indexes don't get dropped with sqlite3 remake. """ project_state = self.set_up_test_model("test_alflin", index=True) operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) new_state = project_state.clone() operation.state_forwards("test_alflin", new_state) # Test the database alteration self.assertColumnNotNull("test_alflin_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_alflin", editor, project_state, new_state) # Index hasn't been dropped self.assertIndexExists("test_alflin_pony", ["pink"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflin", editor, new_state, project_state) # Ensure the index is still there self.assertIndexExists("test_alflin_pony", ["pink"]) def test_alter_index_together(self): """ Tests the AlterIndexTogether operation. """ project_state = self.set_up_test_model("test_alinto") # Test the state alteration operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")]) self.assertEqual(operation.describe(), "Alter index_together for Pony (1 constraint(s))") self.assertEqual( operation.migration_name_fragment, 'alter_pony_index_together', ) new_state = project_state.clone() operation.state_forwards("test_alinto", new_state) self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0) self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1) # Make sure there's no matching index self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alinto", editor, project_state, new_state) self.assertIndexExists("test_alinto_pony", ["pink", "weight"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alinto", editor, new_state, project_state) self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterIndexTogether") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'index_together': {("pink", "weight")}}) def test_alter_index_together_remove(self): operation = migrations.AlterIndexTogether("Pony", None) self.assertEqual(operation.describe(), "Alter index_together for Pony (0 constraint(s))") @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_alter_index_together_remove_with_unique_together(self): app_label = 'test_alintoremove_wunto' table_name = '%s_pony' % app_label project_state = self.set_up_test_model(app_label, unique_together=True) self.assertUniqueConstraintExists(table_name, ['pink', 'weight']) # Add index together. new_state = project_state.clone() operation = migrations.AlterIndexTogether('Pony', [('pink', 'weight')]) operation.state_forwards(app_label, new_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexExists(table_name, ['pink', 'weight']) # Remove index together. 
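# Removing index_together must not drop the unique constraint that covers the same columns.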
project_state = new_state new_state = project_state.clone() operation = migrations.AlterIndexTogether('Pony', set()) operation.state_forwards(app_label, new_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNotExists(table_name, ['pink', 'weight']) self.assertUniqueConstraintExists(table_name, ['pink', 'weight']) @skipUnlessDBFeature('supports_table_check_constraints') def test_add_constraint(self): project_state = self.set_up_test_model("test_addconstraint") gt_check = models.Q(pink__gt=2) gt_constraint = models.CheckConstraint(check=gt_check, name="test_add_constraint_pony_pink_gt_2") gt_operation = migrations.AddConstraint("Pony", gt_constraint) self.assertEqual( gt_operation.describe(), "Create constraint test_add_constraint_pony_pink_gt_2 on model Pony" ) self.assertEqual( gt_operation.migration_name_fragment, 'pony_test_add_constraint_pony_pink_gt_2', ) # Test the state alteration new_state = project_state.clone() gt_operation.state_forwards("test_addconstraint", new_state) self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 1) Pony = new_state.apps.get_model("test_addconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 1) # Test the database alteration with connection.schema_editor() as editor: gt_operation.database_forwards("test_addconstraint", editor, project_state, new_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=1.0) # Add another one. lt_check = models.Q(pink__lt=100) lt_constraint = models.CheckConstraint(check=lt_check, name="test_add_constraint_pony_pink_lt_100") lt_operation = migrations.AddConstraint("Pony", lt_constraint) lt_operation.state_forwards("test_addconstraint", new_state) self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 2) Pony = new_state.apps.get_model("test_addconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 2) with connection.schema_editor() as editor: lt_operation.database_forwards("test_addconstraint", editor, project_state, new_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=100, weight=1.0) # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards("test_addconstraint", editor, new_state, project_state) Pony.objects.create(pink=1, weight=1.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], "AddConstraint") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'constraint': gt_constraint}) @skipUnlessDBFeature('supports_table_check_constraints') def test_add_constraint_percent_escaping(self): app_label = 'add_constraint_string_quoting' operations = [ migrations.CreateModel( 'Author', fields=[ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=100)), ('surname', models.CharField(max_length=100, default='')), ('rebate', models.CharField(max_length=100)), ], ), ] from_state = self.apply_operations(app_label, ProjectState(), operations) # "%" generated in startswith lookup should be escaped in a way that is # considered a leading wildcard. 
check = models.Q(name__startswith='Albert') constraint = models.CheckConstraint(check=check, name='name_constraint') operation = migrations.AddConstraint('Author', constraint) to_state = from_state.clone() operation.state_forwards(app_label, to_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Author = to_state.apps.get_model(app_label, 'Author') with self.assertRaises(IntegrityError), transaction.atomic(): Author.objects.create(name='Artur') # Literal "%" should be escaped in a way that is not a considered a # wildcard. check = models.Q(rebate__endswith='%') constraint = models.CheckConstraint(check=check, name='rebate_constraint') operation = migrations.AddConstraint('Author', constraint) from_state = to_state to_state = from_state.clone() operation.state_forwards(app_label, to_state) Author = to_state.apps.get_model(app_label, 'Author') with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Author = to_state.apps.get_model(app_label, 'Author') with self.assertRaises(IntegrityError), transaction.atomic(): Author.objects.create(name='Albert', rebate='10$') author = Author.objects.create(name='Albert', rebate='10%') self.assertEqual(Author.objects.get(), author) # Right-hand-side baked "%" literals should not be used for parameters # interpolation. check = ~models.Q(surname__startswith=models.F('name')) constraint = models.CheckConstraint(check=check, name='name_constraint_rhs') operation = migrations.AddConstraint('Author', constraint) from_state = to_state to_state = from_state.clone() operation.state_forwards(app_label, to_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Author = to_state.apps.get_model(app_label, 'Author') with self.assertRaises(IntegrityError), transaction.atomic(): Author.objects.create(name='Albert', surname='Alberto') @skipUnlessDBFeature('supports_table_check_constraints') def test_add_or_constraint(self): app_label = 'test_addorconstraint' constraint_name = 'add_constraint_or' from_state = self.set_up_test_model(app_label) check = models.Q(pink__gt=2, weight__gt=2) | models.Q(weight__lt=0) constraint = models.CheckConstraint(check=check, name=constraint_name) operation = migrations.AddConstraint('Pony', constraint) to_state = from_state.clone() operation.state_forwards(app_label, to_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Pony = to_state.apps.get_model(app_label, 'Pony') with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=2, weight=3.0) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=3, weight=1.0) Pony.objects.bulk_create([ Pony(pink=3, weight=-1.0), Pony(pink=1, weight=-1.0), Pony(pink=3, weight=3.0), ]) @skipUnlessDBFeature('supports_table_check_constraints') def test_add_constraint_combinable(self): app_label = 'test_addconstraint_combinable' operations = [ migrations.CreateModel( 'Book', fields=[ ('id', models.AutoField(primary_key=True)), ('read', models.PositiveIntegerField()), ('unread', models.PositiveIntegerField()), ], ), ] from_state = self.apply_operations(app_label, ProjectState(), operations) constraint = models.CheckConstraint( check=models.Q(read=(100 - models.F('unread'))), name='test_addconstraint_combinable_sum_100', ) operation = migrations.AddConstraint('Book', constraint) to_state = 
from_state.clone() operation.state_forwards(app_label, to_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Book = to_state.apps.get_model(app_label, 'Book') with self.assertRaises(IntegrityError), transaction.atomic(): Book.objects.create(read=70, unread=10) Book.objects.create(read=70, unread=30) @skipUnlessDBFeature('supports_table_check_constraints') def test_remove_constraint(self): project_state = self.set_up_test_model("test_removeconstraint", constraints=[ models.CheckConstraint(check=models.Q(pink__gt=2), name="test_remove_constraint_pony_pink_gt_2"), models.CheckConstraint(check=models.Q(pink__lt=100), name="test_remove_constraint_pony_pink_lt_100"), ]) gt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_gt_2") self.assertEqual( gt_operation.describe(), "Remove constraint test_remove_constraint_pony_pink_gt_2 from model Pony" ) self.assertEqual( gt_operation.migration_name_fragment, 'remove_pony_test_remove_constraint_pony_pink_gt_2', ) # Test state alteration new_state = project_state.clone() gt_operation.state_forwards("test_removeconstraint", new_state) self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 1) Pony = new_state.apps.get_model("test_removeconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 1) # Test database alteration with connection.schema_editor() as editor: gt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state) Pony.objects.create(pink=1, weight=1.0).delete() with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=100, weight=1.0) # Remove the other one. lt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_lt_100") lt_operation.state_forwards("test_removeconstraint", new_state) self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 0) Pony = new_state.apps.get_model("test_removeconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor: lt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state) Pony.objects.create(pink=100, weight=1.0).delete() # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards("test_removeconstraint", editor, new_state, project_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=1.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], "RemoveConstraint") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': "test_remove_constraint_pony_pink_gt_2"}) def test_add_partial_unique_constraint(self): project_state = self.set_up_test_model('test_addpartialuniqueconstraint') partial_unique_constraint = models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ) operation = migrations.AddConstraint('Pony', partial_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint test_constraint_pony_pink_for_weight_gt_5_uniq ' 'on model Pony' ) # Test the state alteration new_state = project_state.clone() operation.state_forwards('test_addpartialuniqueconstraint', new_state) self.assertEqual(len(new_state.models['test_addpartialuniqueconstraint', 'pony'].options['constraints']), 1) Pony = 
new_state.apps.get_model('test_addpartialuniqueconstraint', 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards('test_addpartialuniqueconstraint', editor, project_state, new_state) # Test constraint works Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test reversal with connection.schema_editor() as editor: operation.database_backwards('test_addpartialuniqueconstraint', editor, new_state, project_state) # Test constraint doesn't work Pony.objects.create(pink=1, weight=7.0) # Test deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'constraint': partial_unique_constraint}) def test_remove_partial_unique_constraint(self): project_state = self.set_up_test_model('test_removepartialuniqueconstraint', constraints=[ models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ), ]) gt_operation = migrations.RemoveConstraint('Pony', 'test_constraint_pony_pink_for_weight_gt_5_uniq') self.assertEqual( gt_operation.describe(), 'Remove constraint test_constraint_pony_pink_for_weight_gt_5_uniq from model Pony' ) # Test state alteration new_state = project_state.clone() gt_operation.state_forwards('test_removepartialuniqueconstraint', new_state) self.assertEqual(len(new_state.models['test_removepartialuniqueconstraint', 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model('test_removepartialuniqueconstraint', 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) # Test database alteration with connection.schema_editor() as editor: gt_operation.database_forwards('test_removepartialuniqueconstraint', editor, project_state, new_state) # Test constraint doesn't work Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) Pony.objects.create(pink=1, weight=7.0).delete() # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards('test_removepartialuniqueconstraint', editor, new_state, project_state) # Test constraint works if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'test_constraint_pony_pink_for_weight_gt_5_uniq', }) def test_add_deferred_unique_constraint(self): app_label = 'test_adddeferred_uc' project_state = self.set_up_test_model(app_label) deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferred_pink_constraint_add', deferrable=models.Deferrable.DEFERRED, ) operation = migrations.AddConstraint('Pony', deferred_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint deferred_pink_constraint_add on model Pony', ) # Add constraint. 
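# With a deferred unique constraint, the duplicate row is tolerated until commit; SET CONSTRAINTS ... IMMEDIATE surfaces the violation immediately.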
new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) Pony.objects.create(pink=1, weight=4.0) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 3 obj.save() else: Pony.objects.create(pink=1, weight=4.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual( definition[2], {'model_name': 'Pony', 'constraint': deferred_unique_constraint}, ) def test_remove_deferred_unique_constraint(self): app_label = 'test_removedeferred_uc' deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferred_pink_constraint_rm', deferrable=models.Deferrable.DEFERRED, ) project_state = self.set_up_test_model(app_label, constraints=[deferred_unique_constraint]) operation = migrations.RemoveConstraint('Pony', deferred_unique_constraint.name) self.assertEqual( operation.describe(), 'Remove constraint deferred_pink_constraint_rm from model Pony', ) # Remove constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0).delete() # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 3 obj.save() else: Pony.objects.create(pink=1, weight=4.0) # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'deferred_pink_constraint_rm', }) def test_add_covering_unique_constraint(self): app_label = 'test_addcovering_uc' project_state = self.set_up_test_model(app_label) covering_unique_constraint = models.UniqueConstraint( fields=['pink'], name='covering_pink_constraint_add', include=['weight'], ) operation = migrations.AddConstraint('Pony', covering_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint covering_pink_constraint_add on model Pony', ) # Add constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) Pony.objects.create(pink=1, weight=4.0) if connection.features.supports_covering_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=4.0) else: Pony.objects.create(pink=1, weight=4.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual( definition[2], {'model_name': 'Pony', 'constraint': covering_unique_constraint}, ) def test_remove_covering_unique_constraint(self): app_label = 'test_removecovering_uc' covering_unique_constraint = models.UniqueConstraint( fields=['pink'], name='covering_pink_constraint_rm', include=['weight'], ) project_state = self.set_up_test_model(app_label, constraints=[covering_unique_constraint]) operation = migrations.RemoveConstraint('Pony', covering_unique_constraint.name) self.assertEqual( operation.describe(), 'Remove constraint covering_pink_constraint_rm from model Pony', ) # Remove constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0).delete() # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_covering_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=4.0) else: Pony.objects.create(pink=1, weight=4.0) # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'covering_pink_constraint_rm', }) def test_add_func_unique_constraint(self): app_label = 'test_adfuncuc' constraint_name = f'{app_label}_pony_abs_uq' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label) constraint = models.UniqueConstraint(Abs('weight'), name=constraint_name) operation = migrations.AddConstraint('Pony', constraint) self.assertEqual( operation.describe(), 'Create constraint test_adfuncuc_pony_abs_uq on model Pony', ) self.assertEqual( operation.migration_name_fragment, 'pony_test_adfuncuc_pony_abs_uq', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1) self.assertIndexNameNotExists(table_name, constraint_name) # Add constraint. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) Pony = new_state.apps.get_model(app_label, 'Pony') Pony.objects.create(weight=4.0) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) with self.assertRaises(IntegrityError): Pony.objects.create(weight=-4.0) else: self.assertIndexNameNotExists(table_name, constraint_name) Pony.objects.create(weight=-4.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertIndexNameNotExists(table_name, constraint_name) # Constraint doesn't work. Pony.objects.create(weight=-4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual( definition[2], {'model_name': 'Pony', 'constraint': constraint}, ) def test_remove_func_unique_constraint(self): app_label = 'test_rmfuncuc' constraint_name = f'{app_label}_pony_abs_uq' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label, constraints=[ models.UniqueConstraint(Abs('weight'), name=constraint_name), ]) self.assertTableExists(table_name) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) operation = migrations.RemoveConstraint('Pony', constraint_name) self.assertEqual( operation.describe(), 'Remove constraint test_rmfuncuc_pony_abs_uq from model Pony', ) self.assertEqual( operation.migration_name_fragment, 'remove_pony_test_rmfuncuc_pony_abs_uq', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) # Remove constraint. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNameNotExists(table_name, constraint_name) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=-4.0).delete() # Reversal. 
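# Re-adding the functional constraint only has a database effect on backends that support expression indexes.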
with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) with self.assertRaises(IntegrityError): Pony.objects.create(weight=-4.0) else: self.assertIndexNameNotExists(table_name, constraint_name) Pony.objects.create(weight=-4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'name': constraint_name}) def test_alter_model_options(self): """ Tests the AlterModelOptions operation. """ project_state = self.set_up_test_model("test_almoop") # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {"permissions": [("can_groom", "Can groom")]}) self.assertEqual(operation.describe(), "Change Meta options on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_options') new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(new_state.models["test_almoop", "pony"].options["permissions"][0][0], "can_groom") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {"permissions": [("can_groom", "Can groom")]}}) def test_alter_model_options_emptying(self): """ The AlterModelOptions operation removes keys from the dict (#23121) """ project_state = self.set_up_test_model("test_almoop", options=True) # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {}) self.assertEqual(operation.describe(), "Change Meta options on Pony") new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {}}) def test_alter_order_with_respect_to(self): """ Tests the AlterOrderWithRespectTo operation. 
""" project_state = self.set_up_test_model("test_alorwrtto", related_model=True) # Test the state alteration operation = migrations.AlterOrderWithRespectTo("Rider", "pony") self.assertEqual(operation.describe(), "Set order_with_respect_to on Rider to pony") self.assertEqual( operation.migration_name_fragment, 'alter_rider_order_with_respect_to', ) new_state = project_state.clone() operation.state_forwards("test_alorwrtto", new_state) self.assertIsNone( project_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None) ) self.assertEqual( new_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None), "pony" ) # Make sure there's no matching index self.assertColumnNotExists("test_alorwrtto_rider", "_order") # Create some rows before alteration rendered_state = project_state.apps pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(weight=50) rider1 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony) rider1.friend = rider1 rider1.save() rider2 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony) rider2.friend = rider2 rider2.save() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alorwrtto", editor, project_state, new_state) self.assertColumnExists("test_alorwrtto_rider", "_order") # Check for correct value in rows updated_riders = new_state.apps.get_model("test_alorwrtto", "Rider").objects.all() self.assertEqual(updated_riders[0]._order, 0) self.assertEqual(updated_riders[1]._order, 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alorwrtto", editor, new_state, project_state) self.assertColumnNotExists("test_alorwrtto_rider", "_order") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterOrderWithRespectTo") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Rider", 'order_with_respect_to': "pony"}) def test_alter_model_managers(self): """ The managers on a model are set. 
""" project_state = self.set_up_test_model("test_almoma") # Test the state alteration operation = migrations.AlterModelManagers( "Pony", managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Change managers on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_managers') managers = project_state.models["test_almoma", "pony"].managers self.assertEqual(managers, []) new_state = project_state.clone() operation.state_forwards("test_almoma", new_state) self.assertIn(("test_almoma", "pony"), new_state.models) managers = new_state.models["test_almoma", "pony"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) rendered_state = new_state.apps model = rendered_state.get_model('test_almoma', 'pony') self.assertIsInstance(model.food_qs, models.Manager) self.assertIsInstance(model.food_mgr, FoodManager) self.assertIsInstance(model.food_mgr_kwargs, FoodManager) def test_alter_model_managers_emptying(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_almomae", manager_model=True) # Test the state alteration operation = migrations.AlterModelManagers("Food", managers=[]) self.assertEqual(operation.describe(), "Change managers on Food") self.assertIn(("test_almomae", "food"), project_state.models) managers = project_state.models["test_almomae", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) new_state = project_state.clone() operation.state_forwards("test_almomae", new_state) managers = new_state.models["test_almomae", "food"].managers self.assertEqual(managers, []) def test_alter_fk(self): """ Creating and then altering an FK works correctly and deals with the pending SQL (#23091) """ project_state = self.set_up_test_model("test_alfk") # Test adding and then altering the FK in one go create_operation = migrations.CreateModel( name="Rider", fields=[ ("id", models.AutoField(primary_key=True)), ("pony", models.ForeignKey("Pony", models.CASCADE)), ], ) create_state = project_state.clone() create_operation.state_forwards("test_alfk", create_state) alter_operation = migrations.AlterField( model_name='Rider', name='pony', field=models.ForeignKey("Pony", models.CASCADE, editable=False), ) alter_state = create_state.clone() alter_operation.state_forwards("test_alfk", alter_state) with connection.schema_editor() as editor: create_operation.database_forwards("test_alfk", editor, project_state, create_state) alter_operation.database_forwards("test_alfk", editor, create_state, alter_state) def test_alter_fk_non_fk(self): """ Altering an FK to a non-FK works (#23244) """ # Test the state alteration operation = migrations.AlterField( model_name="Rider", name="pony", field=models.FloatField(), ) 
project_state, new_state = self.make_test_state("test_afknfk", operation, related_model=True) # Test the database alteration self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") with connection.schema_editor() as editor: operation.database_forwards("test_afknfk", editor, project_state, new_state) self.assertColumnExists("test_afknfk_rider", "pony") self.assertColumnNotExists("test_afknfk_rider", "pony_id") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_afknfk", editor, new_state, project_state) self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") def test_run_sql(self): """ Tests the RunSQL operation. """ project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( # Use a multi-line string with a comment to test splitting on SQLite and MySQL respectively "CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'i love ponies'); -- this is magic!\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (2, 'i love django');\n" "UPDATE i_love_ponies SET special_thing = 'Ponies' WHERE special_thing LIKE '%%ponies';" "UPDATE i_love_ponies SET special_thing = 'Django' WHERE special_thing LIKE '%django';", # Run delete queries to test for parameter substitution failure # reported in #23426 "DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';" "DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';" "DROP TABLE i_love_ponies", state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])], ) self.assertEqual(operation.describe(), "Raw SQL operation") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_runsql", new_state) self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test SQL collection with connection.schema_editor(collect_sql=True) as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql)) operation.database_backwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql)) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 2) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'") self.assertEqual(cursor.fetchall()[0][0], 1) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'") self.assertEqual(cursor.fetchall()[0][0], 1) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunSQL") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["reverse_sql", "sql", "state_operations"]) # And elidable reduction 
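# A non-elidable RunSQL never reduces; an elidable one is dropped, leaving only the other operation.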
self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunSQL('SELECT 1 FROM void;', elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_sql_params(self): """ #23426 - RunSQL should accept parameters. """ project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( ["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"], ["DROP TABLE i_love_ponies"], ) param_operation = migrations.RunSQL( # forwards ( "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');", ["INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);", ['Ponies']], ("INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);", (3, 'Python',)), ), # backwards [ "DELETE FROM i_love_ponies WHERE special_thing = 'Django';", ["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None], ("DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;", [3, 'Python']), ] ) # Make sure there's no table self.assertTableNotExists("i_love_ponies") new_state = project_state.clone() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) # Test parameter passing with connection.schema_editor() as editor: param_operation.database_forwards("test_runsql", editor, project_state, new_state) # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 3) with connection.schema_editor() as editor: param_operation.database_backwards("test_runsql", editor, new_state, project_state) with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") def test_run_sql_params_invalid(self): """ #23426 - RunSQL should fail when a list of statements with an incorrect number of tuples is given. """ project_state = self.set_up_test_model("test_runsql") new_state = project_state.clone() operation = migrations.RunSQL( # forwards [ ["INSERT INTO foo (bar) VALUES ('buz');"] ], # backwards ( ("DELETE FROM foo WHERE bar = 'buz';", 'invalid', 'parameter count'), ), ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"): operation.database_forwards("test_runsql", editor, project_state, new_state) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"): operation.database_backwards("test_runsql", editor, new_state, project_state) def test_run_sql_noop(self): """ #24098 - Tests no-op RunSQL operations. 
""" operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, None, None) operation.database_backwards("test_runsql", editor, None, None) def test_run_sql_add_missing_semicolon_on_collect_sql(self): project_state = self.set_up_test_model('test_runsql') new_state = project_state.clone() tests = [ 'INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1);\n', 'INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1)\n', ] for sql in tests: with self.subTest(sql=sql): operation = migrations.RunSQL(sql, migrations.RunPython.noop) with connection.schema_editor(collect_sql=True) as editor: operation.database_forwards('test_runsql', editor, project_state, new_state) collected_sql = '\n'.join(editor.collected_sql) self.assertEqual(collected_sql.count(';'), 1) def test_run_python(self): """ Tests the RunPython operation """ project_state = self.set_up_test_model("test_runpython", mti_model=True) # Create the operation def inner_method(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.create(pink=1, weight=3.55) Pony.objects.create(weight=5) def inner_method_reverse(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.filter(pink=1, weight=3.55).delete() Pony.objects.filter(weight=5).delete() operation = migrations.RunPython(inner_method, reverse_code=inner_method_reverse) self.assertEqual(operation.describe(), "Raw Python operation") # Test the state alteration does nothing new_state = project_state.clone() operation.state_forwards("test_runpython", new_state) self.assertEqual(new_state, project_state) # Test the database alteration self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) # Now test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) # Now test we can't use a string with self.assertRaisesMessage(ValueError, 'RunPython must be supplied with a callable'): migrations.RunPython("print 'ahahaha'") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code", "reverse_code"]) # Also test reversal fails, with an operation identical to above but without reverse_code set no_reverse_operation = migrations.RunPython(inner_method) self.assertFalse(no_reverse_operation.reversible) with connection.schema_editor() as editor: no_reverse_operation.database_forwards("test_runpython", editor, project_state, new_state) with self.assertRaises(NotImplementedError): no_reverse_operation.database_backwards("test_runpython", editor, new_state, project_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) def create_ponies(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") pony1 = Pony.objects.create(pink=1, weight=3.55) self.assertIsNot(pony1.pk, None) pony2 = Pony.objects.create(weight=5) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = 
migrations.RunPython(create_ponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code"]) def create_shetlandponies(models, schema_editor): ShetlandPony = models.get_model("test_runpython", "ShetlandPony") pony1 = ShetlandPony.objects.create(weight=4.0) self.assertIsNot(pony1.pk, None) pony2 = ShetlandPony.objects.create(weight=5.0) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = migrations.RunPython(create_shetlandponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6) self.assertEqual(project_state.apps.get_model("test_runpython", "ShetlandPony").objects.count(), 2) # And elidable reduction self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunPython(inner_method, elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_python_atomic(self): """ Tests the RunPython operation correctly handles the "atomic" keyword """ project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True) def inner_method(models, schema_editor): Pony = models.get_model("test_runpythonatomic", "Pony") Pony.objects.create(pink=1, weight=3.55) raise ValueError("Adrian hates ponies.") # Verify atomicity when applying. atomic_migration = Migration("test", "test_runpythonatomic") atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method)] non_atomic_migration = Migration("test", "test_runpythonatomic") non_atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method, atomic=False)] # If we're a fully-transactional database, both versions should rollback if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation should leave a row there else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Reset object count to zero and verify atomicity when unapplying. 
project_state.apps.get_model("test_runpythonatomic", "Pony").objects.all().delete() # On a fully-transactional database, both versions rollback. if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation leaves a row there. else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Verify deconstruction. definition = non_atomic_migration.operations[0].deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["atomic", "code", "reverse_code"]) def test_run_python_related_assignment(self): """ #24282 - Model changes to a FK reverse side update the model on the FK side as well. """ def inner_method(models, schema_editor): Author = models.get_model("test_authors", "Author") Book = models.get_model("test_books", "Book") author = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author) create_author = migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey("test_authors.Author", models.CASCADE)) ], options={}, ) add_hometown = migrations.AddField( "Author", "hometown", models.CharField(max_length=100), ) create_old_man = migrations.RunPython(inner_method, inner_method) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_authors", new_state) create_author.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_books", new_state) create_book.database_forwards("test_books", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: add_hometown.state_forwards("test_authors", new_state) add_hometown.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_old_man.state_forwards("test_books", new_state) create_old_man.database_forwards("test_books", editor, project_state, new_state) def test_model_with_bigautofield(self): """ A model with BigAutoField can be 
created. """ def create_data(models, schema_editor): Author = models.get_model("test_author", "Author") Book = models.get_model("test_book", "Book") author1 = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author1) Book.objects.create(id=2 ** 33, title="A farewell to arms", author=author1) author2 = Author.objects.create(id=2 ** 33, name="Remarque") Book.objects.create(title="All quiet on the western front", author=author2) Book.objects.create(title="Arc de Triomphe", author=author2) create_author = migrations.CreateModel( "Author", [ ("id", models.BigAutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.BigAutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey(to="test_author.Author", on_delete=models.CASCADE)) ], options={}, ) fill_data = migrations.RunPython(create_data) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_author", new_state) create_author.database_forwards("test_author", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_book", new_state) create_book.database_forwards("test_book", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_data.state_forwards("fill_data", new_state) fill_data.database_forwards("fill_data", editor, project_state, new_state) def _test_autofield_foreignfield_growth(self, source_field, target_field, target_value): """ A field may be migrated in the following ways: - AutoField to BigAutoField - SmallAutoField to AutoField - SmallAutoField to BigAutoField """ def create_initial_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog = Blog.objects.create(name="web development done right") Article.objects.create(name="Frameworks", blog=blog) Article.objects.create(name="Programming Languages", blog=blog) def create_big_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog2 = Blog.objects.create(name="Frameworks", id=target_value) Article.objects.create(name="Django", blog=blog2) Article.objects.create(id=target_value, name="Django2", blog=blog2) create_blog = migrations.CreateModel( "Blog", [ ("id", source_field(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_article = migrations.CreateModel( "Article", [ ("id", source_field(primary_key=True)), ("blog", models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE)), ("name", models.CharField(max_length=100)), ("data", models.TextField(default="")), ], options={}, ) fill_initial_data = migrations.RunPython(create_initial_data, create_initial_data) fill_big_data = migrations.RunPython(create_big_data, create_big_data) grow_article_id = migrations.AlterField('Article', 'id', target_field(primary_key=True)) grow_blog_id = migrations.AlterField('Blog', 'id', target_field(primary_key=True)) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_blog.state_forwards("test_blog", new_state) create_blog.database_forwards("test_blog", editor, project_state, new_state) 
project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_article.state_forwards("test_article", new_state) create_article.database_forwards("test_article", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_initial_data.state_forwards("fill_initial_data", new_state) fill_initial_data.database_forwards("fill_initial_data", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_article_id.state_forwards("test_article", new_state) grow_article_id.database_forwards("test_article", editor, project_state, new_state) state = new_state.clone() article = state.apps.get_model("test_article.Article") self.assertIsInstance(article._meta.pk, target_field) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_blog_id.state_forwards("test_blog", new_state) grow_blog_id.database_forwards("test_blog", editor, project_state, new_state) state = new_state.clone() blog = state.apps.get_model("test_blog.Blog") self.assertIsInstance(blog._meta.pk, target_field) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_big_data.state_forwards("fill_big_data", new_state) fill_big_data.database_forwards("fill_big_data", editor, project_state, new_state) def test_autofield__bigautofield_foreignfield_growth(self): """A field may be migrated from AutoField to BigAutoField.""" self._test_autofield_foreignfield_growth( models.AutoField, models.BigAutoField, 2 ** 33, ) def test_smallfield_autofield_foreignfield_growth(self): """A field may be migrated from SmallAutoField to AutoField.""" self._test_autofield_foreignfield_growth( models.SmallAutoField, models.AutoField, 2 ** 22, ) def test_smallfield_bigautofield_foreignfield_growth(self): """A field may be migrated from SmallAutoField to BigAutoField.""" self._test_autofield_foreignfield_growth( models.SmallAutoField, models.BigAutoField, 2 ** 33, ) def test_run_python_noop(self): """ #24098 - Tests no-op RunPython operations. """ project_state = ProjectState() new_state = project_state.clone() operation = migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) operation.database_backwards("test_runpython", editor, new_state, project_state) def test_separate_database_and_state(self): """ Tests the SeparateDatabaseAndState operation. 
""" project_state = self.set_up_test_model("test_separatedatabaseandstate") # Create the operation database_operation = migrations.RunSQL( "CREATE TABLE i_love_ponies (id int, special_thing int);", "DROP TABLE i_love_ponies;" ) state_operation = migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))]) operation = migrations.SeparateDatabaseAndState( state_operations=[state_operation], database_operations=[database_operation] ) self.assertEqual(operation.describe(), "Custom state/database change combination") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_separatedatabaseandstate", new_state) self.assertEqual(len(new_state.models["test_separatedatabaseandstate", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_separatedatabaseandstate", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_separatedatabaseandstate", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "SeparateDatabaseAndState") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["database_operations", "state_operations"]) def test_separate_database_and_state2(self): """ A complex SeparateDatabaseAndState operation: Multiple operations both for state and database. Verify the state dependencies within each list and that state ops don't affect the database. """ app_label = "test_separatedatabaseandstate2" project_state = self.set_up_test_model(app_label) # Create the operation database_operations = [ migrations.CreateModel( "ILovePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveponies"}, ), migrations.CreateModel( "ILoveMorePonies", # We use IntegerField and not AutoField because # the model is going to be deleted immediately # and with an AutoField this fails on Oracle [("id", models.IntegerField(primary_key=True))], options={"db_table": "ilovemoreponies"}, ), migrations.DeleteModel("ILoveMorePonies"), migrations.CreateModel( "ILoveEvenMorePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveevenmoreponies"}, ), ] state_operations = [ migrations.CreateModel( "SomethingElse", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingelse"}, ), migrations.DeleteModel("SomethingElse"), migrations.CreateModel( "SomethingCompletelyDifferent", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingcompletelydifferent"}, ), ] operation = migrations.SeparateDatabaseAndState( state_operations=state_operations, database_operations=database_operations, ) # Test the state alteration new_state = project_state.clone() operation.state_forwards(app_label, new_state) def assertModelsAndTables(after_db): # Tables and models exist, or don't, as they should: self.assertNotIn((app_label, "somethingelse"), new_state.models) self.assertEqual(len(new_state.models[app_label, "somethingcompletelydifferent"].fields), 1) self.assertNotIn((app_label, "iloveponiesonies"), new_state.models) self.assertNotIn((app_label, "ilovemoreponies"), new_state.models) self.assertNotIn((app_label, "iloveevenmoreponies"), new_state.models) 
self.assertTableNotExists("somethingelse") self.assertTableNotExists("somethingcompletelydifferent") self.assertTableNotExists("ilovemoreponies") if after_db: self.assertTableExists("iloveponies") self.assertTableExists("iloveevenmoreponies") else: self.assertTableNotExists("iloveponies") self.assertTableNotExists("iloveevenmoreponies") assertModelsAndTables(after_db=False) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) assertModelsAndTables(after_db=True) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) assertModelsAndTables(after_db=False) class SwappableOperationTests(OperationTestBase): """ Key operations ignore swappable models (we don't want to replicate all of them here, as the functionality is in a common base class anyway) """ available_apps = ['migrations'] @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_create_ignore_swapped(self): """ The CreateTable operation ignores swapped models. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], options={ "swappable": "TEST_SWAP_MODEL", }, ) # Test the state alteration (it should still be there!) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crigsw", new_state) self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crigsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crigsw", editor, project_state, new_state) self.assertTableNotExists("test_crigsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crigsw", editor, new_state, project_state) self.assertTableNotExists("test_crigsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_delete_ignore_swapped(self): """ Tests the DeleteModel operation ignores swapped models. """ operation = migrations.DeleteModel("Pony") project_state, new_state = self.make_test_state("test_dligsw", operation) # Test the database alteration self.assertTableNotExists("test_dligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dligsw", editor, project_state, new_state) self.assertTableNotExists("test_dligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dligsw", editor, new_state, project_state) self.assertTableNotExists("test_dligsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_add_field_ignore_swapped(self): """ Tests the AddField operation. 
""" # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=5), ) project_state, new_state = self.make_test_state("test_adfligsw", operation) # Test the database alteration self.assertTableNotExists("test_adfligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_adfligsw", editor, project_state, new_state) self.assertTableNotExists("test_adfligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adfligsw", editor, new_state, project_state) self.assertTableNotExists("test_adfligsw_pony") @override_settings(TEST_SWAP_MODEL='migrations.SomeFakeModel') def test_indexes_ignore_swapped(self): """ Add/RemoveIndex operations ignore swapped models. """ operation = migrations.AddIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state('test_adinigsw', operation) with connection.schema_editor() as editor: # No database queries should be run for swapped models operation.database_forwards('test_adinigsw', editor, project_state, new_state) operation.database_backwards('test_adinigsw', editor, new_state, project_state) operation = migrations.RemoveIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state("test_rminigsw", operation) with connection.schema_editor() as editor: operation.database_forwards('test_rminigsw', editor, project_state, new_state) operation.database_backwards('test_rminigsw', editor, new_state, project_state) class TestCreateModel(SimpleTestCase): def test_references_model_mixin(self): migrations.CreateModel( 'name', fields=[], bases=(Mixin, models.Model), ).references_model('other_model', 'migrations') class FieldOperationTests(SimpleTestCase): def test_references_model(self): operation = FieldOperation('MoDel', 'field', models.ForeignKey('Other', models.CASCADE)) # Model name match. self.assertIs(operation.references_model('mOdEl', 'migrations'), True) # Referenced field. self.assertIs(operation.references_model('oTher', 'migrations'), True) # Doesn't reference. 
self.assertIs(operation.references_model('Whatever', 'migrations'), False) def test_references_field_by_name(self): operation = FieldOperation('MoDel', 'field', models.BooleanField(default=False)) self.assertIs(operation.references_field('model', 'field', 'migrations'), True) def test_references_field_by_remote_field_model(self): operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE)) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_references_field_by_from_fields(self): operation = FieldOperation( 'Model', 'field', models.fields.related.ForeignObject('Other', models.CASCADE, ['from'], ['to']) ) self.assertIs(operation.references_field('Model', 'from', 'migrations'), True) self.assertIs(operation.references_field('Model', 'to', 'migrations'), False) self.assertIs(operation.references_field('Other', 'from', 'migrations'), False) self.assertIs(operation.references_field('Model', 'to', 'migrations'), False) def test_references_field_by_to_fields(self): operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE, to_field='field')) self.assertIs(operation.references_field('Other', 'field', 'migrations'), True) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), False) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_references_field_by_through(self): operation = FieldOperation('Model', 'field', models.ManyToManyField('Other', through='Through')) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_reference_field_by_through_fields(self): operation = FieldOperation( 'Model', 'field', models.ManyToManyField('Other', through='Through', through_fields=('first', 'second')) ) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), False) self.assertIs(operation.references_field('Through', 'first', 'migrations'), True) self.assertIs(operation.references_field('Through', 'second', 'migrations'), True)
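# ---------------------------------------------------------------------------
# Illustrative sketch only (not part of the test modules above): how the
# RunPython and RunSQL operations exercised by these tests are typically
# declared in a project's own migration file. The app label "library", the
# "Book" model, the "library_book" table name, the "0001_initial" dependency,
# and the populate/unpopulate helpers are hypothetical names used purely for
# this example; the Operation API itself (RunPython, RunSQL, RunSQL.noop,
# reverse_code, elidable) is the same one the tests above exercise.
# ---------------------------------------------------------------------------
from django.db import migrations


def populate_books(apps, schema_editor):
    # Use the historical model from the migration state, not a direct import,
    # so the migration keeps working as the model evolves.
    Book = apps.get_model('library', 'Book')
    Book.objects.create(title='Old Man and The Sea')


def unpopulate_books(apps, schema_editor):
    Book = apps.get_model('library', 'Book')
    Book.objects.filter(title='Old Man and The Sea').delete()


class Migration(migrations.Migration):

    dependencies = [
        ('library', '0001_initial'),
    ]

    operations = [
        # Reversible data migration; elidable=True lets an eventual squash
        # drop it, mirroring the elidable reduction checks in the tests above.
        migrations.RunPython(populate_books, unpopulate_books, elidable=True),
        # Raw SQL with a no-op reverse, mirroring the RunSQL.noop usage above.
        migrations.RunSQL(
            "UPDATE library_book SET title = UPPER(title);",
            migrations.RunSQL.noop,
        ),
    ]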
import datetime import os import re import unittest from unittest import mock from urllib.parse import parse_qsl, urljoin, urlparse import pytz try: import zoneinfo except ImportError: try: from backports import zoneinfo except ImportError: zoneinfo = None from django.contrib import admin from django.contrib.admin import AdminSite, ModelAdmin from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME from django.contrib.admin.models import ADDITION, DELETION, LogEntry from django.contrib.admin.options import TO_FIELD_VAR from django.contrib.admin.templatetags.admin_urls import add_preserved_filters from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.admin.utils import quote from django.contrib.admin.views.main import IS_POPUP_VAR from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename from django.contrib.auth.models import Group, Permission, User from django.contrib.contenttypes.models import ContentType from django.core import mail from django.core.checks import Error from django.core.files import temp as tempfile from django.forms.utils import ErrorList from django.template.response import TemplateResponse from django.test import ( TestCase, modify_settings, override_settings, skipUnlessDBFeature, ) from django.test.utils import override_script_prefix from django.urls import NoReverseMatch, resolve, reverse from django.utils import formats, translation from django.utils.cache import get_max_age from django.utils.encoding import iri_to_uri from django.utils.html import escape from django.utils.http import urlencode from . import customadmin from .admin import CityAdmin, site, site2 from .models import ( Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField, AdminOrderedModelMethod, Album, Answer, Answer2, Article, BarAccount, Book, Bookmark, Category, Chapter, ChapterXtra1, ChapterXtra2, Character, Child, Choice, City, Collector, Color, ComplexSortedPerson, CoverLetter, CustomArticle, CyclicOne, CyclicTwo, DooHickey, Employee, EmptyModel, Fabric, FancyDoodad, FieldOverridePost, FilteredManager, FooAccount, FoodDelivery, FunkyTag, Gallery, Grommet, Inquisition, Language, Link, MainPrepopulated, Media, ModelWithStringPrimaryKey, OtherStory, Paper, Parent, ParentWithDependentChildren, ParentWithUUIDPK, Person, Persona, Picture, Pizza, Plot, PlotDetails, PluggableSearchPerson, Podcast, Post, PrePopulatedPost, Promo, Question, ReadablePizza, ReadOnlyPizza, ReadOnlyRelatedField, Recommendation, Recommender, RelatedPrepopulated, RelatedWithUUIDPKModel, Report, Restaurant, RowLevelChangePermissionModel, SecretHideout, Section, ShortMessage, Simple, Song, State, Story, SuperSecretHideout, SuperVillain, Telegram, TitleTranslation, Topping, UnchangeableObject, UndeletableObject, UnorderedObject, UserProxy, Villain, Vodcast, Whatsit, Widget, Worker, WorkHour, ) ERROR_MESSAGE = "Please enter the correct username and password \ for a staff account. Note that both fields may be case-sensitive." MULTIPART_ENCTYPE = 'enctype="multipart/form-data"' def make_aware_datetimes(dt, iana_key): """Makes one aware datetime for each supported time zone provider.""" yield pytz.timezone(iana_key).localize(dt, is_dst=None) if zoneinfo is not None: yield dt.replace(tzinfo=zoneinfo.ZoneInfo(iana_key)) class AdminFieldExtractionMixin: """ Helper methods for extracting data from AdminForm. """ def get_admin_form_fields(self, response): """ Return a list of AdminFields for the AdminForm in the response. 
""" fields = [] for fieldset in response.context['adminform']: for field_line in fieldset: fields.extend(field_line) return fields def get_admin_readonly_fields(self, response): """ Return the readonly fields for the response's AdminForm. """ return [f for f in self.get_admin_form_fields(response) if f.is_readonly] def get_admin_readonly_field(self, response, field_name): """ Return the readonly field for the given field_name. """ admin_readonly_fields = self.get_admin_readonly_fields(response) for field in admin_readonly_fields: if field.field['name'] == field_name: return field @override_settings(ROOT_URLCONF='admin_views.urls', USE_I18N=True, USE_L10N=False, LANGUAGE_CODE='en') class AdminViewBasicTestCase(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1, title='Article 1', ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1, title='Article 2', ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.color1 = Color.objects.create(value='Red', warm=True) cls.color2 = Color.objects.create(value='Orange', warm=True) cls.color3 = Color.objects.create(value='Blue', warm=False) cls.color4 = Color.objects.create(value='Green', warm=False) cls.fab1 = Fabric.objects.create(surface='x') cls.fab2 = Fabric.objects.create(surface='y') cls.fab3 = Fabric.objects.create(surface='plain') cls.b1 = Book.objects.create(name='Book 1') cls.b2 = Book.objects.create(name='Book 2') cls.pro1 = Promo.objects.create(name='Promo 1', book=cls.b1) cls.pro1 = Promo.objects.create(name='Promo 2', book=cls.b2) cls.chap1 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b1) cls.chap2 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b1) cls.chap3 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b2) cls.chap4 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b2) cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='ChapterXtra1 1') cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='ChapterXtra1 2') Actor.objects.create(name='Palin', age=27) # Post data for edit inline cls.inline_post_data = { "name": "Test section", # inline data "article_set-TOTAL_FORMS": "6", "article_set-INITIAL_FORMS": "3", "article_set-MAX_NUM_FORMS": "0", "article_set-0-id": cls.a1.pk, # there is no title in database, give one here or formset will fail. 
"article_set-0-title": "Norske bostaver æøå skaper problemer", "article_set-0-content": "&lt;p&gt;Middle content&lt;/p&gt;", "article_set-0-date_0": "2008-03-18", "article_set-0-date_1": "11:54:58", "article_set-0-section": cls.s1.pk, "article_set-1-id": cls.a2.pk, "article_set-1-title": "Need a title.", "article_set-1-content": "&lt;p&gt;Oldest content&lt;/p&gt;", "article_set-1-date_0": "2000-03-18", "article_set-1-date_1": "11:54:58", "article_set-2-id": cls.a3.pk, "article_set-2-title": "Need a title.", "article_set-2-content": "&lt;p&gt;Newest content&lt;/p&gt;", "article_set-2-date_0": "2009-03-18", "article_set-2-date_1": "11:54:58", "article_set-3-id": "", "article_set-3-title": "", "article_set-3-content": "", "article_set-3-date_0": "", "article_set-3-date_1": "", "article_set-4-id": "", "article_set-4-title": "", "article_set-4-content": "", "article_set-4-date_0": "", "article_set-4-date_1": "", "article_set-5-id": "", "article_set-5-title": "", "article_set-5-content": "", "article_set-5-date_0": "", "article_set-5-date_1": "", } def setUp(self): self.client.force_login(self.superuser) def assertContentBefore(self, response, text1, text2, failing_msg=None): """ Testing utility asserting that text1 appears before text2 in response content. """ self.assertEqual(response.status_code, 200) self.assertLess( response.content.index(text1.encode()), response.content.index(text2.encode()), (failing_msg or '') + '\nResponse:\n' + response.content.decode(response.charset) ) class AdminViewBasicTest(AdminViewBasicTestCase): def test_trailing_slash_required(self): """ If you leave off the trailing slash, app should redirect and add it. """ add_url = reverse('admin:admin_views_article_add') response = self.client.get(add_url[:-1]) self.assertRedirects(response, add_url, status_code=301) def test_basic_add_GET(self): """ A smoke test to ensure GET on the add_view works. """ response = self.client.get(reverse('admin:admin_views_section_add')) self.assertIsInstance(response, TemplateResponse) self.assertEqual(response.status_code, 200) def test_add_with_GET_args(self): response = self.client.get(reverse('admin:admin_views_section_add'), {'name': 'My Section'}) self.assertContains( response, 'value="My Section"', msg_prefix="Couldn't find an input with the right value in the response" ) def test_basic_edit_GET(self): """ A smoke test to ensure GET on the change_view works. """ response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertIsInstance(response, TemplateResponse) self.assertEqual(response.status_code, 200) def test_basic_edit_GET_string_PK(self): """ GET on the change_view (when passing a string as the PK argument for a model with an integer PK field) redirects to the index page with a message saying the object doesn't exist. """ response = self.client.get(reverse('admin:admin_views_section_change', args=(quote("abc/<b>"),)), follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['section with ID “abc/<b>” doesn’t exist. Perhaps it was deleted?'] ) def test_basic_edit_GET_old_url_redirect(self): """ The change URL changed in Django 1.9, but the old one still redirects. 
""" response = self.client.get( reverse('admin:admin_views_section_change', args=(self.s1.pk,)).replace('change/', '') ) self.assertRedirects(response, reverse('admin:admin_views_section_change', args=(self.s1.pk,))) def test_basic_inheritance_GET_string_PK(self): """ GET on the change_view (for inherited models) redirects to the index page with a message saying the object doesn't exist. """ response = self.client.get(reverse('admin:admin_views_supervillain_change', args=('abc',)), follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['super villain with ID “abc” doesn’t exist. Perhaps it was deleted?'] ) def test_basic_add_POST(self): """ A smoke test to ensure POST on add_view works. """ post_data = { "name": "Another Section", # inline data "article_set-TOTAL_FORMS": "3", "article_set-INITIAL_FORMS": "0", "article_set-MAX_NUM_FORMS": "0", } response = self.client.post(reverse('admin:admin_views_section_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_popup_add_POST(self): """ Ensure http response from a popup is properly escaped. """ post_data = { '_popup': '1', 'title': 'title with a new\nline', 'content': 'some content', 'date_0': '2010-09-10', 'date_1': '14:55:39', } response = self.client.post(reverse('admin:admin_views_article_add'), post_data) self.assertContains(response, 'title with a new\\nline') def test_basic_edit_POST(self): """ A smoke test to ensure POST on edit_view works. """ url = reverse('admin:admin_views_section_change', args=(self.s1.pk,)) response = self.client.post(url, self.inline_post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_edit_save_as(self): """ Test "save as". """ post_data = self.inline_post_data.copy() post_data.update({ '_saveasnew': 'Save+as+new', "article_set-1-section": "1", "article_set-2-section": "1", "article_set-3-section": "1", "article_set-4-section": "1", "article_set-5-section": "1", }) response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_edit_save_as_delete_inline(self): """ Should be able to "Save as new" while also deleting an inline. """ post_data = self.inline_post_data.copy() post_data.update({ '_saveasnew': 'Save+as+new', "article_set-1-section": "1", "article_set-2-section": "1", "article_set-2-DELETE": "1", "article_set-3-section": "1", }) response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data) self.assertEqual(response.status_code, 302) # started with 3 articles, one was deleted. self.assertEqual(Section.objects.latest('id').article_set.count(), 2) def test_change_list_column_field_classes(self): response = self.client.get(reverse('admin:admin_views_article_changelist')) # callables display the callable name. self.assertContains(response, 'column-callable_year') self.assertContains(response, 'field-callable_year') # lambdas display as "lambda" + index that they appear in list_display. 
self.assertContains(response, 'column-lambda8') self.assertContains(response, 'field-lambda8') def test_change_list_sorting_callable(self): """ Ensure we can sort on a list_display field that is a callable (column 2 is callable_year in ArticleAdmin) """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 2}) self.assertContentBefore( response, 'Oldest content', 'Middle content', "Results of sorting on callable are out of order." ) self.assertContentBefore( response, 'Middle content', 'Newest content', "Results of sorting on callable are out of order." ) def test_change_list_sorting_property(self): """ Sort on a list_display field that is a property (column 10 is a property in Article model). """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 10}) self.assertContentBefore( response, 'Oldest content', 'Middle content', 'Results of sorting on property are out of order.', ) self.assertContentBefore( response, 'Middle content', 'Newest content', 'Results of sorting on property are out of order.', ) def test_change_list_sorting_callable_query_expression(self): """Query expressions may be used for admin_order_field.""" tests = [ ('order_by_expression', 9), ('order_by_f_expression', 12), ('order_by_orderby_expression', 13), ] for admin_order_field, index in tests: with self.subTest(admin_order_field): response = self.client.get( reverse('admin:admin_views_article_changelist'), {'o': index}, ) self.assertContentBefore( response, 'Oldest content', 'Middle content', 'Results of sorting on callable are out of order.' ) self.assertContentBefore( response, 'Middle content', 'Newest content', 'Results of sorting on callable are out of order.' ) def test_change_list_sorting_callable_query_expression_reverse(self): tests = [ ('order_by_expression', -9), ('order_by_f_expression', -12), ('order_by_orderby_expression', -13), ] for admin_order_field, index in tests: with self.subTest(admin_order_field): response = self.client.get( reverse('admin:admin_views_article_changelist'), {'o': index}, ) self.assertContentBefore( response, 'Middle content', 'Oldest content', 'Results of sorting on callable are out of order.' ) self.assertContentBefore( response, 'Newest content', 'Middle content', 'Results of sorting on callable are out of order.' ) def test_change_list_sorting_model(self): """ Ensure we can sort on a list_display field that is a Model method (column 3 is 'model_year' in ArticleAdmin) """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-3'}) self.assertContentBefore( response, 'Newest content', 'Middle content', "Results of sorting on Model method are out of order." ) self.assertContentBefore( response, 'Middle content', 'Oldest content', "Results of sorting on Model method are out of order." ) def test_change_list_sorting_model_admin(self): """ Ensure we can sort on a list_display field that is a ModelAdmin method (column 4 is 'modeladmin_year' in ArticleAdmin) """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '4'}) self.assertContentBefore( response, 'Oldest content', 'Middle content', "Results of sorting on ModelAdmin method are out of order." ) self.assertContentBefore( response, 'Middle content', 'Newest content', "Results of sorting on ModelAdmin method are out of order." ) def test_change_list_sorting_model_admin_reverse(self): """ Ensure we can sort on a list_display field that is a ModelAdmin method in reverse order (i.e. 
admin_order_field uses the '-' prefix) (column 6 is 'model_year_reverse' in ArticleAdmin) """ td = '<td class="field-model_property_year">%s</td>' td_2000, td_2008, td_2009 = td % 2000, td % 2008, td % 2009 response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '6'}) self.assertContentBefore( response, td_2009, td_2008, "Results of sorting on ModelAdmin method are out of order." ) self.assertContentBefore( response, td_2008, td_2000, "Results of sorting on ModelAdmin method are out of order." ) # Let's make sure the ordering is right and that we don't get a # FieldError when we change to descending order response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-6'}) self.assertContentBefore( response, td_2000, td_2008, "Results of sorting on ModelAdmin method are out of order." ) self.assertContentBefore( response, td_2008, td_2009, "Results of sorting on ModelAdmin method are out of order." ) def test_change_list_sorting_multiple(self): p1 = Person.objects.create(name="Chris", gender=1, alive=True) p2 = Person.objects.create(name="Chris", gender=2, alive=True) p3 = Person.objects.create(name="Bob", gender=1, alive=True) link1 = reverse('admin:admin_views_person_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_person_change', args=(p2.pk,)) link3 = reverse('admin:admin_views_person_change', args=(p3.pk,)) # Sort by name, gender response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '1.2'}) self.assertContentBefore(response, link3, link1) self.assertContentBefore(response, link1, link2) # Sort by gender descending, name response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '-2.1'}) self.assertContentBefore(response, link2, link3) self.assertContentBefore(response, link3, link1) def test_change_list_sorting_preserve_queryset_ordering(self): """ If no ordering is defined in `ModelAdmin.ordering` or in the query string, then the underlying order of the queryset should not be changed, even if it is defined in `Modeladmin.get_queryset()`. Refs #11868, #7309. 
""" p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80) p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70) p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60) link1 = reverse('admin:admin_views_person_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_person_change', args=(p2.pk,)) link3 = reverse('admin:admin_views_person_change', args=(p3.pk,)) response = self.client.get(reverse('admin:admin_views_person_changelist'), {}) self.assertContentBefore(response, link3, link2) self.assertContentBefore(response, link2, link1) def test_change_list_sorting_model_meta(self): # Test ordering on Model Meta is respected l1 = Language.objects.create(iso='ur', name='Urdu') l2 = Language.objects.create(iso='ar', name='Arabic') link1 = reverse('admin:admin_views_language_change', args=(quote(l1.pk),)) link2 = reverse('admin:admin_views_language_change', args=(quote(l2.pk),)) response = self.client.get(reverse('admin:admin_views_language_changelist'), {}) self.assertContentBefore(response, link2, link1) # Test we can override with query string response = self.client.get(reverse('admin:admin_views_language_changelist'), {'o': '-1'}) self.assertContentBefore(response, link1, link2) def test_change_list_sorting_override_model_admin(self): # Test ordering on Model Admin is respected, and overrides Model Meta dt = datetime.datetime.now() p1 = Podcast.objects.create(name="A", release_date=dt) p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10)) link1 = reverse('admin:admin_views_podcast_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_podcast_change', args=(p2.pk,)) response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {}) self.assertContentBefore(response, link1, link2) def test_multiple_sort_same_field(self): # The changelist displays the correct columns if two columns correspond # to the same ordering field. dt = datetime.datetime.now() p1 = Podcast.objects.create(name="A", release_date=dt) p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10)) link1 = reverse('admin:admin_views_podcast_change', args=(quote(p1.pk),)) link2 = reverse('admin:admin_views_podcast_change', args=(quote(p2.pk),)) response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {}) self.assertContentBefore(response, link1, link2) p1 = ComplexSortedPerson.objects.create(name="Bob", age=10) p2 = ComplexSortedPerson.objects.create(name="Amy", age=20) link1 = reverse('admin:admin_views_complexsortedperson_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_complexsortedperson_change', args=(p2.pk,)) response = self.client.get(reverse('admin:admin_views_complexsortedperson_changelist'), {}) # Should have 5 columns (including action checkbox col) self.assertContains(response, '<th scope="col"', count=5) self.assertContains(response, 'Name') self.assertContains(response, 'Colored name') # Check order self.assertContentBefore(response, 'Name', 'Colored name') # Check sorting - should be by name self.assertContentBefore(response, link2, link1) def test_sort_indicators_admin_order(self): """ The admin shows default sort indicators for all kinds of 'ordering' fields: field names, method on the model admin and model itself, and other callables. See #17252. 
""" models = [(AdminOrderedField, 'adminorderedfield'), (AdminOrderedModelMethod, 'adminorderedmodelmethod'), (AdminOrderedAdminMethod, 'adminorderedadminmethod'), (AdminOrderedCallable, 'adminorderedcallable')] for model, url in models: model.objects.create(stuff='The Last Item', order=3) model.objects.create(stuff='The First Item', order=1) model.objects.create(stuff='The Middle Item', order=2) response = self.client.get(reverse('admin:admin_views_%s_changelist' % url), {}) # Should have 3 columns including action checkbox col. self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url) # Check if the correct column was selected. 2 is the index of the # 'order' column in the model admin's 'list_display' with 0 being # the implicit 'action_checkbox' and 1 being the column 'stuff'. self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'}) # Check order of records. self.assertContentBefore(response, 'The First Item', 'The Middle Item') self.assertContentBefore(response, 'The Middle Item', 'The Last Item') def test_has_related_field_in_list_display_fk(self): """Joins shouldn't be performed for <FK>_id fields in list display.""" state = State.objects.create(name='Karnataka') City.objects.create(state=state, name='Bangalore') response = self.client.get(reverse('admin:admin_views_city_changelist'), {}) response.context['cl'].list_display = ['id', 'name', 'state'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), True) response.context['cl'].list_display = ['id', 'name', 'state_id'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), False) def test_has_related_field_in_list_display_o2o(self): """Joins shouldn't be performed for <O2O>_id fields in list display.""" media = Media.objects.create(name='Foo') Vodcast.objects.create(media=media) response = self.client.get(reverse('admin:admin_views_vodcast_changelist'), {}) response.context['cl'].list_display = ['media'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), True) response.context['cl'].list_display = ['media_id'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), False) def test_limited_filter(self): """Ensure admin changelist filters do not contain objects excluded via limit_choices_to. This also tests relation-spanning filters (e.g. 'color__value'). 
""" response = self.client.get(reverse('admin:admin_views_thing_changelist')) self.assertContains( response, '<div id="changelist-filter">', msg_prefix="Expected filter not found in changelist view" ) self.assertNotContains( response, '<a href="?color__id__exact=3">Blue</a>', msg_prefix="Changelist filter not correctly limited by limit_choices_to" ) def test_relation_spanning_filters(self): changelist_url = reverse('admin:admin_views_chapterxtra1_changelist') response = self.client.get(changelist_url) self.assertContains(response, '<div id="changelist-filter">') filters = { 'chap__id__exact': { 'values': [c.id for c in Chapter.objects.all()], 'test': lambda obj, value: obj.chap.id == value, }, 'chap__title': { 'values': [c.title for c in Chapter.objects.all()], 'test': lambda obj, value: obj.chap.title == value, }, 'chap__book__id__exact': { 'values': [b.id for b in Book.objects.all()], 'test': lambda obj, value: obj.chap.book.id == value, }, 'chap__book__name': { 'values': [b.name for b in Book.objects.all()], 'test': lambda obj, value: obj.chap.book.name == value, }, 'chap__book__promo__id__exact': { 'values': [p.id for p in Promo.objects.all()], 'test': lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists(), }, 'chap__book__promo__name': { 'values': [p.name for p in Promo.objects.all()], 'test': lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists(), }, # A forward relation (book) after a reverse relation (promo). 'guest_author__promo__book__id__exact': { 'values': [p.id for p in Book.objects.all()], 'test': lambda obj, value: obj.guest_author.promo_set.filter(book=value).exists(), }, } for filter_path, params in filters.items(): for value in params['values']: query_string = urlencode({filter_path: value}) # ensure filter link exists self.assertContains(response, '<a href="?%s"' % query_string) # ensure link works filtered_response = self.client.get('%s?%s' % (changelist_url, query_string)) self.assertEqual(filtered_response.status_code, 200) # ensure changelist contains only valid objects for obj in filtered_response.context['cl'].queryset.all(): self.assertTrue(params['test'](obj, value)) def test_incorrect_lookup_parameters(self): """Ensure incorrect lookup parameters are handled gracefully.""" changelist_url = reverse('admin:admin_views_thing_changelist') response = self.client.get(changelist_url, {'notarealfield': '5'}) self.assertRedirects(response, '%s?e=1' % changelist_url) # Spanning relationships through a nonexistent related object (Refs #16716) response = self.client.get(changelist_url, {'notarealfield__whatever': '5'}) self.assertRedirects(response, '%s?e=1' % changelist_url) response = self.client.get(changelist_url, {'color__id__exact': 'StringNotInteger!'}) self.assertRedirects(response, '%s?e=1' % changelist_url) # Regression test for #18530 response = self.client.get(changelist_url, {'pub_date__gte': 'foo'}) self.assertRedirects(response, '%s?e=1' % changelist_url) def test_isnull_lookups(self): """Ensure is_null is handled correctly.""" Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now()) changelist_url = reverse('admin:admin_views_article_changelist') response = self.client.get(changelist_url) self.assertContains(response, '4 articles') response = self.client.get(changelist_url, {'section__isnull': 'false'}) self.assertContains(response, '3 articles') response = self.client.get(changelist_url, {'section__isnull': '0'}) self.assertContains(response, '3 articles') response = 
self.client.get(changelist_url, {'section__isnull': 'true'}) self.assertContains(response, '1 article') response = self.client.get(changelist_url, {'section__isnull': '1'}) self.assertContains(response, '1 article') def test_logout_and_password_change_URLs(self): response = self.client.get(reverse('admin:admin_views_article_changelist')) self.assertContains(response, '<a href="%s">' % reverse('admin:logout')) self.assertContains(response, '<a href="%s">' % reverse('admin:password_change')) def test_named_group_field_choices_change_list(self): """ Ensures the admin changelist shows correct values in the relevant column for rows corresponding to instances of a model in which a named group has been used in the choices option of a field. """ link1 = reverse('admin:admin_views_fabric_change', args=(self.fab1.pk,)) link2 = reverse('admin:admin_views_fabric_change', args=(self.fab2.pk,)) response = self.client.get(reverse('admin:admin_views_fabric_changelist')) fail_msg = ( "Changelist table isn't showing the right human-readable values " "set by a model field 'choices' option named group." ) self.assertContains(response, '<a href="%s">Horizontal</a>' % link1, msg_prefix=fail_msg, html=True) self.assertContains(response, '<a href="%s">Vertical</a>' % link2, msg_prefix=fail_msg, html=True) def test_named_group_field_choices_filter(self): """ Ensures the filter UI shows correctly when at least one named group has been used in the choices option of a model field. """ response = self.client.get(reverse('admin:admin_views_fabric_changelist')) fail_msg = ( "Changelist filter isn't showing options contained inside a model " "field 'choices' option named group." ) self.assertContains(response, '<div id="changelist-filter">') self.assertContains( response, '<a href="?surface__exact=x" title="Horizontal">Horizontal</a>', msg_prefix=fail_msg, html=True ) self.assertContains( response, '<a href="?surface__exact=y" title="Vertical">Vertical</a>', msg_prefix=fail_msg, html=True ) def test_change_list_null_boolean_display(self): Post.objects.create(public=None) response = self.client.get(reverse('admin:admin_views_post_changelist')) self.assertContains(response, 'icon-unknown.svg') def test_display_decorator_with_boolean_and_empty_value(self): msg = ( 'The boolean and empty_value arguments to the @display decorator ' 'are mutually exclusive.' ) with self.assertRaisesMessage(ValueError, msg): class BookAdmin(admin.ModelAdmin): @admin.display(boolean=True, empty_value='(Missing)') def is_published(self, obj): return obj.publish_date is not None def test_i18n_language_non_english_default(self): """ Check if the JavaScript i18n view returns an empty language catalog if the default language is non-English but the selected language is English. See #13388 and #3594 for more details. """ with self.settings(LANGUAGE_CODE='fr'), translation.override('en-us'): response = self.client.get(reverse('admin:jsi18n')) self.assertNotContains(response, 'Choisir une heure') def test_i18n_language_non_english_fallback(self): """ Makes sure that the fallback language is still working properly in cases where the selected language cannot be found. 
""" with self.settings(LANGUAGE_CODE='fr'), translation.override('none'): response = self.client.get(reverse('admin:jsi18n')) self.assertContains(response, 'Choisir une heure') def test_jsi18n_with_context(self): response = self.client.get(reverse('admin-extra-context:jsi18n')) self.assertEqual(response.status_code, 200) def test_L10N_deactivated(self): """ Check if L10N is deactivated, the JavaScript i18n view doesn't return localized date/time formats. Refs #14824. """ with self.settings(LANGUAGE_CODE='ru', USE_L10N=False), translation.override('none'): response = self.client.get(reverse('admin:jsi18n')) self.assertNotContains(response, '%d.%m.%Y %H:%M:%S') self.assertContains(response, '%Y-%m-%d %H:%M:%S') def test_disallowed_filtering(self): with self.assertLogs('django.security.DisallowedModelAdminLookup', 'ERROR'): response = self.client.get( "%s?owner__email__startswith=fuzzy" % reverse('admin:admin_views_album_changelist') ) self.assertEqual(response.status_code, 400) # Filters are allowed if explicitly included in list_filter response = self.client.get("%s?color__value__startswith=red" % reverse('admin:admin_views_thing_changelist')) self.assertEqual(response.status_code, 200) response = self.client.get("%s?color__value=red" % reverse('admin:admin_views_thing_changelist')) self.assertEqual(response.status_code, 200) # Filters should be allowed if they involve a local field without the # need to allow them in list_filter or date_hierarchy. response = self.client.get("%s?age__gt=30" % reverse('admin:admin_views_person_changelist')) self.assertEqual(response.status_code, 200) e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123') e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124') WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1) WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2) response = self.client.get(reverse('admin:admin_views_workhour_changelist')) self.assertContains(response, 'employee__person_ptr__exact') response = self.client.get("%s?employee__person_ptr__exact=%d" % ( reverse('admin:admin_views_workhour_changelist'), e1.pk) ) self.assertEqual(response.status_code, 200) def test_disallowed_to_field(self): url = reverse('admin:admin_views_section_changelist') with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.get(url, {TO_FIELD_VAR: 'missing_field'}) self.assertEqual(response.status_code, 400) # Specifying a field that is not referred by any other model registered # to this admin site should raise an exception. with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) # #23839 - Primary key should always be allowed, even if the referenced model isn't registered. response = self.client.get(reverse('admin:admin_views_notreferenced_changelist'), {TO_FIELD_VAR: 'id'}) self.assertEqual(response.status_code, 200) # #23915 - Specifying a field referenced by another model though a m2m should be allowed. response = self.client.get(reverse('admin:admin_views_recipe_changelist'), {TO_FIELD_VAR: 'rname'}) self.assertEqual(response.status_code, 200) # #23604, #23915 - Specifying a field referenced through a reverse m2m relationship should be allowed. 
response = self.client.get(reverse('admin:admin_views_ingredient_changelist'), {TO_FIELD_VAR: 'iname'}) self.assertEqual(response.status_code, 200) # #23329 - Specifying a field that is not referred to by any other model directly registered # to this admin site but registered through inheritance should be allowed. response = self.client.get(reverse('admin:admin_views_referencedbyparent_changelist'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 200) # #23431 - Specifying a field that is only referred to by an inline of a registered # model should be allowed. response = self.client.get(reverse('admin:admin_views_referencedbyinline_changelist'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 200) # #25622 - Specifying a field of a model only referred to by a generic # relation should raise DisallowedModelAdminToField. url = reverse('admin:admin_views_referencedbygenrel_changelist') with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.get(url, {TO_FIELD_VAR: 'object_id'}) self.assertEqual(response.status_code, 400) # We also want to prevent the add, change, and delete views from # leaking a disallowed field value. with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.post(reverse('admin:admin_views_section_add'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) section = Section.objects.create() url = reverse('admin:admin_views_section_change', args=(section.pk,)) with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.post(url, {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) url = reverse('admin:admin_views_section_delete', args=(section.pk,)) with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.post(url, {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) def test_allowed_filtering_15103(self): """ Regression test for ticket 15103 - filtering on fields defined in a ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields can break. """ # Filters should be allowed if they are defined on a ForeignKey pointing to this model url = "%s?leader__name=Palin&leader__age=27" % reverse('admin:admin_views_inquisition_changelist') response = self.client.get(url) self.assertEqual(response.status_code, 200) def test_popup_dismiss_related(self): """ Regression test for ticket 20664 - ensure the pk is properly quoted. """ actor = Actor.objects.create(name="Palin", age=27) response = self.client.get("%s?%s" % (reverse('admin:admin_views_actor_changelist'), IS_POPUP_VAR)) self.assertContains(response, 'data-popup-opener="%s"' % actor.pk) def test_hide_change_password(self): """ Tests that the "change password" link in the admin is hidden if the User does not have a usable password set. (against 9bea85795705d015cdadc82c68b99196a8554f5c) """ user = User.objects.get(username='super') user.set_unusable_password() user.save() self.client.force_login(user) response = self.client.get(reverse('admin:index')) self.assertNotContains( response, reverse('admin:password_change'), msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.' ) def test_change_view_with_show_delete_extra_context(self): """ The 'show_delete' context variable in the admin's change view controls the display of the delete button. 
""" instance = UndeletableObject.objects.create(name='foo') response = self.client.get(reverse('admin:admin_views_undeletableobject_change', args=(instance.pk,))) self.assertNotContains(response, 'deletelink') def test_change_view_logs_m2m_field_changes(self): """Changes to ManyToManyFields are included in the object's history.""" pizza = ReadablePizza.objects.create(name='Cheese') cheese = Topping.objects.create(name='cheese') post_data = {'name': pizza.name, 'toppings': [cheese.pk]} response = self.client.post(reverse('admin:admin_views_readablepizza_change', args=(pizza.pk,)), post_data) self.assertRedirects(response, reverse('admin:admin_views_readablepizza_changelist')) pizza_ctype = ContentType.objects.get_for_model(ReadablePizza, for_concrete_model=False) log = LogEntry.objects.filter(content_type=pizza_ctype, object_id=pizza.pk).first() self.assertEqual(log.get_change_message(), 'Changed Toppings.') def test_allows_attributeerror_to_bubble_up(self): """ AttributeErrors are allowed to bubble when raised inside a change list view. Requires a model to be created so there's something to display. Refs: #16655, #18593, and #18747 """ Simple.objects.create() with self.assertRaises(AttributeError): self.client.get(reverse('admin:admin_views_simple_changelist')) def test_changelist_with_no_change_url(self): """ ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url for change_view is removed from get_urls (#20934). """ o = UnchangeableObject.objects.create() response = self.client.get(reverse('admin:admin_views_unchangeableobject_changelist')) # Check the format of the shown object -- shouldn't contain a change link self.assertContains(response, '<th class="field-__str__">%s</th>' % o, html=True) def test_invalid_appindex_url(self): """ #21056 -- URL reversing shouldn't work for nonexistent apps. """ good_url = '/test_admin/admin/admin_views/' confirm_good_url = reverse('admin:app_list', kwargs={'app_label': 'admin_views'}) self.assertEqual(good_url, confirm_good_url) with self.assertRaises(NoReverseMatch): reverse('admin:app_list', kwargs={'app_label': 'this_should_fail'}) with self.assertRaises(NoReverseMatch): reverse('admin:app_list', args=('admin_views2',)) def test_resolve_admin_views(self): index_match = resolve('/test_admin/admin4/') list_match = resolve('/test_admin/admin4/auth/user/') self.assertIs(index_match.func.admin_site, customadmin.simple_site) self.assertIsInstance(list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin) def test_adminsite_display_site_url(self): """ #13749 - Admin should display link to front-end site 'View site' """ url = reverse('admin:index') response = self.client.get(url) self.assertEqual(response.context['site_url'], '/my-site-url/') self.assertContains(response, '<a href="/my-site-url/">View site</a>') def test_date_hierarchy_empty_queryset(self): self.assertIs(Question.objects.exists(), False) response = self.client.get(reverse('admin:admin_views_answer2_changelist')) self.assertEqual(response.status_code, 200) @override_settings(TIME_ZONE='America/Sao_Paulo', USE_TZ=True) def test_date_hierarchy_timezone_dst(self): # This datetime doesn't exist in this timezone due to DST. 
for date in make_aware_datetimes(datetime.datetime(2016, 10, 16, 15), 'America/Sao_Paulo'): with self.subTest(repr(date.tzinfo)): q = Question.objects.create(question='Why?', expires=date) Answer2.objects.create(question=q, answer='Because.') response = self.client.get(reverse('admin:admin_views_answer2_changelist')) self.assertContains(response, 'question__expires__day=16') self.assertContains(response, 'question__expires__month=10') self.assertContains(response, 'question__expires__year=2016') @override_settings(TIME_ZONE='America/Los_Angeles', USE_TZ=True) def test_date_hierarchy_local_date_differ_from_utc(self): # This datetime is 2017-01-01 in UTC. for date in make_aware_datetimes(datetime.datetime(2016, 12, 31, 16), 'America/Los_Angeles'): with self.subTest(repr(date.tzinfo)): q = Question.objects.create(question='Why?', expires=date) Answer2.objects.create(question=q, answer='Because.') response = self.client.get(reverse('admin:admin_views_answer2_changelist')) self.assertContains(response, 'question__expires__day=31') self.assertContains(response, 'question__expires__month=12') self.assertContains(response, 'question__expires__year=2016') def test_sortable_by_columns_subset(self): expected_sortable_fields = ('date', 'callable_year') expected_not_sortable_fields = ( 'content', 'model_year', 'modeladmin_year', 'model_year_reversed', 'section', ) response = self.client.get(reverse('admin6:admin_views_article_changelist')) for field_name in expected_sortable_fields: self.assertContains(response, '<th scope="col" class="sortable column-%s">' % field_name) for field_name in expected_not_sortable_fields: self.assertContains(response, '<th scope="col" class="column-%s">' % field_name) def test_get_sortable_by_columns_subset(self): response = self.client.get(reverse('admin6:admin_views_actor_changelist')) self.assertContains(response, '<th scope="col" class="sortable column-age">') self.assertContains(response, '<th scope="col" class="column-name">') def test_sortable_by_no_column(self): expected_not_sortable_fields = ('title', 'book') response = self.client.get(reverse('admin6:admin_views_chapter_changelist')) for field_name in expected_not_sortable_fields: self.assertContains(response, '<th scope="col" class="column-%s">' % field_name) self.assertNotContains(response, '<th scope="col" class="sortable column') def test_get_sortable_by_no_column(self): response = self.client.get(reverse('admin6:admin_views_color_changelist')) self.assertContains(response, '<th scope="col" class="column-value">') self.assertNotContains(response, '<th scope="col" class="sortable column') def test_app_index_context(self): response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertContains( response, '<title>Admin_Views administration | Django site admin</title>', ) self.assertEqual(response.context['title'], 'Admin_Views administration') self.assertEqual(response.context['app_label'], 'admin_views') def test_change_view_subtitle_per_object(self): response = self.client.get( reverse('admin:admin_views_article_change', args=(self.a1.pk,)), ) self.assertContains( response, '<title>Article 1 | Change article | Django site admin</title>', ) self.assertContains(response, '<h1>Change article</h1>') self.assertContains(response, '<h2>Article 1</h2>') response = self.client.get( reverse('admin:admin_views_article_change', args=(self.a2.pk,)), ) self.assertContains( response, '<title>Article 2 | Change article | Django site admin</title>', ) self.assertContains(response, '<h1>Change 
article</h1>') self.assertContains(response, '<h2>Article 2</h2>') def test_view_subtitle_per_object(self): viewuser = User.objects.create_user( username='viewuser', password='secret', is_staff=True, ) viewuser.user_permissions.add( get_perm(Article, get_permission_codename('view', Article._meta)), ) self.client.force_login(viewuser) response = self.client.get( reverse('admin:admin_views_article_change', args=(self.a1.pk,)), ) self.assertContains( response, '<title>Article 1 | View article | Django site admin</title>', ) self.assertContains(response, '<h1>View article</h1>') self.assertContains(response, '<h2>Article 1</h2>') response = self.client.get( reverse('admin:admin_views_article_change', args=(self.a2.pk,)), ) self.assertContains( response, '<title>Article 2 | View article | Django site admin</title>', ) self.assertContains(response, '<h1>View article</h1>') self.assertContains(response, '<h2>Article 2</h2>') def test_formset_kwargs_can_be_overridden(self): response = self.client.get(reverse('admin:admin_views_city_add')) self.assertContains(response, 'overridden_name') def test_render_views_no_subtitle(self): tests = [ reverse('admin:index'), reverse('admin:app_list', args=('admin_views',)), reverse('admin:admin_views_article_delete', args=(self.a1.pk,)), reverse('admin:admin_views_article_history', args=(self.a1.pk,)), ] for url in tests: with self.subTest(url=url): with self.assertNoLogs('django.template', 'DEBUG'): self.client.get(url) @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', # Put this app's and the shared tests templates dirs in DIRS to take precedence # over the admin's templates dir. 'DIRS': [ os.path.join(os.path.dirname(__file__), 'templates'), os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates'), ], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }]) class AdminCustomTemplateTests(AdminViewBasicTestCase): def test_custom_model_admin_templates(self): # Test custom change list template with custom extra context response = self.client.get(reverse('admin:admin_views_customarticle_changelist')) self.assertContains(response, "var hello = 'Hello!';") self.assertTemplateUsed(response, 'custom_admin/change_list.html') # Test custom add form template response = self.client.get(reverse('admin:admin_views_customarticle_add')) self.assertTemplateUsed(response, 'custom_admin/add_form.html') # Add an article so we can test delete, change, and history views post = self.client.post(reverse('admin:admin_views_customarticle_add'), { 'content': '<p>great article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39' }) self.assertRedirects(post, reverse('admin:admin_views_customarticle_changelist')) self.assertEqual(CustomArticle.objects.all().count(), 1) article_pk = CustomArticle.objects.all()[0].pk # Test custom delete, change, and object history templates # Test custom change form template response = self.client.get(reverse('admin:admin_views_customarticle_change', args=(article_pk,))) self.assertTemplateUsed(response, 'custom_admin/change_form.html') response = self.client.get(reverse('admin:admin_views_customarticle_delete', args=(article_pk,))) self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html') response = self.client.post(reverse('admin:admin_views_customarticle_changelist'), data={ 'index': 
0, 'action': ['delete_selected'], '_selected_action': ['1'], }) self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html') response = self.client.get(reverse('admin:admin_views_customarticle_history', args=(article_pk,))) self.assertTemplateUsed(response, 'custom_admin/object_history.html') # A custom popup response template may be specified by # ModelAdmin.popup_response_template. response = self.client.post(reverse('admin:admin_views_customarticle_add') + '?%s=1' % IS_POPUP_VAR, { 'content': '<p>great article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', IS_POPUP_VAR: '1' }) self.assertEqual(response.template_name, 'custom_admin/popup_response.html') def test_extended_bodyclass_template_change_form(self): """ The admin/change_form.html template uses block.super in the bodyclass block. """ response = self.client.get(reverse('admin:admin_views_section_add')) self.assertContains(response, 'bodyclass_consistency_check ') def test_change_password_template(self): user = User.objects.get(username='super') response = self.client.get(reverse('admin:auth_user_password_change', args=(user.id,))) # The auth/user/change_password.html template uses block.super in the # bodyclass block. self.assertContains(response, 'bodyclass_consistency_check ') # When a site has multiple passwords in the browser's password manager, # a browser pop-up asks which user the new password is for. To prevent # this, the username is added to the change password form. self.assertContains(response, '<input type="text" name="username" value="super" class="hidden">') def test_extended_bodyclass_template_index(self): """ The admin/index.html template uses block.super in the bodyclass block. """ response = self.client.get(reverse('admin:index')) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_change_list(self): """ The admin/change_list.html template uses block.super in the bodyclass block. """ response = self.client.get(reverse('admin:admin_views_article_changelist')) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_template_login(self): """ The admin/login.html template uses block.super in the bodyclass block. """ self.client.logout() response = self.client.get(reverse('admin:login')) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_template_delete_confirmation(self): """ The admin/delete_confirmation.html template uses block.super in the bodyclass block. """ group = Group.objects.create(name="foogroup") response = self.client.get(reverse('admin:auth_group_delete', args=(group.id,))) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_template_delete_selected_confirmation(self): """ The admin/delete_selected_confirmation.html template uses block.super in the bodyclass block. """ group = Group.objects.create(name="foogroup") post_data = { 'action': 'delete_selected', 'selected_across': '0', 'index': '0', '_selected_action': group.id } response = self.client.post(reverse('admin:auth_group_changelist'), post_data) self.assertEqual(response.context['site_header'], 'Django administration') self.assertContains(response, 'bodyclass_consistency_check ') def test_filter_with_custom_template(self): """ A custom template can be used to render an admin filter. 
""" response = self.client.get(reverse('admin:admin_views_color2_changelist')) self.assertTemplateUsed(response, 'custom_filter_template.html') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewFormUrlTest(TestCase): current_app = "admin3" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def setUp(self): self.client.force_login(self.superuser) def test_change_form_URL_has_correct_value(self): """ change_view has form_url in response.context """ response = self.client.get( reverse('admin:admin_views_section_change', args=(self.s1.pk,), current_app=self.current_app) ) self.assertIn('form_url', response.context, msg='form_url not present in response.context') self.assertEqual(response.context['form_url'], 'pony') def test_initial_data_can_be_overridden(self): """ The behavior for setting initial form data can be overridden in the ModelAdmin class. Usually, the initial value is set via the GET params. """ response = self.client.get( reverse('admin:admin_views_restaurant_add', current_app=self.current_app), {'name': 'test_value'} ) # this would be the usual behaviour self.assertNotContains(response, 'value="test_value"') # this is the overridden behaviour self.assertContains(response, 'value="overridden_value"') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminJavaScriptTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_js_minified_only_if_debug_is_false(self): """ The minified versions of the JS files are only used when DEBUG is False. 
""" with override_settings(DEBUG=False): response = self.client.get(reverse('admin:admin_views_section_add')) self.assertNotContains(response, 'vendor/jquery/jquery.js') self.assertContains(response, 'vendor/jquery/jquery.min.js') self.assertContains(response, 'prepopulate.js') self.assertContains(response, 'actions.js') self.assertContains(response, 'collapse.js') self.assertContains(response, 'inlines.js') with override_settings(DEBUG=True): response = self.client.get(reverse('admin:admin_views_section_add')) self.assertContains(response, 'vendor/jquery/jquery.js') self.assertNotContains(response, 'vendor/jquery/jquery.min.js') self.assertContains(response, 'prepopulate.js') self.assertContains(response, 'actions.js') self.assertContains(response, 'collapse.js') self.assertContains(response, 'inlines.js') @override_settings(ROOT_URLCONF='admin_views.urls') class SaveAsTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) def setUp(self): self.client.force_login(self.superuser) def test_save_as_duplication(self): """'save as' creates a new person""" post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42} response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), post_data) self.assertEqual(len(Person.objects.filter(name='John M')), 1) self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1) new_person = Person.objects.latest('id') self.assertRedirects(response, reverse('admin:admin_views_person_change', args=(new_person.pk,))) def test_save_as_continue_false(self): """ Saving a new object using "Save as new" redirects to the changelist instead of the change view when ModelAdmin.save_as_continue=False. """ post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42} url = reverse('admin:admin_views_person_change', args=(self.per1.pk,), current_app=site2.name) response = self.client.post(url, post_data) self.assertEqual(len(Person.objects.filter(name='John M')), 1) self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1) self.assertRedirects(response, reverse('admin:admin_views_person_changelist', current_app=site2.name)) def test_save_as_new_with_validation_errors(self): """ When you click "Save as new" and have a validation error, you only see the "Save as new" button and not the other save buttons, and that only the "Save as" button is visible. 
""" response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), { '_saveasnew': '', 'gender': 'invalid', '_addanother': 'fail', }) self.assertContains(response, 'Please correct the errors below.') self.assertFalse(response.context['show_save_and_add_another']) self.assertFalse(response.context['show_save_and_continue']) self.assertTrue(response.context['show_save_as_new']) def test_save_as_new_with_validation_errors_with_inlines(self): parent = Parent.objects.create(name='Father') child = Child.objects.create(parent=parent, name='Child') response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), { '_saveasnew': 'Save as new', 'child_set-0-parent': parent.pk, 'child_set-0-id': child.pk, 'child_set-0-name': 'Child', 'child_set-INITIAL_FORMS': 1, 'child_set-MAX_NUM_FORMS': 1000, 'child_set-MIN_NUM_FORMS': 0, 'child_set-TOTAL_FORMS': 4, 'name': '_invalid', }) self.assertContains(response, 'Please correct the error below.') self.assertFalse(response.context['show_save_and_add_another']) self.assertFalse(response.context['show_save_and_continue']) self.assertTrue(response.context['show_save_as_new']) def test_save_as_new_with_inlines_with_validation_errors(self): parent = Parent.objects.create(name='Father') child = Child.objects.create(parent=parent, name='Child') response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), { '_saveasnew': 'Save as new', 'child_set-0-parent': parent.pk, 'child_set-0-id': child.pk, 'child_set-0-name': '_invalid', 'child_set-INITIAL_FORMS': 1, 'child_set-MAX_NUM_FORMS': 1000, 'child_set-MIN_NUM_FORMS': 0, 'child_set-TOTAL_FORMS': 4, 'name': 'Father', }) self.assertContains(response, 'Please correct the error below.') self.assertFalse(response.context['show_save_and_add_another']) self.assertFalse(response.context['show_save_and_continue']) self.assertTrue(response.context['show_save_as_new']) @override_settings(ROOT_URLCONF='admin_views.urls') class CustomModelAdminTest(AdminViewBasicTestCase): def test_custom_admin_site_login_form(self): self.client.logout() response = self.client.get(reverse('admin2:index'), follow=True) self.assertIsInstance(response, TemplateResponse) self.assertEqual(response.status_code, 200) login = self.client.post(reverse('admin2:login'), { REDIRECT_FIELD_NAME: reverse('admin2:index'), 'username': 'customform', 'password': 'secret', }, follow=True) self.assertIsInstance(login, TemplateResponse) self.assertContains(login, 'custom form error') self.assertContains(login, 'path/to/media.css') def test_custom_admin_site_login_template(self): self.client.logout() response = self.client.get(reverse('admin2:index'), follow=True) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/login.html') self.assertContains(response, 'Hello from a custom login template') def test_custom_admin_site_logout_template(self): response = self.client.get(reverse('admin2:logout')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/logout.html') self.assertContains(response, 'Hello from a custom logout template') def test_custom_admin_site_index_view_and_template(self): response = self.client.get(reverse('admin2:index')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/index.html') self.assertContains(response, 'Hello from a custom index template *bar*') def test_custom_admin_site_app_index_view_and_template(self): response = 
self.client.get(reverse('admin2:app_list', args=('admin_views',))) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/app_index.html') self.assertContains(response, 'Hello from a custom app_index template') def test_custom_admin_site_password_change_template(self): response = self.client.get(reverse('admin2:password_change')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/password_change_form.html') self.assertContains(response, 'Hello from a custom password change form template') def test_custom_admin_site_password_change_with_extra_context(self): response = self.client.get(reverse('admin2:password_change')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/password_change_form.html') self.assertContains(response, 'eggs') def test_custom_admin_site_password_change_done_template(self): response = self.client.get(reverse('admin2:password_change_done')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/password_change_done.html') self.assertContains(response, 'Hello from a custom password change done template') def test_custom_admin_site_view(self): self.client.force_login(self.superuser) response = self.client.get(reverse('admin2:my_view')) self.assertEqual(response.content, b"Django is a magical pony!") def test_pwd_change_custom_template(self): self.client.force_login(self.superuser) su = User.objects.get(username='super') response = self.client.get(reverse('admin4:auth_user_password_change', args=(su.pk,))) self.assertEqual(response.status_code, 200) def get_perm(Model, codename): """Return the permission object, for the Model""" ct = ContentType.objects.get_for_model(Model, for_concrete_model=False) return Permission.objects.get(content_type=ct, codename=codename) @override_settings( ROOT_URLCONF='admin_views.urls', # Test with the admin's documented list of required context processors. 
TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }], ) class AdminViewPermissionsTest(TestCase): """Tests for Admin Views Permissions.""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.viewuser = User.objects.create_user(username='viewuser', password='secret', is_staff=True) cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True) cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True) cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret') cls.nostaffuser = User.objects.create_user(username='nostaff', password='secret') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1, another_section=cls.s1, ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') # Setup permissions, for our users who can add, change, and delete. opts = Article._meta # User who can view Articles cls.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('view', opts))) # User who can add Articles cls.adduser.user_permissions.add(get_perm(Article, get_permission_codename('add', opts))) # User who can change Articles cls.changeuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts))) cls.nostaffuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts))) # User who can delete Articles cls.deleteuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', opts))) cls.deleteuser.user_permissions.add(get_perm(Section, get_permission_codename('delete', Section._meta))) # login POST dicts cls.index_url = reverse('admin:index') cls.super_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'super', 'password': 'secret', } cls.super_email_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': '[email protected]', 'password': 'secret', } cls.super_email_bad_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': '[email protected]', 'password': 'notsecret', } cls.adduser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'adduser', 'password': 'secret', } cls.changeuser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'changeuser', 'password': 'secret', } cls.deleteuser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'deleteuser', 'password': 'secret', } cls.nostaff_login = { REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'), 'username': 'nostaff', 'password': 'secret', } cls.joepublic_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'joepublic', 'password': 'secret', } cls.viewuser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'viewuser', 'password': 'secret', } cls.no_username_login = { REDIRECT_FIELD_NAME: 
cls.index_url, 'password': 'secret', } def test_login(self): """ Make sure only staff members can log in. Successful posts to the login page will redirect to the original url. Unsuccessful attempts will continue to render the login page with a 200 status code. """ login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index')) # Super User response = self.client.get(self.index_url) self.assertRedirects(response, login_url) login = self.client.post(login_url, self.super_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Test if user enters email address response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.super_email_login) self.assertContains(login, ERROR_MESSAGE) # only correct passwords get a username hint login = self.client.post(login_url, self.super_email_bad_login) self.assertContains(login, ERROR_MESSAGE) new_user = User(username='jondoe', password='secret', email='[email protected]') new_user.save() # check to ensure if there are multiple email addresses a user doesn't get a 500 login = self.client.post(login_url, self.super_email_login) self.assertContains(login, ERROR_MESSAGE) # View User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.viewuser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Add User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.adduser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Change User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.changeuser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Delete User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.deleteuser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Regular User should not be able to login. response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.joepublic_login) self.assertContains(login, ERROR_MESSAGE) # Requests without username should not return 500 errors. response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.no_username_login) self.assertEqual(login.status_code, 200) self.assertFormError(login, 'form', 'username', ['This field is required.']) def test_login_redirect_for_direct_get(self): """ Login redirect should be to the admin index page when going directly to /admin/login/. """ response = self.client.get(reverse('admin:login')) self.assertEqual(response.status_code, 200) self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse('admin:index')) def test_login_has_permission(self): # Regular User should not be able to login. 
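# On the has_permission_admin site, access is governed by permissions rather than is_staff (as the assertions below show): joepublic, who has no permissions, is rejected with a 'permission denied' error, while the non-staff 'nostaff' user, who holds a change permission, logs in successfully.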
response = self.client.get(reverse('has_permission_admin:index')) self.assertEqual(response.status_code, 302) login = self.client.post(reverse('has_permission_admin:login'), self.joepublic_login) self.assertContains(login, 'permission denied') # User with permissions should be able to login. response = self.client.get(reverse('has_permission_admin:index')) self.assertEqual(response.status_code, 302) login = self.client.post(reverse('has_permission_admin:login'), self.nostaff_login) self.assertRedirects(login, reverse('has_permission_admin:index')) self.assertFalse(login.context) self.client.get(reverse('has_permission_admin:logout')) # Staff should be able to login. response = self.client.get(reverse('has_permission_admin:index')) self.assertEqual(response.status_code, 302) login = self.client.post(reverse('has_permission_admin:login'), { REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'), 'username': 'deleteuser', 'password': 'secret', }) self.assertRedirects(login, reverse('has_permission_admin:index')) self.assertFalse(login.context) self.client.get(reverse('has_permission_admin:logout')) def test_login_successfully_redirects_to_original_URL(self): response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) query_string = 'the-answer=42' redirect_url = '%s?%s' % (self.index_url, query_string) new_next = {REDIRECT_FIELD_NAME: redirect_url} post_data = self.super_login.copy() post_data.pop(REDIRECT_FIELD_NAME) login = self.client.post( '%s?%s' % (reverse('admin:login'), urlencode(new_next)), post_data) self.assertRedirects(login, redirect_url) def test_double_login_is_not_allowed(self): """Regression test for #19327""" login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index')) response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) # Establish a valid admin session login = self.client.post(login_url, self.super_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) # Logging in with non-admin user fails login = self.client.post(login_url, self.joepublic_login) self.assertContains(login, ERROR_MESSAGE) # Establish a valid admin session login = self.client.post(login_url, self.super_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) # Logging in with admin user while already logged in login = self.client.post(login_url, self.super_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) def test_login_page_notice_for_non_staff_users(self): """ A logged-in non-staff user trying to access the admin index should be presented with the login page and a hint indicating that the current user doesn't have access to it. 
""" hint_template = 'You are authenticated as {}' # Anonymous user should not be shown the hint response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'login-form') self.assertNotContains(response, hint_template.format(''), status_code=200) # Non-staff user should be shown the hint self.client.force_login(self.nostaffuser) response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'login-form') self.assertContains(response, hint_template.format(self.nostaffuser.username), status_code=200) def test_add_view(self): """Test add view restricts access and actually adds items.""" add_dict = { 'title': 'Døm ikke', 'content': '<p>great article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', 'section': self.s1.pk, } # Change User should not have access to add articles self.client.force_login(self.changeuser) # make sure the view removes test cookie self.assertIs(self.client.session.test_cookie_worked(), False) response = self.client.get(reverse('admin:admin_views_article_add')) self.assertEqual(response.status_code, 403) # Try POST just to make sure post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) self.client.get(reverse('admin:logout')) # View User should not have access to add articles self.client.force_login(self.viewuser) response = self.client.get(reverse('admin:admin_views_article_add')) self.assertEqual(response.status_code, 403) # Try POST just to make sure post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) # Now give the user permission to add but not change. self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('add', Article._meta))) response = self.client.get(reverse('admin:admin_views_article_add')) self.assertEqual(response.context['title'], 'Add article') self.assertContains(response, '<title>Add article | Django site admin</title>') self.assertContains(response, '<input type="submit" value="Save and view" name="_continue">') post = self.client.post(reverse('admin:admin_views_article_add'), add_dict, follow=False) self.assertEqual(post.status_code, 302) self.assertEqual(Article.objects.count(), 4) article = Article.objects.latest('pk') response = self.client.get(reverse('admin:admin_views_article_change', args=(article.pk,))) self.assertContains(response, '<li class="success">The article “Døm ikke” was added successfully.</li>') article.delete() self.client.get(reverse('admin:logout')) # Add user may login and POST to add view, then redirect to admin root self.client.force_login(self.adduser) addpage = self.client.get(reverse('admin:admin_views_article_add')) change_list_link = '&rsaquo; <a href="%s">Articles</a>' % reverse('admin:admin_views_article_changelist') self.assertNotContains( addpage, change_list_link, msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.' 
) post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertRedirects(post, self.index_url) self.assertEqual(Article.objects.count(), 4) self.assertEqual(len(mail.outbox), 2) self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object') self.client.get(reverse('admin:logout')) # The addition was logged correctly addition_log = LogEntry.objects.all()[0] new_article = Article.objects.last() article_ct = ContentType.objects.get_for_model(Article) self.assertEqual(addition_log.user_id, self.adduser.pk) self.assertEqual(addition_log.content_type_id, article_ct.pk) self.assertEqual(addition_log.object_id, str(new_article.pk)) self.assertEqual(addition_log.object_repr, "Døm ikke") self.assertEqual(addition_log.action_flag, ADDITION) self.assertEqual(addition_log.get_change_message(), "Added.") # Super can add too, but is redirected to the change list view self.client.force_login(self.superuser) addpage = self.client.get(reverse('admin:admin_views_article_add')) self.assertContains( addpage, change_list_link, msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.' ) post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertRedirects(post, reverse('admin:admin_views_article_changelist')) self.assertEqual(Article.objects.count(), 5) self.client.get(reverse('admin:logout')) # 8509 - if a normal user is already logged in, it is possible # to change user into the superuser without error self.client.force_login(self.joepublicuser) # Check and make sure that if user expires, data still persists self.client.force_login(self.superuser) # make sure the view removes test cookie self.assertIs(self.client.session.test_cookie_worked(), False) @mock.patch('django.contrib.admin.options.InlineModelAdmin.has_change_permission') def test_add_view_with_view_only_inlines(self, has_change_permission): """User with add permission to a section but view-only for inlines.""" self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('add', Section._meta))) self.client.force_login(self.viewuser) # Valid POST creates a new section. data = { 'name': 'New obj', 'article_set-TOTAL_FORMS': 0, 'article_set-INITIAL_FORMS': 0, } response = self.client.post(reverse('admin:admin_views_section_add'), data) self.assertRedirects(response, reverse('admin:index')) self.assertEqual(Section.objects.latest('id').name, data['name']) # InlineModelAdmin.has_change_permission()'s obj argument is always # None during object add. self.assertEqual([obj for (request, obj), _ in has_change_permission.call_args_list], [None, None]) def test_change_view(self): """Change view should restrict access and allow users to edit items.""" change_dict = { 'title': 'Ikke fordømt', 'content': '<p>edited article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', 'section': self.s1.pk, } article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,)) article_changelist_url = reverse('admin:admin_views_article_changelist') # add user should not be able to view the list of article or change any of them self.client.force_login(self.adduser) response = self.client.get(article_changelist_url) self.assertEqual(response.status_code, 403) response = self.client.get(article_change_url) self.assertEqual(response.status_code, 403) post = self.client.post(article_change_url, change_dict) self.assertEqual(post.status_code, 403) self.client.get(reverse('admin:logout')) # view user can view articles but not make changes. 
self.client.force_login(self.viewuser) response = self.client.get(article_changelist_url) self.assertContains( response, '<title>Select article to view | Django site admin</title>', ) self.assertContains(response, '<h1>Select article to view</h1>') self.assertEqual(response.context['title'], 'Select article to view') response = self.client.get(article_change_url) self.assertContains(response, '<title>View article | Django site admin</title>') self.assertContains(response, '<h1>View article</h1>') self.assertContains(response, '<label>Extra form field:</label>') self.assertContains(response, '<a href="/test_admin/admin/admin_views/article/" class="closelink">Close</a>') self.assertEqual(response.context['title'], 'View article') post = self.client.post(article_change_url, change_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>Middle content</p>') self.client.get(reverse('admin:logout')) # change user can view all items and edit them self.client.force_login(self.changeuser) response = self.client.get(article_changelist_url) self.assertEqual(response.context['title'], 'Select article to change') self.assertContains( response, '<title>Select article to change | Django site admin</title>', ) self.assertContains(response, '<h1>Select article to change</h1>') response = self.client.get(article_change_url) self.assertEqual(response.context['title'], 'Change article') self.assertContains( response, '<title>Change article | Django site admin</title>', ) self.assertContains(response, '<h1>Change article</h1>') post = self.client.post(article_change_url, change_dict) self.assertRedirects(post, article_changelist_url) self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>edited article</p>') # one error in form should produce singular error message, multiple errors plural change_dict['title'] = '' post = self.client.post(article_change_url, change_dict) self.assertContains( post, 'Please correct the error below.', msg_prefix='Singular error message not found in response to post with one error' ) change_dict['content'] = '' post = self.client.post(article_change_url, change_dict) self.assertContains( post, 'Please correct the errors below.', msg_prefix='Plural error message not found in response to post with multiple errors' ) self.client.get(reverse('admin:logout')) # Test redirection when using row-level change permissions. Refs #11513. 
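# Row-level permissions for this test model (per the assertions below): even-id objects (2 and 6) can be viewed and changed, object 3 can be viewed but not changed, and object 1 is denied entirely.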
r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id") r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id") r3 = RowLevelChangePermissionModel.objects.create(id=3, name='odd id mult 3') r6 = RowLevelChangePermissionModel.objects.create(id=6, name='even id mult 3') change_url_1 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r1.pk,)) change_url_2 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r2.pk,)) change_url_3 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r3.pk,)) change_url_6 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r6.pk,)) logins = [self.superuser, self.viewuser, self.adduser, self.changeuser, self.deleteuser] for login_user in logins: with self.subTest(login_user.username): self.client.force_login(login_user) response = self.client.get(change_url_1) self.assertEqual(response.status_code, 403) response = self.client.post(change_url_1, {'name': 'changed'}) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id') self.assertEqual(response.status_code, 403) response = self.client.get(change_url_2) self.assertEqual(response.status_code, 200) response = self.client.post(change_url_2, {'name': 'changed'}) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed') self.assertRedirects(response, self.index_url) response = self.client.get(change_url_3) self.assertEqual(response.status_code, 200) response = self.client.post(change_url_3, {'name': 'changed'}) self.assertEqual(response.status_code, 403) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=3).name, 'odd id mult 3') response = self.client.get(change_url_6) self.assertEqual(response.status_code, 200) response = self.client.post(change_url_6, {'name': 'changed'}) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=6).name, 'changed') self.assertRedirects(response, self.index_url) self.client.get(reverse('admin:logout')) for login_user in [self.joepublicuser, self.nostaffuser]: with self.subTest(login_user.username): self.client.force_login(login_user) response = self.client.get(change_url_1, follow=True) self.assertContains(response, 'login-form') response = self.client.post(change_url_1, {'name': 'changed'}, follow=True) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id') self.assertContains(response, 'login-form') response = self.client.get(change_url_2, follow=True) self.assertContains(response, 'login-form') response = self.client.post(change_url_2, {'name': 'changed again'}, follow=True) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed') self.assertContains(response, 'login-form') self.client.get(reverse('admin:logout')) def test_change_view_without_object_change_permission(self): """ The object should be read-only if the user has permission to view it and change objects of that type but not to change the current object. 
""" change_url = reverse('admin9:admin_views_article_change', args=(self.a1.pk,)) self.client.force_login(self.viewuser) response = self.client.get(change_url) self.assertEqual(response.context['title'], 'View article') self.assertContains(response, '<title>View article | Django site admin</title>') self.assertContains(response, '<h1>View article</h1>') self.assertContains(response, '<a href="/test_admin/admin9/admin_views/article/" class="closelink">Close</a>') def test_change_view_save_as_new(self): """ 'Save as new' should raise PermissionDenied for users without the 'add' permission. """ change_dict_save_as_new = { '_saveasnew': 'Save as new', 'title': 'Ikke fordømt', 'content': '<p>edited article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', 'section': self.s1.pk, } article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,)) # Add user can perform "Save as new". article_count = Article.objects.count() self.client.force_login(self.adduser) post = self.client.post(article_change_url, change_dict_save_as_new) self.assertRedirects(post, self.index_url) self.assertEqual(Article.objects.count(), article_count + 1) self.client.logout() # Change user cannot perform "Save as new" (no 'add' permission). article_count = Article.objects.count() self.client.force_login(self.changeuser) post = self.client.post(article_change_url, change_dict_save_as_new) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), article_count) # User with both add and change permissions should be redirected to the # change page for the newly created object. article_count = Article.objects.count() self.client.force_login(self.superuser) post = self.client.post(article_change_url, change_dict_save_as_new) self.assertEqual(Article.objects.count(), article_count + 1) new_article = Article.objects.latest('id') self.assertRedirects(post, reverse('admin:admin_views_article_change', args=(new_article.pk,))) def test_change_view_with_view_only_inlines(self): """ User with change permission to a section but view-only for inlines. """ self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta))) self.client.force_login(self.viewuser) # GET shows inlines. response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertEqual(len(response.context['inline_admin_formsets']), 1) formset = response.context['inline_admin_formsets'][0] self.assertEqual(len(formset.forms), 3) # Valid POST changes the name. data = { 'name': 'Can edit name with view-only inlines', 'article_set-TOTAL_FORMS': 3, 'article_set-INITIAL_FORMS': 3 } response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data['name']) # Invalid POST reshows inlines. 
del data['name'] response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.context['inline_admin_formsets']), 1) formset = response.context['inline_admin_formsets'][0] self.assertEqual(len(formset.forms), 3) def test_change_view_with_view_and_add_inlines(self): """User has view and add permissions on the inline model.""" self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta))) self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('add', Article._meta))) self.client.force_login(self.viewuser) # GET shows inlines. response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertEqual(len(response.context['inline_admin_formsets']), 1) formset = response.context['inline_admin_formsets'][0] self.assertEqual(len(formset.forms), 6) # Valid POST creates a new article. data = { 'name': 'Can edit name with view-only inlines', 'article_set-TOTAL_FORMS': 6, 'article_set-INITIAL_FORMS': 3, 'article_set-3-id': [''], 'article_set-3-title': ['A title'], 'article_set-3-content': ['Added content'], 'article_set-3-date_0': ['2008-3-18'], 'article_set-3-date_1': ['11:54:58'], 'article_set-3-section': [str(self.s1.pk)], } response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data['name']) self.assertEqual(Article.objects.count(), 4) # Invalid POST reshows inlines. del data['name'] response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.context['inline_admin_formsets']), 1) formset = response.context['inline_admin_formsets'][0] self.assertEqual(len(formset.forms), 6) def test_change_view_with_view_and_delete_inlines(self): """User has view and delete permissions on the inline model.""" self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta))) self.client.force_login(self.viewuser) data = { 'name': 'Name is required.', 'article_set-TOTAL_FORMS': 6, 'article_set-INITIAL_FORMS': 3, 'article_set-0-id': [str(self.a1.pk)], 'article_set-0-DELETE': ['on'], } # Inline POST details are ignored without delete permission. response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Article.objects.count(), 3) # Deletion successful when delete permission is added. 
self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', Article._meta))) data = { 'name': 'Name is required.', 'article_set-TOTAL_FORMS': 6, 'article_set-INITIAL_FORMS': 3, 'article_set-0-id': [str(self.a1.pk)], 'article_set-0-DELETE': ['on'], } response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Article.objects.count(), 2) def test_delete_view(self): """Delete view should restrict access and actually delete items.""" delete_dict = {'post': 'yes'} delete_url = reverse('admin:admin_views_article_delete', args=(self.a1.pk,)) # add user should not be able to delete articles self.client.force_login(self.adduser) response = self.client.get(delete_url) self.assertEqual(response.status_code, 403) post = self.client.post(delete_url, delete_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) self.client.logout() # view user should not be able to delete articles self.client.force_login(self.viewuser) response = self.client.get(delete_url) self.assertEqual(response.status_code, 403) post = self.client.post(delete_url, delete_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) self.client.logout() # Delete user can delete self.client.force_login(self.deleteuser) response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))) self.assertContains(response, "<h2>Summary</h2>") self.assertContains(response, "<li>Articles: 3</li>") # test response contains link to related Article self.assertContains(response, "admin_views/article/%s/" % self.a1.pk) response = self.client.get(delete_url) self.assertContains(response, "admin_views/article/%s/" % self.a1.pk) self.assertContains(response, "<h2>Summary</h2>") self.assertContains(response, "<li>Articles: 1</li>") post = self.client.post(delete_url, delete_dict) self.assertRedirects(post, self.index_url) self.assertEqual(Article.objects.count(), 2) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object') article_ct = ContentType.objects.get_for_model(Article) logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION) self.assertEqual(logged.object_id, str(self.a1.pk)) def test_delete_view_with_no_default_permissions(self): """ The delete view allows users to delete collected objects without a 'delete' permission (ReadOnlyPizza.Meta.default_permissions is empty). """ pizza = ReadOnlyPizza.objects.create(name='Double Cheese') delete_url = reverse('admin:admin_views_readonlypizza_delete', args=(pizza.pk,)) self.client.force_login(self.adduser) response = self.client.get(delete_url) self.assertContains(response, 'admin_views/readonlypizza/%s/' % pizza.pk) self.assertContains(response, '<h2>Summary</h2>') self.assertContains(response, '<li>Read only pizzas: 1</li>') post = self.client.post(delete_url, {'post': 'yes'}) self.assertRedirects(post, reverse('admin:admin_views_readonlypizza_changelist')) self.assertEqual(ReadOnlyPizza.objects.count(), 0) def test_delete_view_nonexistent_obj(self): self.client.force_login(self.deleteuser) url = reverse('admin:admin_views_article_delete', args=('nonexistent',)) response = self.client.get(url, follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['article with ID “nonexistent” doesn’t exist. 
Perhaps it was deleted?'] ) def test_history_view(self): """History view should restrict access.""" # add user should not be able to view the list of article or change any of them self.client.force_login(self.adduser) response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,))) self.assertEqual(response.status_code, 403) self.client.get(reverse('admin:logout')) # view user can view all items self.client.force_login(self.viewuser) response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,))) self.assertEqual(response.status_code, 200) self.client.get(reverse('admin:logout')) # change user can view all items and edit them self.client.force_login(self.changeuser) response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,))) self.assertEqual(response.status_code, 200) # Test redirection when using row-level change permissions. Refs #11513. rl1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id") rl2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id") logins = [self.superuser, self.viewuser, self.adduser, self.changeuser, self.deleteuser] for login_user in logins: with self.subTest(login_user.username): self.client.force_login(login_user) url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,)) response = self.client.get(url) self.assertEqual(response.status_code, 403) url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,)) response = self.client.get(url) self.assertEqual(response.status_code, 200) self.client.get(reverse('admin:logout')) for login_user in [self.joepublicuser, self.nostaffuser]: with self.subTest(login_user.username): self.client.force_login(login_user) url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,)) response = self.client.get(url, follow=True) self.assertContains(response, 'login-form') url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,)) response = self.client.get(url, follow=True) self.assertContains(response, 'login-form') self.client.get(reverse('admin:logout')) def test_history_view_bad_url(self): self.client.force_login(self.changeuser) response = self.client.get(reverse('admin:admin_views_article_history', args=('foo',)), follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['article with ID “foo” doesn’t exist. Perhaps it was deleted?'] ) def test_conditionally_show_add_section_link(self): """ The foreign key widget should only show the "add related" button if the user has permission to add that related item. """ self.client.force_login(self.adduser) # The user can't add sections yet, so they shouldn't see the "add section" link. url = reverse('admin:admin_views_article_add') add_link_text = 'add_id_section' response = self.client.get(url) self.assertNotContains(response, add_link_text) # Allow the user to add sections too. Now they can see the "add section" link. user = User.objects.get(username='adduser') perm = get_perm(Section, get_permission_codename('add', Section._meta)) user.user_permissions.add(perm) response = self.client.get(url) self.assertContains(response, add_link_text) def test_conditionally_show_change_section_link(self): """ The foreign key widget should only show the "change related" button if the user has permission to change that related item. 
""" def get_change_related(response): return response.context['adminform'].form.fields['section'].widget.can_change_related self.client.force_login(self.adduser) # The user can't change sections yet, so they shouldn't see the "change section" link. url = reverse('admin:admin_views_article_add') change_link_text = 'change_id_section' response = self.client.get(url) self.assertFalse(get_change_related(response)) self.assertNotContains(response, change_link_text) # Allow the user to change sections too. Now they can see the "change section" link. user = User.objects.get(username='adduser') perm = get_perm(Section, get_permission_codename('change', Section._meta)) user.user_permissions.add(perm) response = self.client.get(url) self.assertTrue(get_change_related(response)) self.assertContains(response, change_link_text) def test_conditionally_show_delete_section_link(self): """ The foreign key widget should only show the "delete related" button if the user has permission to delete that related item. """ def get_delete_related(response): return response.context['adminform'].form.fields['sub_section'].widget.can_delete_related self.client.force_login(self.adduser) # The user can't delete sections yet, so they shouldn't see the "delete section" link. url = reverse('admin:admin_views_article_add') delete_link_text = 'delete_id_sub_section' response = self.client.get(url) self.assertFalse(get_delete_related(response)) self.assertNotContains(response, delete_link_text) # Allow the user to delete sections too. Now they can see the "delete section" link. user = User.objects.get(username='adduser') perm = get_perm(Section, get_permission_codename('delete', Section._meta)) user.user_permissions.add(perm) response = self.client.get(url) self.assertTrue(get_delete_related(response)) self.assertContains(response, delete_link_text) def test_disabled_permissions_when_logged_in(self): self.client.force_login(self.superuser) superuser = User.objects.get(username='super') superuser.is_active = False superuser.save() response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'id="login-form"') self.assertNotContains(response, 'Log out') response = self.client.get(reverse('secure_view'), follow=True) self.assertContains(response, 'id="login-form"') def test_disabled_staff_permissions_when_logged_in(self): self.client.force_login(self.superuser) superuser = User.objects.get(username='super') superuser.is_staff = False superuser.save() response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'id="login-form"') self.assertNotContains(response, 'Log out') response = self.client.get(reverse('secure_view'), follow=True) self.assertContains(response, 'id="login-form"') def test_app_list_permissions(self): """ If a user has no module perms, the app list returns a 404. 
""" opts = Article._meta change_user = User.objects.get(username='changeuser') permission = get_perm(Article, get_permission_codename('change', opts)) self.client.force_login(self.changeuser) # the user has no module permissions change_user.user_permissions.remove(permission) response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertEqual(response.status_code, 404) # the user now has module permissions change_user.user_permissions.add(permission) response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertEqual(response.status_code, 200) def test_shortcut_view_only_available_to_staff(self): """ Only admin users should be able to use the admin shortcut view. """ model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey) obj = ModelWithStringPrimaryKey.objects.create(string_pk='foo') shortcut_url = reverse('admin:view_on_site', args=(model_ctype.pk, obj.pk)) # Not logged in: we should see the login page. response = self.client.get(shortcut_url, follow=True) self.assertTemplateUsed(response, 'admin/login.html') # Logged in? Redirect. self.client.force_login(self.superuser) response = self.client.get(shortcut_url, follow=False) # Can't use self.assertRedirects() because User.get_absolute_url() is silly. self.assertEqual(response.status_code, 302) # Domain may depend on contrib.sites tests also run self.assertRegex(response.url, 'http://(testserver|example.com)/dummy/foo/') def test_has_module_permission(self): """ has_module_permission() returns True for all users who have any permission for that module (add, change, or delete), so that the module is displayed on the admin index page. """ self.client.force_login(self.superuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.viewuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.adduser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.changeuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.deleteuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') def test_overriding_has_module_permission(self): """ If has_module_permission() always returns False, the module shouldn't be displayed on the admin index page for any users. 
""" articles = Article._meta.verbose_name_plural.title() sections = Section._meta.verbose_name_plural.title() index_url = reverse('admin7:index') self.client.force_login(self.superuser) response = self.client.get(index_url) self.assertContains(response, sections) self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.viewuser) response = self.client.get(index_url) self.assertNotContains(response, 'admin_views') self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.adduser) response = self.client.get(index_url) self.assertNotContains(response, 'admin_views') self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.changeuser) response = self.client.get(index_url) self.assertNotContains(response, 'admin_views') self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.deleteuser) response = self.client.get(index_url) self.assertNotContains(response, articles) # The app list displays Sections but not Articles as the latter has # ModelAdmin.has_module_permission() = False. self.client.force_login(self.superuser) response = self.client.get(reverse('admin7:app_list', args=('admin_views',))) self.assertContains(response, sections) self.assertNotContains(response, articles) def test_post_save_message_no_forbidden_links_visible(self): """ Post-save message shouldn't contain a link to the change form if the user doesn't have the change permission. """ self.client.force_login(self.adduser) # Emulate Article creation for user with add-only permission. post_data = { "title": "Fun & games", "content": "Some content", "date_0": "2015-10-31", "date_1": "16:35:00", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_article_add'), post_data, follow=True) self.assertContains( response, '<li class="success">The article “Fun &amp; games” was added successfully.</li>', html=True ) @override_settings( ROOT_URLCONF='admin_views.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }], ) class AdminViewProxyModelPermissionsTests(TestCase): """Tests for proxy models permissions in the admin.""" @classmethod def setUpTestData(cls): cls.viewuser = User.objects.create_user(username='viewuser', password='secret', is_staff=True) cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True) cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True) # Setup permissions. opts = UserProxy._meta cls.viewuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('view', opts))) cls.adduser.user_permissions.add(get_perm(UserProxy, get_permission_codename('add', opts))) cls.changeuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('change', opts))) cls.deleteuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('delete', opts))) # UserProxy instances. 
cls.user_proxy = UserProxy.objects.create(username='user_proxy', password='secret') def test_add(self): self.client.force_login(self.adduser) url = reverse('admin:admin_views_userproxy_add') data = { 'username': 'can_add', 'password': 'secret', 'date_joined_0': '2019-01-15', 'date_joined_1': '16:59:10', } response = self.client.post(url, data, follow=True) self.assertEqual(response.status_code, 200) self.assertTrue(UserProxy.objects.filter(username='can_add').exists()) def test_view(self): self.client.force_login(self.viewuser) response = self.client.get(reverse('admin:admin_views_userproxy_changelist')) self.assertContains(response, '<h1>Select user proxy to view</h1>') response = self.client.get(reverse('admin:admin_views_userproxy_change', args=(self.user_proxy.pk,))) self.assertContains(response, '<h1>View user proxy</h1>') self.assertContains(response, '<div class="readonly">user_proxy</div>') def test_change(self): self.client.force_login(self.changeuser) data = { 'password': self.user_proxy.password, 'username': self.user_proxy.username, 'date_joined_0': self.user_proxy.date_joined.strftime('%Y-%m-%d'), 'date_joined_1': self.user_proxy.date_joined.strftime('%H:%M:%S'), 'first_name': 'first_name', } url = reverse('admin:admin_views_userproxy_change', args=(self.user_proxy.pk,)) response = self.client.post(url, data) self.assertRedirects(response, reverse('admin:admin_views_userproxy_changelist')) self.assertEqual(UserProxy.objects.get(pk=self.user_proxy.pk).first_name, 'first_name') def test_delete(self): self.client.force_login(self.deleteuser) url = reverse('admin:admin_views_userproxy_delete', args=(self.user_proxy.pk,)) response = self.client.post(url, {'post': 'yes'}, follow=True) self.assertEqual(response.status_code, 200) self.assertFalse(UserProxy.objects.filter(pk=self.user_proxy.pk).exists()) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewsNoUrlTest(TestCase): """Regression test for #17333""" @classmethod def setUpTestData(cls): # User who can change Reports cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.changeuser.user_permissions.add(get_perm(Report, get_permission_codename('change', Report._meta))) def test_no_standard_modeladmin_urls(self): """Admin index views don't break when user's ModelAdmin removes standard urls""" self.client.force_login(self.changeuser) r = self.client.get(reverse('admin:index')) # we shouldn't get a 500 error caused by a NoReverseMatch self.assertEqual(r.status_code, 200) self.client.get(reverse('admin:logout')) @skipUnlessDBFeature('can_defer_constraint_checks') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewDeletedObjectsTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True) cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.v1 = 
Villain.objects.create(name='Adam') cls.v2 = Villain.objects.create(name='Sue') cls.sv1 = SuperVillain.objects.create(name='Bob') cls.pl1 = Plot.objects.create(name='World Domination', team_leader=cls.v1, contact=cls.v2) cls.pl2 = Plot.objects.create(name='World Peace', team_leader=cls.v2, contact=cls.v2) cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1) cls.pd1 = PlotDetails.objects.create(details='almost finished', plot=cls.pl1) cls.sh1 = SecretHideout.objects.create(location='underground bunker', villain=cls.v1) cls.sh2 = SecretHideout.objects.create(location='floating castle', villain=cls.sv1) cls.ssh1 = SuperSecretHideout.objects.create(location='super floating castle!', supervillain=cls.sv1) cls.cy1 = CyclicOne.objects.create(name='I am recursive', two_id=1) cls.cy2 = CyclicTwo.objects.create(name='I am recursive too', one_id=1) def setUp(self): self.client.force_login(self.superuser) def test_nesting(self): """ Objects should be nested to display the relationships that cause them to be scheduled for deletion. """ pattern = re.compile( r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*' r'<li>Plot details: <a href="%s">almost finished</a>' % ( reverse('admin:admin_views_plot_change', args=(self.pl1.pk,)), reverse('admin:admin_views_plotdetails_change', args=(self.pd1.pk,)), ) ) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,))) self.assertRegex(response.content.decode(), pattern) def test_cyclic(self): """ Cyclic relationships should still cause each object to only be listed once. """ one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % ( reverse('admin:admin_views_cyclicone_change', args=(self.cy1.pk,)), ) two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % ( reverse('admin:admin_views_cyclictwo_change', args=(self.cy2.pk,)), ) response = self.client.get(reverse('admin:admin_views_cyclicone_delete', args=(self.cy1.pk,))) self.assertContains(response, one, 1) self.assertContains(response, two, 1) def test_perms_needed(self): self.client.logout() delete_user = User.objects.get(username='deleteuser') delete_user.user_permissions.add(get_perm(Plot, get_permission_codename('delete', Plot._meta))) self.client.force_login(self.deleteuser) response = self.client.get(reverse('admin:admin_views_plot_delete', args=(self.pl1.pk,))) self.assertContains(response, "your account doesn't have permission to delete the following types of objects") self.assertContains(response, "<li>plot details</li>") def test_protected(self): q = Question.objects.create(question="Why?") a1 = Answer.objects.create(question=q, answer="Because.") a2 = Answer.objects.create(question=q, answer="Yes.") response = self.client.get(reverse('admin:admin_views_question_delete', args=(q.pk,))) self.assertContains(response, "would require deleting the following protected related objects") self.assertContains( response, '<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,)) ) self.assertContains( response, '<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,)) ) def test_post_delete_protected(self): """ A POST request to delete protected objects should display the page which says the deletion is prohibited. 
""" q = Question.objects.create(question='Why?') Answer.objects.create(question=q, answer='Because.') response = self.client.post(reverse('admin:admin_views_question_delete', args=(q.pk,)), {'post': 'yes'}) self.assertEqual(Question.objects.count(), 1) self.assertContains(response, "would require deleting the following protected related objects") def test_restricted(self): album = Album.objects.create(title='Amaryllis') song = Song.objects.create(album=album, name='Unity') response = self.client.get(reverse('admin:admin_views_album_delete', args=(album.pk,))) self.assertContains( response, 'would require deleting the following protected related objects', ) self.assertContains( response, '<li>Song: <a href="%s">Unity</a></li>' % reverse('admin:admin_views_song_change', args=(song.pk,)) ) def test_post_delete_restricted(self): album = Album.objects.create(title='Amaryllis') Song.objects.create(album=album, name='Unity') response = self.client.post( reverse('admin:admin_views_album_delete', args=(album.pk,)), {'post': 'yes'}, ) self.assertEqual(Album.objects.count(), 1) self.assertContains( response, 'would require deleting the following protected related objects', ) def test_not_registered(self): should_contain = """<li>Secret hideout: underground bunker""" response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,))) self.assertContains(response, should_contain, 1) def test_multiple_fkeys_to_same_model(self): """ If a deleted object has two relationships from another model, both of those should be followed in looking for related objects to delete. """ should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse( 'admin:admin_views_plot_change', args=(self.pl1.pk,) ) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,))) self.assertContains(response, should_contain) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,))) self.assertContains(response, should_contain) def test_multiple_fkeys_to_same_instance(self): """ If a deleted object has two relationships pointing to it from another object, the other object should still only be listed once. """ should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse( 'admin:admin_views_plot_change', args=(self.pl2.pk,) ) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,))) self.assertContains(response, should_contain, 1) def test_inheritance(self): """ In the case of an inherited model, if either the child or parent-model instance is deleted, both instances are listed for deletion, as well as any relationships they have. """ should_contain = [ '<li>Villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_villain_change', args=(self.sv1.pk,)), '<li>Super villain: <a href="%s">Bob</a>' % reverse( 'admin:admin_views_supervillain_change', args=(self.sv1.pk,) ), '<li>Secret hideout: floating castle', '<li>Super secret hideout: super floating castle!', ] response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.sv1.pk,))) for should in should_contain: self.assertContains(response, should, 1) response = self.client.get(reverse('admin:admin_views_supervillain_delete', args=(self.sv1.pk,))) for should in should_contain: self.assertContains(response, should, 1) def test_generic_relations(self): """ If a deleted object has GenericForeignKeys pointing to it, those objects should be listed for deletion. 
""" plot = self.pl3 tag = FunkyTag.objects.create(content_object=plot, name='hott') should_contain = '<li>Funky tag: <a href="%s">hott' % reverse( 'admin:admin_views_funkytag_change', args=(tag.id,)) response = self.client.get(reverse('admin:admin_views_plot_delete', args=(plot.pk,))) self.assertContains(response, should_contain) def test_generic_relations_with_related_query_name(self): """ If a deleted object has GenericForeignKey with GenericRelation(related_query_name='...') pointing to it, those objects should be listed for deletion. """ bookmark = Bookmark.objects.create(name='djangoproject') tag = FunkyTag.objects.create(content_object=bookmark, name='django') tag_url = reverse('admin:admin_views_funkytag_change', args=(tag.id,)) should_contain = '<li>Funky tag: <a href="%s">django' % tag_url response = self.client.get(reverse('admin:admin_views_bookmark_delete', args=(bookmark.pk,))) self.assertContains(response, should_contain) def test_delete_view_uses_get_deleted_objects(self): """The delete view uses ModelAdmin.get_deleted_objects().""" book = Book.objects.create(name='Test Book') response = self.client.get(reverse('admin2:admin_views_book_delete', args=(book.pk,))) # BookAdmin.get_deleted_objects() returns custom text. self.assertContains(response, 'a deletable object') @override_settings(ROOT_URLCONF='admin_views.urls') class TestGenericRelations(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.v1 = Villain.objects.create(name='Adam') cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1) def setUp(self): self.client.force_login(self.superuser) def test_generic_content_object_in_list_display(self): FunkyTag.objects.create(content_object=self.pl3, name='hott') response = self.client.get(reverse('admin:admin_views_funkytag_changelist')) self.assertContains(response, "%s</td>" % self.pl3) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewStringPrimaryKeyTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.pk = ( "abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 " r"""-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`""" ) cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk) content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk user_pk = cls.superuser.pk LogEntry.objects.log_action(user_pk, content_type_pk, cls.pk, cls.pk, 2, change_message='Changed something') def setUp(self): self.client.force_login(self.superuser) def test_get_history_view(self): """ Retrieving the history for an object using urlencoded form of primary key should work. Refs #12349, #18550. 
""" response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_history', args=(self.pk,))) self.assertContains(response, escape(self.pk)) self.assertContains(response, 'Changed something') def test_get_change_view(self): "Retrieving the object using urlencoded form of primary key should work" response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_change', args=(self.pk,))) self.assertContains(response, escape(self.pk)) def test_changelist_to_changeform_link(self): "Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072" response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_changelist')) # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding pk_final_url = escape(iri_to_uri(quote(self.pk))) change_url = reverse( 'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',) ).replace('__fk__', pk_final_url) should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (change_url, escape(self.pk)) self.assertContains(response, should_contain) def test_recentactions_link(self): "The link from the recent actions list referring to the changeform of the object should be quoted" response = self.client.get(reverse('admin:index')) link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),)) should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk)) self.assertContains(response, should_contain) def test_deleteconfirmation_link(self): "The link from the delete confirmation page referring back to the changeform of the object should be quoted" url = reverse('admin:admin_views_modelwithstringprimarykey_delete', args=(quote(self.pk),)) response = self.client.get(url) # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding change_url = reverse( 'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',) ).replace('__fk__', escape(iri_to_uri(quote(self.pk)))) should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk)) self.assertContains(response, should_contain) def test_url_conflicts_with_add(self): "A model with a primary key that ends with add or is `add` should be visible" add_model = ModelWithStringPrimaryKey.objects.create(pk="i have something to add") add_model.save() response = self.client.get( reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model.pk),)) ) should_contain = """<h1>Change model with string primary key</h1>""" self.assertContains(response, should_contain) add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add") add_url = reverse('admin:admin_views_modelwithstringprimarykey_add') change_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model2.pk),)) self.assertNotEqual(add_url, change_url) def test_url_conflicts_with_delete(self): "A model with a primary key that ends with delete should be visible" delete_model = ModelWithStringPrimaryKey(pk="delete") delete_model.save() response = self.client.get( reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(delete_model.pk),)) ) should_contain = """<h1>Change model with string primary key</h1>""" self.assertContains(response, should_contain) def test_url_conflicts_with_history(self): "A model with a primary key that ends with history should be visible" history_model = ModelWithStringPrimaryKey(pk="history") history_model.save() response = self.client.get( 
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(history_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)

    def test_shortcut_view_with_escaping(self):
        "'View on site' should work properly with char fields"
        model = ModelWithStringPrimaryKey(pk='abc_123')
        model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(model.pk),))
        )
        should_contain = '/%s/" class="viewsitelink">' % model.pk
        self.assertContains(response, should_contain)

    def test_change_view_history_link(self):
        """Object history button link should work and contain the pk value quoted."""
        url = reverse(
            'admin:%s_modelwithstringprimarykey_change' % ModelWithStringPrimaryKey._meta.app_label,
            args=(quote(self.pk),)
        )
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        expected_link = reverse(
            'admin:%s_modelwithstringprimarykey_history' % ModelWithStringPrimaryKey._meta.app_label,
            args=(quote(self.pk),)
        )
        self.assertContains(response, '<a href="%s" class="historylink"' % escape(expected_link))

    def test_redirect_on_add_view_continue_button(self):
        """As soon as an object is added using the "Save and continue editing"
        button, the user should be redirected to the object's change_view.

        If the primary key is a string containing special characters like a
        slash or an underscore, those characters must be escaped (see #22266).
        """
        response = self.client.post(
            reverse('admin:admin_views_modelwithstringprimarykey_add'),
            {
                'string_pk': '123/history',
                "_continue": "1",  # Save and continue editing
            }
        )
        self.assertEqual(response.status_code, 302)  # temporary redirect
        self.assertIn('/123_2Fhistory/', response.headers['location'])  # PK is quoted


@override_settings(ROOT_URLCONF='admin_views.urls')
class SecureViewTests(TestCase):
    """
    Test behavior of a view protected by the staff_member_required decorator.
    """
    def test_secure_view_shows_login_if_not_logged_in(self):
        secure_url = reverse('secure_view')
        response = self.client.get(secure_url)
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), secure_url))
        response = self.client.get(secure_url, follow=True)
        self.assertTemplateUsed(response, 'admin/login.html')
        self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)

    def test_staff_member_required_decorator_works_with_argument(self):
        """
        The staff_member_required() decorator works with an argument
        (redirect_field_name).
""" secure_url = '/test_admin/admin/secure-view2/' response = self.client.get(secure_url) self.assertRedirects(response, '%s?myfield=%s' % (reverse('admin:login'), secure_url)) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewUnicodeTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.b1 = Book.objects.create(name='Lærdommer') cls.p1 = Promo.objects.create(name='<Promo for Lærdommer>', book=cls.b1) cls.chap1 = Chapter.objects.create( title='Norske bostaver æøå skaper problemer', content='<p>Svært frustrerende med UnicodeDecodeErro</p>', book=cls.b1 ) cls.chap2 = Chapter.objects.create( title='Kjærlighet', content='<p>La kjærligheten til de lidende seire.</p>', book=cls.b1) cls.chap3 = Chapter.objects.create(title='Kjærlighet', content='<p>Noe innhold</p>', book=cls.b1) cls.chap4 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='<Xtra(1) Norske bostaver æøå skaper problemer>') cls.chap5 = ChapterXtra1.objects.create(chap=cls.chap2, xtra='<Xtra(1) Kjærlighet>') cls.chap6 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='<Xtra(1) Kjærlighet>') cls.chap7 = ChapterXtra2.objects.create(chap=cls.chap1, xtra='<Xtra(2) Norske bostaver æøå skaper problemer>') cls.chap8 = ChapterXtra2.objects.create(chap=cls.chap2, xtra='<Xtra(2) Kjærlighet>') cls.chap9 = ChapterXtra2.objects.create(chap=cls.chap3, xtra='<Xtra(2) Kjærlighet>') def setUp(self): self.client.force_login(self.superuser) def test_unicode_edit(self): """ A test to ensure that POST on edit_view handles non-ASCII characters. """ post_data = { "name": "Test lærdommer", # inline data "chapter_set-TOTAL_FORMS": "6", "chapter_set-INITIAL_FORMS": "3", "chapter_set-MAX_NUM_FORMS": "0", "chapter_set-0-id": self.chap1.pk, "chapter_set-0-title": "Norske bostaver æøå skaper problemer", "chapter_set-0-content": "&lt;p&gt;Svært frustrerende med UnicodeDecodeError&lt;/p&gt;", "chapter_set-1-id": self.chap2.id, "chapter_set-1-title": "Kjærlighet.", "chapter_set-1-content": "&lt;p&gt;La kjærligheten til de lidende seire.&lt;/p&gt;", "chapter_set-2-id": self.chap3.id, "chapter_set-2-title": "Need a title.", "chapter_set-2-content": "&lt;p&gt;Newest content&lt;/p&gt;", "chapter_set-3-id": "", "chapter_set-3-title": "", "chapter_set-3-content": "", "chapter_set-4-id": "", "chapter_set-4-title": "", "chapter_set-4-content": "", "chapter_set-5-id": "", "chapter_set-5-title": "", "chapter_set-5-content": "", } response = self.client.post(reverse('admin:admin_views_book_change', args=(self.b1.pk,)), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_unicode_delete(self): """ The delete_view handles non-ASCII characters """ delete_dict = {'post': 'yes'} delete_url = reverse('admin:admin_views_book_delete', args=(self.b1.pk,)) response = self.client.get(delete_url) self.assertEqual(response.status_code, 200) response = self.client.post(delete_url, delete_dict) self.assertRedirects(response, reverse('admin:admin_views_book_changelist')) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewListEditable(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( 
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False) cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True) def setUp(self): self.client.force_login(self.superuser) def test_inheritance(self): Podcast.objects.create(name="This Week in Django", release_date=datetime.date.today()) response = self.client.get(reverse('admin:admin_views_podcast_changelist')) self.assertEqual(response.status_code, 200) def test_inheritance_2(self): Vodcast.objects.create(name="This Week in Django", released=True) response = self.client.get(reverse('admin:admin_views_vodcast_changelist')) self.assertEqual(response.status_code, 200) def test_custom_pk(self): Language.objects.create(iso='en', name='English', english_name='English') response = self.client.get(reverse('admin:admin_views_language_changelist')) self.assertEqual(response.status_code, 200) def test_changelist_input_html(self): response = self.client.get(reverse('admin:admin_views_person_changelist')) # 2 inputs per object(the field and the hidden id field) = 6 # 4 management hidden fields = 4 # 4 action inputs (3 regular checkboxes, 1 checkbox to select all) # main form submit button = 1 # search field and search submit button = 2 # CSRF field = 1 # field to track 'select all' across paginated views = 1 # 6 + 4 + 4 + 1 + 2 + 1 + 1 = 19 inputs self.assertContains(response, "<input", count=20) # 1 select per object = 3 selects self.assertContains(response, "<select", count=4) def test_post_messages(self): # Ticket 12707: Saving inline editable should not show admin # action warnings data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": str(self.per1.pk), "form-1-gender": "2", "form-1-id": str(self.per2.pk), "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": str(self.per3.pk), "_save": "Save", } response = self.client.post(reverse('admin:admin_views_person_changelist'), data, follow=True) self.assertEqual(len(response.context['messages']), 1) def test_post_submission(self): data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": str(self.per1.pk), "form-1-gender": "2", "form-1-id": str(self.per2.pk), "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": str(self.per3.pk), "_save": "Save", } self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2) # test a filtered page data = { "form-TOTAL_FORMS": "2", "form-INITIAL_FORMS": "2", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per1.pk), "form-0-gender": "1", "form-0-alive": "checked", "form-1-id": str(self.per3.pk), "form-1-gender": "1", "form-1-alive": "checked", "_save": "Save", } self.client.post(reverse('admin:admin_views_person_changelist') + '?gender__exact=1', data) self.assertIs(Person.objects.get(name="John Mauchly").alive, True) # test a searched page data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": 
str(self.per1.pk), "form-0-gender": "1", "_save": "Save", } self.client.post(reverse('admin:admin_views_person_changelist') + '?q=john', data) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) def test_non_field_errors(self): """ Non-field errors are displayed for each of the forms in the changelist's formset. """ fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai') fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india') fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza') data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-id": str(fd1.id), "form-0-reference": "123", "form-0-driver": "bill", "form-0-restaurant": "thai", # Same data as above: Forbidden because of unique_together! "form-1-id": str(fd2.id), "form-1-reference": "456", "form-1-driver": "bill", "form-1-restaurant": "thai", "form-2-id": str(fd3.id), "form-2-reference": "789", "form-2-driver": "bill", "form-2-restaurant": "pizza", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data) self.assertContains( response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery ' 'with this Driver and Restaurant already exists.</li></ul></td></tr>', 1, html=True ) data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-id": str(fd1.id), "form-0-reference": "123", "form-0-driver": "bill", "form-0-restaurant": "thai", # Same data as above: Forbidden because of unique_together! "form-1-id": str(fd2.id), "form-1-reference": "456", "form-1-driver": "bill", "form-1-restaurant": "thai", # Same data also. "form-2-id": str(fd3.id), "form-2-reference": "789", "form-2-driver": "bill", "form-2-restaurant": "thai", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data) self.assertContains( response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery ' 'with this Driver and Restaurant already exists.</li></ul></td></tr>', 2, html=True ) def test_non_form_errors(self): # test if non-form errors are handled; ticket #12716 data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per2.pk), "form-0-alive": "1", "form-0-gender": "2", # The form processing understands this as a list_editable "Save" # and not an action "Go". 
"_save": "Save", } response = self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertContains(response, "Grace is not a Zombie") def test_non_form_errors_is_errorlist(self): # test if non-form errors are correctly handled; ticket #12878 data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per2.pk), "form-0-alive": "1", "form-0-gender": "2", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_person_changelist'), data) non_form_errors = response.context['cl'].formset.non_form_errors() self.assertIsInstance(non_form_errors, ErrorList) self.assertEqual(str(non_form_errors), str(ErrorList(["Grace is not a Zombie"]))) def test_list_editable_ordering(self): collector = Collector.objects.create(id=1, name="Frederick Clegg") Category.objects.create(id=1, order=1, collector=collector) Category.objects.create(id=2, order=2, collector=collector) Category.objects.create(id=3, order=0, collector=collector) Category.objects.create(id=4, order=0, collector=collector) # NB: The order values must be changed so that the items are reordered. data = { "form-TOTAL_FORMS": "4", "form-INITIAL_FORMS": "4", "form-MAX_NUM_FORMS": "0", "form-0-order": "14", "form-0-id": "1", "form-0-collector": "1", "form-1-order": "13", "form-1-id": "2", "form-1-collector": "1", "form-2-order": "1", "form-2-id": "3", "form-2-collector": "1", "form-3-order": "0", "form-3-id": "4", "form-3-collector": "1", # The form processing understands this as a list_editable "Save" # and not an action "Go". "_save": "Save", } response = self.client.post(reverse('admin:admin_views_category_changelist'), data) # Successful post will redirect self.assertEqual(response.status_code, 302) # The order values have been applied to the right objects self.assertEqual(Category.objects.get(id=1).order, 14) self.assertEqual(Category.objects.get(id=2).order, 13) self.assertEqual(Category.objects.get(id=3).order, 1) self.assertEqual(Category.objects.get(id=4).order, 0) def test_list_editable_pagination(self): """ Pagination works for list_editable items. """ UnorderedObject.objects.create(id=1, name='Unordered object #1') UnorderedObject.objects.create(id=2, name='Unordered object #2') UnorderedObject.objects.create(id=3, name='Unordered object #3') response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist')) self.assertContains(response, 'Unordered object #3') self.assertContains(response, 'Unordered object #2') self.assertNotContains(response, 'Unordered object #1') response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist') + '?p=2') self.assertNotContains(response, 'Unordered object #3') self.assertNotContains(response, 'Unordered object #2') self.assertContains(response, 'Unordered object #1') def test_list_editable_action_submit(self): # List editable changes should not be executed if the action "Go" button is # used to submit the form. 
data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": "1", "form-1-gender": "2", "form-1-id": "2", "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": "3", "index": "0", "_selected_action": ['3'], "action": ['', 'delete_selected'], } self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, True) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1) def test_list_editable_action_choices(self): # List editable changes should be executed if the "Save" button is # used to submit the form - any action choices should be ignored. data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": str(self.per1.pk), "form-1-gender": "2", "form-1-id": str(self.per2.pk), "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": str(self.per3.pk), "_save": "Save", "_selected_action": ['1'], "action": ['', 'delete_selected'], } self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2) def test_list_editable_popup(self): """ Fields should not be list-editable in popups. """ response = self.client.get(reverse('admin:admin_views_person_changelist')) self.assertNotEqual(response.context['cl'].list_editable, ()) response = self.client.get(reverse('admin:admin_views_person_changelist') + '?%s' % IS_POPUP_VAR) self.assertEqual(response.context['cl'].list_editable, ()) def test_pk_hidden_fields(self): """ hidden pk fields aren't displayed in the table body and their corresponding human-readable value is displayed instead. The hidden pk fields are displayed but separately (not in the table) and only once. """ story1 = Story.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...') story2 = Story.objects.create( title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...', ) response = self.client.get(reverse('admin:admin_views_story_changelist')) # Only one hidden field, in a separate place than the table. self.assertContains(response, 'id="id_form-0-id"', 1) self.assertContains(response, 'id="id_form-1-id"', 1) self.assertContains( response, '<div class="hiddenfields">\n' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n</div>' % (story2.id, story1.id), html=True ) self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1) self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1) def test_pk_hidden_fields_with_list_display_links(self): """ Similarly as test_pk_hidden_fields, but when the hidden pk fields are referenced in list_display_links. Refs #12475. """ story1 = OtherStory.objects.create( title='The adventures of Guido', content='Once upon a time in Djangoland...', ) story2 = OtherStory.objects.create( title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...', ) link1 = reverse('admin:admin_views_otherstory_change', args=(story1.pk,)) link2 = reverse('admin:admin_views_otherstory_change', args=(story2.pk,)) response = self.client.get(reverse('admin:admin_views_otherstory_changelist')) # Only one hidden field, in a separate place than the table. 
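        # The pk column itself renders as a link in the table because 'id' is
        # in list_display_links, while its hidden form input still appears
        # only once, outside the table, as in test_pk_hidden_fields.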
self.assertContains(response, 'id="id_form-0-id"', 1) self.assertContains(response, 'id="id_form-1-id"', 1) self.assertContains( response, '<div class="hiddenfields">\n' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n</div>' % (story2.id, story1.id), html=True ) self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id), 1) self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id), 1) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminSearchTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False) cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True) Person.objects.create(name='John Doe', gender=1) Person.objects.create(name='John O"Hara', gender=1) Person.objects.create(name="John O'Hara", gender=1) cls.t1 = Recommender.objects.create() cls.t2 = Recommendation.objects.create(the_recommender=cls.t1) cls.t3 = Recommender.objects.create() cls.t4 = Recommendation.objects.create(the_recommender=cls.t3) cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text='Bar') cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text='Foo') cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text='Few') cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text='Bas') def setUp(self): self.client.force_login(self.superuser) def test_search_on_sibling_models(self): "A search that mentions sibling models" response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar') # confirm the search returned 1 object self.assertContains(response, "\n1 recommendation\n") def test_with_fk_to_field(self): """ The to_field GET parameter is preserved when a search is performed. Refs #10918. 
""" response = self.client.get(reverse('admin:auth_user_changelist') + '?q=joe&%s=id' % TO_FIELD_VAR) self.assertContains(response, "\n1 user\n") self.assertContains(response, '<input type="hidden" name="%s" value="id">' % TO_FIELD_VAR, html=True) def test_exact_matches(self): response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar') # confirm the search returned one object self.assertContains(response, "\n1 recommendation\n") response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=ba') # confirm the search returned zero objects self.assertContains(response, "\n0 recommendations\n") def test_beginning_matches(self): response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui') # confirm the search returned one object self.assertContains(response, "\n1 person\n") self.assertContains(response, "Guido") response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=uido') # confirm the search returned zero objects self.assertContains(response, "\n0 persons\n") self.assertNotContains(response, "Guido") def test_pluggable_search(self): PluggableSearchPerson.objects.create(name="Bob", age=10) PluggableSearchPerson.objects.create(name="Amy", age=20) response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=Bob') # confirm the search returned one object self.assertContains(response, "\n1 pluggable search person\n") self.assertContains(response, "Bob") response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=20') # confirm the search returned one object self.assertContains(response, "\n1 pluggable search person\n") self.assertContains(response, "Amy") def test_reset_link(self): """ Test presence of reset link in search bar ("1 result (_x total_)"). """ # 1 query for session + 1 for fetching user # + 1 for filtered result + 1 for filtered count # + 1 for total count with self.assertNumQueries(5): response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui') self.assertContains( response, """<span class="small quiet">1 result (<a href="?">6 total</a>)</span>""", html=True ) def test_no_total_count(self): """ #8408 -- "Show all" should be displayed instead of the total count if ModelAdmin.show_full_result_count is False. 
""" # 1 query for session + 1 for fetching user # + 1 for filtered result + 1 for filtered count with self.assertNumQueries(4): response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar') self.assertContains( response, """<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""", html=True ) self.assertTrue(response.context['cl'].show_admin_actions) def test_search_with_spaces(self): url = reverse('admin:admin_views_person_changelist') + '?q=%s' tests = [ ('"John Doe"', 1), ("'John Doe'", 1), ('John Doe', 0), ('"John Doe" John', 1), ("'John Doe' John", 1), ("John Doe John", 0), ('"John Do"', 1), ("'John Do'", 1), ("'John O\'Hara'", 0), ("'John O\\'Hara'", 1), ('"John O\"Hara"', 0), ('"John O\\"Hara"', 1), ] for search, hits in tests: with self.subTest(search=search): response = self.client.get(url % search) self.assertContains(response, '\n%s person' % hits) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminInheritedInlinesTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_inline(self): """ Inline models which inherit from a common parent are correctly handled. """ foo_user = "foo username" bar_user = "bar username" name_re = re.compile(b'name="(.*?)"') # test the add case response = self.client.get(reverse('admin:admin_views_persona_add')) names = name_re.findall(response.content) # make sure we have no duplicate HTML names self.assertEqual(len(names), len(set(names))) # test the add case post_data = { "name": "Test Name", # inline data "accounts-TOTAL_FORMS": "1", "accounts-INITIAL_FORMS": "0", "accounts-MAX_NUM_FORMS": "0", "accounts-0-username": foo_user, "accounts-2-TOTAL_FORMS": "1", "accounts-2-INITIAL_FORMS": "0", "accounts-2-MAX_NUM_FORMS": "0", "accounts-2-0-username": bar_user, } response = self.client.post(reverse('admin:admin_views_persona_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere self.assertEqual(Persona.objects.count(), 1) self.assertEqual(FooAccount.objects.count(), 1) self.assertEqual(BarAccount.objects.count(), 1) self.assertEqual(FooAccount.objects.all()[0].username, foo_user) self.assertEqual(BarAccount.objects.all()[0].username, bar_user) self.assertEqual(Persona.objects.all()[0].accounts.count(), 2) persona_id = Persona.objects.all()[0].id foo_id = FooAccount.objects.all()[0].id bar_id = BarAccount.objects.all()[0].id # test the edit case response = self.client.get(reverse('admin:admin_views_persona_change', args=(persona_id,))) names = name_re.findall(response.content) # make sure we have no duplicate HTML names self.assertEqual(len(names), len(set(names))) post_data = { "name": "Test Name", "accounts-TOTAL_FORMS": "2", "accounts-INITIAL_FORMS": "1", "accounts-MAX_NUM_FORMS": "0", "accounts-0-username": "%s-1" % foo_user, "accounts-0-account_ptr": str(foo_id), "accounts-0-persona": str(persona_id), "accounts-2-TOTAL_FORMS": "2", "accounts-2-INITIAL_FORMS": "1", "accounts-2-MAX_NUM_FORMS": "0", "accounts-2-0-username": "%s-1" % bar_user, "accounts-2-0-account_ptr": str(bar_id), "accounts-2-0-persona": str(persona_id), } response = self.client.post(reverse('admin:admin_views_persona_change', args=(persona_id,)), post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Persona.objects.count(), 1) self.assertEqual(FooAccount.objects.count(), 1) 
self.assertEqual(BarAccount.objects.count(), 1) self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user) self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user) self.assertEqual(Persona.objects.all()[0].accounts.count(), 2) @override_settings(ROOT_URLCONF='admin_views.urls') class TestCustomChangeList(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_custom_changelist(self): """ Validate that a custom ChangeList class can be used (#9749) """ # Insert some data post_data = {"name": "First Gadget"} response = self.client.post(reverse('admin:admin_views_gadget_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere # Hit the page once to get messages out of the queue message list response = self.client.get(reverse('admin:admin_views_gadget_changelist')) # Data is still not visible on the page response = self.client.get(reverse('admin:admin_views_gadget_changelist')) self.assertNotContains(response, 'First Gadget') @override_settings(ROOT_URLCONF='admin_views.urls') class TestInlineNotEditable(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_GET_parent_add(self): """ InlineModelAdmin broken? """ response = self.client.get(reverse('admin:admin_views_parent_add')) self.assertEqual(response.status_code, 200) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminCustomQuerysetTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.pks = [EmptyModel.objects.create().id for i in range(3)] def setUp(self): self.client.force_login(self.superuser) self.super_login = { REDIRECT_FIELD_NAME: reverse('admin:index'), 'username': 'super', 'password': 'secret', } def test_changelist_view(self): response = self.client.get(reverse('admin:admin_views_emptymodel_changelist')) for i in self.pks: if i > 1: self.assertContains(response, 'Primary key = %s' % i) else: self.assertNotContains(response, 'Primary key = %s' % i) def test_changelist_view_count_queries(self): # create 2 Person objects Person.objects.create(name='person1', gender=1) Person.objects.create(name='person2', gender=2) changelist_url = reverse('admin:admin_views_person_changelist') # 5 queries are expected: 1 for the session, 1 for the user, # 2 for the counts and 1 for the objects on the page with self.assertNumQueries(5): resp = self.client.get(changelist_url) self.assertEqual(resp.context['selection_note'], '0 of 2 selected') self.assertEqual(resp.context['selection_note_all'], 'All 2 selected') with self.assertNumQueries(5): extra = {'q': 'not_in_name'} resp = self.client.get(changelist_url, extra) self.assertEqual(resp.context['selection_note'], '0 of 0 selected') self.assertEqual(resp.context['selection_note_all'], 'All 0 selected') with self.assertNumQueries(5): extra = {'q': 'person'} resp = self.client.get(changelist_url, extra) self.assertEqual(resp.context['selection_note'], '0 of 2 selected') self.assertEqual(resp.context['selection_note_all'], 'All 2 selected') with self.assertNumQueries(5): extra = {'gender__exact': '1'} resp = self.client.get(changelist_url, extra) self.assertEqual(resp.context['selection_note'], 
'0 of 1 selected') self.assertEqual(resp.context['selection_note_all'], '1 selected') def test_change_view(self): for i in self.pks: url = reverse('admin:admin_views_emptymodel_change', args=(i,)) response = self.client.get(url, follow=True) if i > 1: self.assertEqual(response.status_code, 200) else: self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['empty model with ID “1” doesn’t exist. Perhaps it was deleted?'] ) def test_add_model_modeladmin_defer_qs(self): # Test for #14529. defer() is used in ModelAdmin.get_queryset() # model has __str__ method self.assertEqual(CoverLetter.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "author": "Candidate, Best", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_coverletter_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(CoverLetter.objects.count(), 1) # Message should contain non-ugly model verbose name pk = CoverLetter.objects.all()[0].pk self.assertContains( response, '<li class="success">The cover letter “<a href="%s">' 'Candidate, Best</a>” was added successfully.</li>' % reverse('admin:admin_views_coverletter_change', args=(pk,)), html=True ) # model has no __str__ method self.assertEqual(ShortMessage.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "content": "What's this SMS thing?", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_shortmessage_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(ShortMessage.objects.count(), 1) # Message should contain non-ugly model verbose name sm = ShortMessage.objects.all()[0] self.assertContains( response, '<li class="success">The short message “<a href="%s">' '%s</a>” was added successfully.</li>' % (reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True ) def test_add_model_modeladmin_only_qs(self): # Test for #14529. only() is used in ModelAdmin.get_queryset() # model has __str__ method self.assertEqual(Telegram.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "title": "Urgent telegram", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_telegram_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Telegram.objects.count(), 1) # Message should contain non-ugly model verbose name pk = Telegram.objects.all()[0].pk self.assertContains( response, '<li class="success">The telegram “<a href="%s">' 'Urgent telegram</a>” was added successfully.</li>' % reverse('admin:admin_views_telegram_change', args=(pk,)), html=True ) # model has no __str__ method self.assertEqual(Paper.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "title": "My Modified Paper Title", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_paper_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Paper.objects.count(), 1) # Message should contain non-ugly model verbose name p = Paper.objects.all()[0] self.assertContains( response, '<li class="success">The paper “<a href="%s">' '%s</a>” was added successfully.</li>' % (reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True ) def test_edit_model_modeladmin_defer_qs(self): # Test for #14529. 
defer() is used in ModelAdmin.get_queryset() # model has __str__ method cl = CoverLetter.objects.create(author="John Doe") self.assertEqual(CoverLetter.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_coverletter_change', args=(cl.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "author": "John Doe II", "_save": "Save", } url = reverse('admin:admin_views_coverletter_change', args=(cl.pk,)) response = self.client.post(url, post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(CoverLetter.objects.count(), 1) # Message should contain non-ugly model verbose name. Instance # representation is set by model's __str__() self.assertContains( response, '<li class="success">The cover letter “<a href="%s">' 'John Doe II</a>” was changed successfully.</li>' % reverse('admin:admin_views_coverletter_change', args=(cl.pk,)), html=True ) # model has no __str__ method sm = ShortMessage.objects.create(content="This is expensive") self.assertEqual(ShortMessage.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "content": "Too expensive", "_save": "Save", } url = reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)) response = self.client.post(url, post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(ShortMessage.objects.count(), 1) # Message should contain non-ugly model verbose name. The ugly(!) # instance representation is set by __str__(). self.assertContains( response, '<li class="success">The short message “<a href="%s">' '%s</a>” was changed successfully.</li>' % (reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True ) def test_edit_model_modeladmin_only_qs(self): # Test for #14529. only() is used in ModelAdmin.get_queryset() # model has __str__ method t = Telegram.objects.create(title="First Telegram") self.assertEqual(Telegram.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_telegram_change', args=(t.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "title": "Telegram without typo", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_telegram_change', args=(t.pk,)), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Telegram.objects.count(), 1) # Message should contain non-ugly model verbose name. The instance # representation is set by model's __str__() self.assertContains( response, '<li class="success">The telegram “<a href="%s">' 'Telegram without typo</a>” was changed successfully.</li>' % reverse('admin:admin_views_telegram_change', args=(t.pk,)), html=True ) # model has no __str__ method p = Paper.objects.create(title="My Paper Title") self.assertEqual(Paper.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_paper_change', args=(p.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "title": "My Modified Paper Title", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_paper_change', args=(p.pk,)), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Paper.objects.count(), 1) # Message should contain non-ugly model verbose name. The ugly(!) # instance representation is set by __str__(). 
self.assertContains( response, '<li class="success">The paper “<a href="%s">' '%s</a>” was changed successfully.</li>' % (reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True ) def test_history_view_custom_qs(self): """ Custom querysets are considered for the admin history view. """ self.client.post(reverse('admin:login'), self.super_login) FilteredManager.objects.create(pk=1) FilteredManager.objects.create(pk=2) response = self.client.get(reverse('admin:admin_views_filteredmanager_changelist')) self.assertContains(response, "PK=1") self.assertContains(response, "PK=2") self.assertEqual( self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(1,))).status_code, 200 ) self.assertEqual( self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(2,))).status_code, 200 ) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminInlineFileUploadTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') file1 = tempfile.NamedTemporaryFile(suffix=".file1") file1.write(b'a' * (2 ** 21)) filename = file1.name file1.close() cls.gallery = Gallery.objects.create(name='Test Gallery') cls.picture = Picture.objects.create( name='Test Picture', image=filename, gallery=cls.gallery, ) def setUp(self): self.client.force_login(self.superuser) def test_form_has_multipart_enctype(self): response = self.client.get( reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)) ) self.assertIs(response.context['has_file_field'], True) self.assertContains(response, MULTIPART_ENCTYPE) def test_inline_file_upload_edit_validation_error_post(self): """ Inline file uploads correctly display prior data (#10002). """ post_data = { "name": "Test Gallery", "pictures-TOTAL_FORMS": "2", "pictures-INITIAL_FORMS": "1", "pictures-MAX_NUM_FORMS": "0", "pictures-0-id": str(self.picture.id), "pictures-0-gallery": str(self.gallery.id), "pictures-0-name": "Test Picture", "pictures-0-image": "", "pictures-1-id": "", "pictures-1-gallery": str(self.gallery.id), "pictures-1-name": "Test Picture 2", "pictures-1-image": "", } response = self.client.post( reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)), post_data ) self.assertContains(response, b"Currently") @override_settings(ROOT_URLCONF='admin_views.urls') class AdminInlineTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.collector = Collector.objects.create(pk=1, name='John Fowles') def setUp(self): self.post_data = { "name": "Test Name", "widget_set-TOTAL_FORMS": "3", "widget_set-INITIAL_FORMS": "0", "widget_set-MAX_NUM_FORMS": "0", "widget_set-0-id": "", "widget_set-0-owner": "1", "widget_set-0-name": "", "widget_set-1-id": "", "widget_set-1-owner": "1", "widget_set-1-name": "", "widget_set-2-id": "", "widget_set-2-owner": "1", "widget_set-2-name": "", "doohickey_set-TOTAL_FORMS": "3", "doohickey_set-INITIAL_FORMS": "0", "doohickey_set-MAX_NUM_FORMS": "0", "doohickey_set-0-owner": "1", "doohickey_set-0-code": "", "doohickey_set-0-name": "", "doohickey_set-1-owner": "1", "doohickey_set-1-code": "", "doohickey_set-1-name": "", "doohickey_set-2-owner": "1", "doohickey_set-2-code": "", "doohickey_set-2-name": "", "grommet_set-TOTAL_FORMS": "3", "grommet_set-INITIAL_FORMS": "0", "grommet_set-MAX_NUM_FORMS": "0", "grommet_set-0-code": "", "grommet_set-0-owner": "1", "grommet_set-0-name": 
"", "grommet_set-1-code": "", "grommet_set-1-owner": "1", "grommet_set-1-name": "", "grommet_set-2-code": "", "grommet_set-2-owner": "1", "grommet_set-2-name": "", "whatsit_set-TOTAL_FORMS": "3", "whatsit_set-INITIAL_FORMS": "0", "whatsit_set-MAX_NUM_FORMS": "0", "whatsit_set-0-owner": "1", "whatsit_set-0-index": "", "whatsit_set-0-name": "", "whatsit_set-1-owner": "1", "whatsit_set-1-index": "", "whatsit_set-1-name": "", "whatsit_set-2-owner": "1", "whatsit_set-2-index": "", "whatsit_set-2-name": "", "fancydoodad_set-TOTAL_FORMS": "3", "fancydoodad_set-INITIAL_FORMS": "0", "fancydoodad_set-MAX_NUM_FORMS": "0", "fancydoodad_set-0-doodad_ptr": "", "fancydoodad_set-0-owner": "1", "fancydoodad_set-0-name": "", "fancydoodad_set-0-expensive": "on", "fancydoodad_set-1-doodad_ptr": "", "fancydoodad_set-1-owner": "1", "fancydoodad_set-1-name": "", "fancydoodad_set-1-expensive": "on", "fancydoodad_set-2-doodad_ptr": "", "fancydoodad_set-2-owner": "1", "fancydoodad_set-2-name": "", "fancydoodad_set-2-expensive": "on", "category_set-TOTAL_FORMS": "3", "category_set-INITIAL_FORMS": "0", "category_set-MAX_NUM_FORMS": "0", "category_set-0-order": "", "category_set-0-id": "", "category_set-0-collector": "1", "category_set-1-order": "", "category_set-1-id": "", "category_set-1-collector": "1", "category_set-2-order": "", "category_set-2-id": "", "category_set-2-collector": "1", } self.client.force_login(self.superuser) def test_simple_inline(self): "A simple model can be saved as inlines" # First add a new inline self.post_data['widget_set-0-name'] = "Widget 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Widget.objects.count(), 1) self.assertEqual(Widget.objects.all()[0].name, "Widget 1") widget_id = Widget.objects.all()[0].id # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="widget_set-0-id"') # No file or image fields, no enctype on the forms self.assertIs(response.context['has_file_field'], False) self.assertNotContains(response, MULTIPART_ENCTYPE) # Now resave that inline self.post_data['widget_set-INITIAL_FORMS'] = "1" self.post_data['widget_set-0-id'] = str(widget_id) self.post_data['widget_set-0-name'] = "Widget 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Widget.objects.count(), 1) self.assertEqual(Widget.objects.all()[0].name, "Widget 1") # Now modify that inline self.post_data['widget_set-INITIAL_FORMS'] = "1" self.post_data['widget_set-0-id'] = str(widget_id) self.post_data['widget_set-0-name'] = "Widget 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Widget.objects.count(), 1) self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated") def test_explicit_autofield_inline(self): "A model with an explicit autofield primary key can be saved as inlines. 
Regression for #8093" # First add a new inline self.post_data['grommet_set-0-name'] = "Grommet 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Grommet.objects.count(), 1) self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1") # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="grommet_set-0-code"') # Now resave that inline self.post_data['grommet_set-INITIAL_FORMS'] = "1" self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code) self.post_data['grommet_set-0-name'] = "Grommet 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Grommet.objects.count(), 1) self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1") # Now modify that inline self.post_data['grommet_set-INITIAL_FORMS'] = "1" self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code) self.post_data['grommet_set-0-name'] = "Grommet 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Grommet.objects.count(), 1) self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated") def test_char_pk_inline(self): "A model with a character PK can be saved as inlines. Regression for #10992" # First add a new inline self.post_data['doohickey_set-0-code'] = "DH1" self.post_data['doohickey_set-0-name'] = "Doohickey 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(DooHickey.objects.count(), 1) self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1") # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="doohickey_set-0-code"') # Now resave that inline self.post_data['doohickey_set-INITIAL_FORMS'] = "1" self.post_data['doohickey_set-0-code'] = "DH1" self.post_data['doohickey_set-0-name'] = "Doohickey 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(DooHickey.objects.count(), 1) self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1") # Now modify that inline self.post_data['doohickey_set-INITIAL_FORMS'] = "1" self.post_data['doohickey_set-0-code'] = "DH1" self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(DooHickey.objects.count(), 1) self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated") def test_integer_pk_inline(self): "A model with an integer PK can be saved as inlines. 
Regression for #10992" # First add a new inline self.post_data['whatsit_set-0-index'] = "42" self.post_data['whatsit_set-0-name'] = "Whatsit 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Whatsit.objects.count(), 1) self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1") # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="whatsit_set-0-index"') # Now resave that inline self.post_data['whatsit_set-INITIAL_FORMS'] = "1" self.post_data['whatsit_set-0-index'] = "42" self.post_data['whatsit_set-0-name'] = "Whatsit 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Whatsit.objects.count(), 1) self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1") # Now modify that inline self.post_data['whatsit_set-INITIAL_FORMS'] = "1" self.post_data['whatsit_set-0-index'] = "42" self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Whatsit.objects.count(), 1) self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated") def test_inherited_inline(self): "An inherited model can be saved as inlines. Regression for #11042" # First add a new inline self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(FancyDoodad.objects.count(), 1) self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1") doodad_pk = FancyDoodad.objects.all()[0].pk # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"') # Now resave that inline self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1" self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk) self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(FancyDoodad.objects.count(), 1) self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1") # Now modify that inline self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1" self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk) self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(FancyDoodad.objects.count(), 1) self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated") def test_ordered_inline(self): """ An inline with an editable ordering fields is updated correctly. """ # Create some objects with an initial ordering Category.objects.create(id=1, order=1, collector=self.collector) Category.objects.create(id=2, order=2, collector=self.collector) Category.objects.create(id=3, order=0, collector=self.collector) Category.objects.create(id=4, order=0, collector=self.collector) # NB: The order values must be changed so that the items are reordered. 
self.post_data.update({ "name": "Frederick Clegg", "category_set-TOTAL_FORMS": "7", "category_set-INITIAL_FORMS": "4", "category_set-MAX_NUM_FORMS": "0", "category_set-0-order": "14", "category_set-0-id": "1", "category_set-0-collector": "1", "category_set-1-order": "13", "category_set-1-id": "2", "category_set-1-collector": "1", "category_set-2-order": "1", "category_set-2-id": "3", "category_set-2-collector": "1", "category_set-3-order": "0", "category_set-3-id": "4", "category_set-3-collector": "1", "category_set-4-order": "", "category_set-4-id": "", "category_set-4-collector": "1", "category_set-5-order": "", "category_set-5-id": "", "category_set-5-collector": "1", "category_set-6-order": "", "category_set-6-id": "", "category_set-6-collector": "1", }) collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) # Successful post will redirect self.assertEqual(response.status_code, 302) # The order values have been applied to the right objects self.assertEqual(self.collector.category_set.count(), 4) self.assertEqual(Category.objects.get(id=1).order, 14) self.assertEqual(Category.objects.get(id=2).order, 13) self.assertEqual(Category.objects.get(id=3).order, 1) self.assertEqual(Category.objects.get(id=4).order, 0) @override_settings(ROOT_URLCONF='admin_views.urls') class NeverCacheTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') def setUp(self): self.client.force_login(self.superuser) def test_admin_index(self): "Check the never-cache status of the main index" response = self.client.get(reverse('admin:index')) self.assertEqual(get_max_age(response), 0) def test_app_index(self): "Check the never-cache status of an application index" response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertEqual(get_max_age(response), 0) def test_model_index(self): "Check the never-cache status of a model index" response = self.client.get(reverse('admin:admin_views_fabric_changelist')) self.assertEqual(get_max_age(response), 0) def test_model_add(self): "Check the never-cache status of a model add page" response = self.client.get(reverse('admin:admin_views_fabric_add')) self.assertEqual(get_max_age(response), 0) def test_model_view(self): "Check the never-cache status of a model edit page" response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertEqual(get_max_age(response), 0) def test_model_history(self): "Check the never-cache status of a model history page" response = self.client.get(reverse('admin:admin_views_section_history', args=(self.s1.pk,))) self.assertEqual(get_max_age(response), 0) def test_model_delete(self): "Check the never-cache status of a model delete page" response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))) self.assertEqual(get_max_age(response), 0) def test_login(self): "Check the never-cache status of login views" self.client.logout() response = self.client.get(reverse('admin:index')) self.assertEqual(get_max_age(response), 0) def test_logout(self): "Check the never-cache status of logout view" response = self.client.get(reverse('admin:logout')) self.assertEqual(get_max_age(response), 0) def test_password_change(self): "Check the never-cache status of the password change view" self.client.logout() response = 
self.client.get(reverse('admin:password_change')) self.assertIsNone(get_max_age(response)) def test_password_change_done(self): "Check the never-cache status of the password change done view" response = self.client.get(reverse('admin:password_change_done')) self.assertIsNone(get_max_age(response)) def test_JS_i18n(self): "Check the never-cache status of the JavaScript i18n view" response = self.client.get(reverse('admin:jsi18n')) self.assertIsNone(get_max_age(response)) @override_settings(ROOT_URLCONF='admin_views.urls') class PrePopulatedTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def setUp(self): self.client.force_login(self.superuser) def test_prepopulated_on(self): response = self.client.get(reverse('admin:admin_views_prepopulatedpost_add')) self.assertContains(response, "&quot;id&quot;: &quot;#id_slug&quot;") self.assertContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]") self.assertContains(response, "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;") def test_prepopulated_off(self): response = self.client.get(reverse('admin:admin_views_prepopulatedpost_change', args=(self.p1.pk,))) self.assertContains(response, "A Long Title") self.assertNotContains(response, "&quot;id&quot;: &quot;#id_slug&quot;") self.assertNotContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]") self.assertNotContains( response, "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;" ) @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True) def test_prepopulated_maxlength_localized(self): """ Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure that maxLength (in the JavaScript) is rendered without separators. """ response = self.client.get(reverse('admin:admin_views_prepopulatedpostlargeslug_add')) self.assertContains(response, "&quot;maxLength&quot;: 1000") # instead of 1,000 def test_view_only_add_form(self): """ PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug' which is present in the add view, even if the ModelAdmin.has_change_permission() returns False. """ response = self.client.get(reverse('admin7:admin_views_prepopulatedpost_add')) self.assertContains(response, 'data-prepopulated-fields=') self.assertContains(response, '&quot;id&quot;: &quot;#id_slug&quot;') def test_view_only_change_form(self): """ PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'. That doesn't break a view-only change view. 
""" response = self.client.get(reverse('admin7:admin_views_prepopulatedpost_change', args=(self.p1.pk,))) self.assertContains(response, 'data-prepopulated-fields="[]"') self.assertContains(response, '<div class="readonly">%s</div>' % self.p1.slug) @override_settings(ROOT_URLCONF='admin_views.urls') class SeleniumTests(AdminSeleniumTestCase): available_apps = ['admin_views'] + AdminSeleniumTestCase.available_apps def setUp(self): self.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') self.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def test_login_button_centered(self): self.selenium.get(self.live_server_url + reverse('admin:login')) button = self.selenium.find_element_by_css_selector('.submit-row input') offset_left = button.get_property('offsetLeft') offset_right = ( button.get_property('offsetParent').get_property('offsetWidth') - (offset_left + button.get_property('offsetWidth')) ) # Use assertAlmostEqual to avoid pixel rounding errors. self.assertAlmostEqual(offset_left, offset_right, delta=3) def test_prepopulated_fields(self): """ The JavaScript-automated prepopulated fields work with the main form and with stacked and tabular inlines. Refs #13068, #9264, #9983, #9784. """ self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_mainprepopulated_add')) self.wait_for('.select2') # Main form ---------------------------------------------------------- self.selenium.find_element_by_id('id_pubdate').send_keys('2012-02-18') self.select_option('#id_status', 'option two') self.selenium.find_element_by_id('id_name').send_keys(' the mAin nÀMë and it\'s awεšomeıııİ') slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value') slug3 = self.selenium.find_element_by_id('id_slug3').get_attribute('value') self.assertEqual(slug1, 'the-main-name-and-its-awesomeiiii-2012-02-18') self.assertEqual(slug2, 'option-two-the-main-name-and-its-awesomeiiii') self.assertEqual(slug3, 'the-main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131i') # Stacked inlines ---------------------------------------------------- # Initial inline self.selenium.find_element_by_id('id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17') self.select_option('#id_relatedprepopulated_set-0-status', 'option one') self.selenium.find_element_by_id('id_relatedprepopulated_set-0-name').send_keys( ' here is a sŤāÇkeð inline ! ' ) slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug2').get_attribute('value') self.assertEqual(slug1, 'here-is-a-stacked-inline-2011-12-17') self.assertEqual(slug2, 'option-one-here-is-a-stacked-inline') initial_select2_inputs = self.selenium.find_elements_by_class_name('select2-selection') # Inline formsets have empty/invisible forms. # Only the 4 visible select2 inputs are initialized. 
num_initial_select2_inputs = len(initial_select2_inputs) self.assertEqual(num_initial_select2_inputs, 4) # Add an inline self.selenium.find_elements_by_link_text('Add another Related prepopulated')[0].click() self.assertEqual( len(self.selenium.find_elements_by_class_name('select2-selection')), num_initial_select2_inputs + 2 ) self.selenium.find_element_by_id('id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25') self.select_option('#id_relatedprepopulated_set-1-status', 'option two') self.selenium.find_element_by_id('id_relatedprepopulated_set-1-name').send_keys( ' now you haVe anöther sŤāÇkeð inline with a very ... ' 'loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... ' ) slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug2').get_attribute('value') # 50 characters maximum for slug1 field self.assertEqual(slug1, 'now-you-have-another-stacked-inline-with-a-very-lo') # 60 characters maximum for slug2 field self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-with-a-very-l') # Tabular inlines ---------------------------------------------------- # Initial inline self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07') self.select_option('#id_relatedprepopulated_set-2-0-status', 'option two') self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-name').send_keys( 'And now, with a tÃbűlaŘ inline !!!' ) slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug2').get_attribute('value') self.assertEqual(slug1, 'and-now-with-a-tabular-inline-1234-12-07') self.assertEqual(slug2, 'option-two-and-now-with-a-tabular-inline') # Add an inline # Button may be outside the browser frame. element = self.selenium.find_elements_by_link_text('Add another Related prepopulated')[1] self.selenium.execute_script('window.scrollTo(0, %s);' % element.location['y']) element.click() self.assertEqual( len(self.selenium.find_elements_by_class_name('select2-selection')), num_initial_select2_inputs + 4 ) self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22') self.select_option('#id_relatedprepopulated_set-2-1-status', 'option one') self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-name').send_keys( r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters' ) slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug2').get_attribute('value') self.assertEqual(slug1, 'tabular-inline-with-ignored-characters-1981-08-22') self.assertEqual(slug2, 'option-one-tabular-inline-with-ignored-characters') # Add an inline without an initial inline. # The button is outside of the browser frame. 
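# Scroll to the bottom of the page so the link is within the viewport before clicking it.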
self.selenium.execute_script("window.scrollTo(0, document.body.scrollHeight);") self.selenium.find_elements_by_link_text('Add another Related prepopulated')[2].click() self.assertEqual( len(self.selenium.find_elements_by_class_name('select2-selection')), num_initial_select2_inputs + 6 ) # Save and check that everything is properly stored in the database with self.wait_page_loaded(): self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.assertEqual(MainPrepopulated.objects.all().count(), 1) MainPrepopulated.objects.get( name=' the mAin nÀMë and it\'s awεšomeıııİ', pubdate='2012-02-18', status='option two', slug1='the-main-name-and-its-awesomeiiii-2012-02-18', slug2='option-two-the-main-name-and-its-awesomeiiii', slug3='the-main-nàmë-and-its-awεšomeıııi', ) self.assertEqual(RelatedPrepopulated.objects.all().count(), 4) RelatedPrepopulated.objects.get( name=' here is a sŤāÇkeð inline ! ', pubdate='2011-12-17', status='option one', slug1='here-is-a-stacked-inline-2011-12-17', slug2='option-one-here-is-a-stacked-inline', ) RelatedPrepopulated.objects.get( # 75 characters in name field name=' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooo', pubdate='1999-01-25', status='option two', slug1='now-you-have-another-stacked-inline-with-a-very-lo', slug2='option-two-now-you-have-another-stacked-inline-with-a-very-l', ) RelatedPrepopulated.objects.get( name='And now, with a tÃbűlaŘ inline !!!', pubdate='1234-12-07', status='option two', slug1='and-now-with-a-tabular-inline-1234-12-07', slug2='option-two-and-now-with-a-tabular-inline', ) RelatedPrepopulated.objects.get( name=r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters', pubdate='1981-08-22', status='option one', slug1='tabular-inline-with-ignored-characters-1981-08-22', slug2='option-one-tabular-inline-with-ignored-characters', ) def test_populate_existing_object(self): """ The prepopulation works for existing objects too, as long as the original field is empty (#19082). """ # Slugs are empty to start with. item = MainPrepopulated.objects.create( name=' this is the mAin nÀMë', pubdate='2012-02-18', status='option two', slug1='', slug2='', ) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) object_url = self.live_server_url + reverse('admin:admin_views_mainprepopulated_change', args=(item.id,)) self.selenium.get(object_url) self.selenium.find_element_by_id('id_name').send_keys(' the best') # The slugs got prepopulated since they were originally empty slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value') self.assertEqual(slug1, 'this-is-the-main-name-the-best-2012-02-18') self.assertEqual(slug2, 'option-two-this-is-the-main-name-the-best') # Save the object with self.wait_page_loaded(): self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.selenium.get(object_url) self.selenium.find_element_by_id('id_name').send_keys(' hello') # The slugs got prepopulated didn't change since they were originally not empty slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value') self.assertEqual(slug1, 'this-is-the-main-name-the-best-2012-02-18') self.assertEqual(slug2, 'option-two-this-is-the-main-name-the-best') def test_collapsible_fieldset(self): """ The 'collapse' class in fieldsets definition allows to show/hide the appropriate field section. 
""" self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_add')) self.assertFalse(self.selenium.find_element_by_id('id_title').is_displayed()) self.selenium.find_elements_by_link_text('Show')[0].click() self.assertTrue(self.selenium.find_element_by_id('id_title').is_displayed()) self.assertEqual(self.selenium.find_element_by_id('fieldsetcollapser0').text, "Hide") def test_first_field_focus(self): """JavaScript-assisted auto-focus on first usable form field.""" # First form field has a single widget self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) with self.wait_page_loaded(): self.selenium.get(self.live_server_url + reverse('admin:admin_views_picture_add')) self.assertEqual( self.selenium.switch_to.active_element, self.selenium.find_element_by_id('id_name') ) # First form field has a MultiWidget with self.wait_page_loaded(): self.selenium.get(self.live_server_url + reverse('admin:admin_views_reservation_add')) self.assertEqual( self.selenium.switch_to.active_element, self.selenium.find_element_by_id('id_start_date_0') ) def test_cancel_delete_confirmation(self): "Cancelling the deletion of an object takes the user back one page." pizza = Pizza.objects.create(name="Double Cheese") url = reverse('admin:admin_views_pizza_change', args=(pizza.id,)) full_url = self.live_server_url + url self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(full_url) self.selenium.find_element_by_class_name('deletelink').click() # Click 'cancel' on the delete page. self.selenium.find_element_by_class_name('cancel-link').click() # Wait until we're back on the change page. self.wait_for_text('#content h1', 'Change pizza') self.assertEqual(self.selenium.current_url, full_url) self.assertEqual(Pizza.objects.count(), 1) def test_cancel_delete_related_confirmation(self): """ Cancelling the deletion of an object with relations takes the user back one page. """ pizza = Pizza.objects.create(name="Double Cheese") topping1 = Topping.objects.create(name="Cheddar") topping2 = Topping.objects.create(name="Mozzarella") pizza.toppings.add(topping1, topping2) url = reverse('admin:admin_views_pizza_change', args=(pizza.id,)) full_url = self.live_server_url + url self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(full_url) self.selenium.find_element_by_class_name('deletelink').click() # Click 'cancel' on the delete page. self.selenium.find_element_by_class_name('cancel-link').click() # Wait until we're back on the change page. self.wait_for_text('#content h1', 'Change pizza') self.assertEqual(self.selenium.current_url, full_url) self.assertEqual(Pizza.objects.count(), 1) self.assertEqual(Topping.objects.count(), 2) def test_list_editable_popups(self): """ list_editable foreign keys have add/change popups. 
""" from selenium.webdriver.support.ui import Select s1 = Section.objects.create(name='Test section') Article.objects.create( title='foo', content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=s1, ) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_changelist')) # Change popup self.selenium.find_element_by_id('change_id_form-0-section').click() self.wait_for_and_switch_to_popup() self.wait_for_text('#content h1', 'Change section') name_input = self.selenium.find_element_by_id('id_name') name_input.clear() name_input.send_keys('<i>edited section</i>') self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) # Hide sidebar. toggle_button = self.selenium.find_element_by_css_selector('#toggle-nav-sidebar') toggle_button.click() select = Select(self.selenium.find_element_by_id('id_form-0-section')) self.assertEqual(select.first_selected_option.text, '<i>edited section</i>') # Rendered select2 input. select2_display = self.selenium.find_element_by_class_name('select2-selection__rendered') # Clear button (×\n) is included in text. self.assertEqual(select2_display.text, '×\n<i>edited section</i>') # Add popup self.selenium.find_element_by_id('add_id_form-0-section').click() self.wait_for_and_switch_to_popup() self.wait_for_text('#content h1', 'Add section') self.selenium.find_element_by_id('id_name').send_keys('new section') self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) select = Select(self.selenium.find_element_by_id('id_form-0-section')) self.assertEqual(select.first_selected_option.text, 'new section') select2_display = self.selenium.find_element_by_class_name('select2-selection__rendered') # Clear button (×\n) is included in text. 
self.assertEqual(select2_display.text, '×\nnew section') def test_inline_uuid_pk_edit_with_popup(self): from selenium.webdriver.support.ui import Select parent = ParentWithUUIDPK.objects.create(title='test') related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,)) self.selenium.get(self.live_server_url + change_url) self.selenium.find_element_by_id('change_id_parent').click() self.wait_for_and_switch_to_popup() self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) select = Select(self.selenium.find_element_by_id('id_parent')) self.assertEqual(select.first_selected_option.text, str(parent.id)) self.assertEqual(select.first_selected_option.get_attribute('value'), str(parent.id)) def test_inline_uuid_pk_add_with_popup(self): from selenium.webdriver.support.ui import Select self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_relatedwithuuidpkmodel_add')) self.selenium.find_element_by_id('add_id_parent').click() self.wait_for_and_switch_to_popup() self.selenium.find_element_by_id('id_title').send_keys('test') self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) select = Select(self.selenium.find_element_by_id('id_parent')) uuid_id = str(ParentWithUUIDPK.objects.first().id) self.assertEqual(select.first_selected_option.text, uuid_id) self.assertEqual(select.first_selected_option.get_attribute('value'), uuid_id) def test_inline_uuid_pk_delete_with_popup(self): from selenium.webdriver.support.ui import Select parent = ParentWithUUIDPK.objects.create(title='test') related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,)) self.selenium.get(self.live_server_url + change_url) self.selenium.find_element_by_id('delete_id_parent').click() self.wait_for_and_switch_to_popup() self.selenium.find_element_by_xpath('//input[@value="Yes, I’m sure"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) select = Select(self.selenium.find_element_by_id('id_parent')) self.assertEqual(ParentWithUUIDPK.objects.count(), 0) self.assertEqual(select.first_selected_option.text, '---------') self.assertEqual(select.first_selected_option.get_attribute('value'), '') def test_inline_with_popup_cancel_delete(self): """Clicking "No, take me back" on a delete popup closes the window.""" parent = ParentWithUUIDPK.objects.create(title='test') related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,)) self.selenium.get(self.live_server_url + change_url) self.selenium.find_element_by_id('delete_id_parent').click() self.wait_for_and_switch_to_popup() self.selenium.find_element_by_xpath('//a[text()="No, take me back"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0])
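# The popup should have closed, leaving only the original browser window open.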
self.assertEqual(len(self.selenium.window_handles), 1) def test_list_editable_raw_id_fields(self): parent = ParentWithUUIDPK.objects.create(title='test') parent2 = ParentWithUUIDPK.objects.create(title='test2') RelatedWithUUIDPKModel.objects.create(parent=parent) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_changelist', current_app=site2.name) self.selenium.get(self.live_server_url + change_url) self.selenium.find_element_by_id('lookup_id_form-0-parent').click() self.wait_for_and_switch_to_popup() # Select "parent2" in the popup. self.selenium.find_element_by_link_text(str(parent2.pk)).click() self.selenium.switch_to.window(self.selenium.window_handles[0]) # The newly selected pk should appear in the raw id input. value = self.selenium.find_element_by_id('id_form-0-parent').get_attribute('value') self.assertEqual(value, str(parent2.pk)) def test_input_element_font(self): """ Browsers' default stylesheets override the font of inputs. The admin adds additional CSS to handle this. """ self.selenium.get(self.live_server_url + reverse('admin:login')) element = self.selenium.find_element_by_id('id_username') # Some browsers quote the fonts, some don't. fonts = [ font.strip().strip('"') for font in element.value_of_css_property('font-family').split(',') ] self.assertEqual( fonts, ['Roboto', 'Lucida Grande', 'Verdana', 'Arial', 'sans-serif'], ) def test_search_input_filtered_page(self): Person.objects.create(name='Guido van Rossum', gender=1, alive=True) Person.objects.create(name='Grace Hopper', gender=1, alive=False) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) person_url = reverse('admin:admin_views_person_changelist') + '?q=Gui' self.selenium.get(self.live_server_url + person_url) self.assertGreater( self.selenium.find_element_by_id('searchbar').rect['width'], 50, ) @override_settings(ROOT_URLCONF='admin_views.urls') class ReadonlyTest(AdminFieldExtractionMixin, TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_readonly_get(self): response = self.client.get(reverse('admin:admin_views_post_add')) self.assertNotContains(response, 'name="posted"') # 3 fields + 2 submit buttons + 5 inline management form fields + 2 # hidden fields for inlines + 1 field for the inline + 2 empty forms self.assertContains(response, "<input", count=16) self.assertContains(response, formats.localize(datetime.date.today())) self.assertContains(response, "<label>Awesomeness level:</label>") self.assertContains(response, "Very awesome.") self.assertContains(response, "Unknown coolness.") self.assertContains(response, "foo") # Multiline text in a readonly field gets <br> tags self.assertContains(response, 'Multiline<br>test<br>string') self.assertContains(response, '<div class="readonly">Multiline<br>html<br>content</div>', html=True) self.assertContains(response, 'InlineMultiline<br>test<br>string') self.assertContains(response, formats.localize(datetime.date.today() - datetime.timedelta(days=7))) self.assertContains(response, '<div class="form-row field-coolness">') self.assertContains(response, '<div class="form-row field-awesomeness_level">') self.assertContains(response, '<div class="form-row field-posted">') self.assertContains(response, '<div class="form-row field-value">')
self.assertContains(response, '<div class="form-row">') self.assertContains(response, '<div class="help">', 3) self.assertContains( response, '<div class="help">Some help text for the title (with Unicode ŠĐĆŽćžšđ)</div>', html=True ) self.assertContains( response, '<div class="help">Some help text for the content (with Unicode ŠĐĆŽćžšđ)</div>', html=True ) self.assertContains( response, '<div class="help">Some help text for the date (with Unicode ŠĐĆŽćžšđ)</div>', html=True ) p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff") response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,))) self.assertContains(response, "%d amount of cool" % p.pk) def test_readonly_text_field(self): p = Post.objects.create( title="Readonly test", content="test", readonly_content='test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest', ) Link.objects.create( url="http://www.djangoproject.com", post=p, readonly_link_content="test\r\nlink", ) response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,))) # Checking readonly field. self.assertContains(response, 'test<br><br>test<br><br>test<br><br>test') # Checking readonly field in inline. self.assertContains(response, 'test<br>link') def test_readonly_post(self): data = { "title": "Django Got Readonly Fields", "content": "This is an incredible development.", "link_set-TOTAL_FORMS": "1", "link_set-INITIAL_FORMS": "0", "link_set-MAX_NUM_FORMS": "0", } response = self.client.post(reverse('admin:admin_views_post_add'), data) self.assertEqual(response.status_code, 302) self.assertEqual(Post.objects.count(), 1) p = Post.objects.get() self.assertEqual(p.posted, datetime.date.today()) data["posted"] = "10-8-1990" # some date that's not today response = self.client.post(reverse('admin:admin_views_post_add'), data) self.assertEqual(response.status_code, 302) self.assertEqual(Post.objects.count(), 2) p = Post.objects.order_by('-id')[0] self.assertEqual(p.posted, datetime.date.today()) def test_readonly_manytomany(self): "Regression test for #13004" response = self.client.get(reverse('admin:admin_views_pizza_add')) self.assertEqual(response.status_code, 200) def test_user_password_change_limited_queryset(self): su = User.objects.filter(is_superuser=True)[0] response = self.client.get(reverse('admin2:auth_user_password_change', args=(su.pk,))) self.assertEqual(response.status_code, 404) def test_change_form_renders_correct_null_choice_value(self): """ Regression test for #17911. """ choice = Choice.objects.create(choice=None) response = self.client.get(reverse('admin:admin_views_choice_change', args=(choice.pk,))) self.assertContains(response, '<div class="readonly">No opinion</div>', html=True) def test_readonly_foreignkey_links(self): """ ForeignKey readonly fields render as links if the target model is registered in admin. """ chapter = Chapter.objects.create( title='Chapter 1', content='content', book=Book.objects.create(name='Book 1'), ) language = Language.objects.create(iso='_40', name='Test') obj = ReadOnlyRelatedField.objects.create( chapter=chapter, language=language, user=self.superuser, ) response = self.client.get( reverse('admin:admin_views_readonlyrelatedfield_change', args=(obj.pk,)), ) # Related ForeignKey object registered in admin. user_url = reverse('admin:auth_user_change', args=(self.superuser.pk,)) self.assertContains( response, '<div class="readonly"><a href="%s">super</a></div>' % user_url, html=True, ) # Related ForeignKey with the string primary key registered in admin. 
language_url = reverse( 'admin:admin_views_language_change', args=(quote(language.pk),), ) self.assertContains( response, '<div class="readonly"><a href="%s">_40</a></div>' % language_url, html=True, ) # Related ForeignKey object not registered in admin. self.assertContains(response, '<div class="readonly">Chapter 1</div>', html=True) def test_readonly_manytomany_backwards_ref(self): """ Regression test for #16433 - backwards references for related objects broke if the related field is read-only due to the help_text attribute """ topping = Topping.objects.create(name='Salami') pizza = Pizza.objects.create(name='Americano') pizza.toppings.add(topping) response = self.client.get(reverse('admin:admin_views_topping_add')) self.assertEqual(response.status_code, 200) def test_readonly_manytomany_forwards_ref(self): topping = Topping.objects.create(name='Salami') pizza = Pizza.objects.create(name='Americano') pizza.toppings.add(topping) response = self.client.get(reverse('admin:admin_views_pizza_change', args=(pizza.pk,))) self.assertContains(response, '<label>Toppings:</label>', html=True) self.assertContains(response, '<div class="readonly">Salami</div>', html=True) def test_readonly_onetoone_backwards_ref(self): """ Can reference a reverse OneToOneField in ModelAdmin.readonly_fields. """ v1 = Villain.objects.create(name='Adam') pl = Plot.objects.create(name='Test Plot', team_leader=v1, contact=v1) pd = PlotDetails.objects.create(details='Brand New Plot', plot=pl) response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,))) field = self.get_admin_readonly_field(response, 'plotdetails') pd_url = reverse('admin:admin_views_plotdetails_change', args=(pd.pk,)) self.assertEqual(field.contents(), '<a href="%s">Brand New Plot</a>' % pd_url) # The reverse relation also works if the OneToOneField is null. 
pd.plot = None pd.save() response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,))) field = self.get_admin_readonly_field(response, 'plotdetails') self.assertEqual(field.contents(), '-') # default empty value def test_readonly_field_overrides(self): """ Regression test for #22087 - ModelForm Meta overrides are ignored by AdminReadonlyField """ p = FieldOverridePost.objects.create(title="Test Post", content="Test Content") response = self.client.get(reverse('admin:admin_views_fieldoverridepost_change', args=(p.pk,))) self.assertContains(response, '<div class="help">Overridden help text for the date</div>') self.assertContains(response, '<label for="id_public">Overridden public label:</label>', html=True) self.assertNotContains(response, 'Some help text for the date (with Unicode ŠĐĆŽćžšđ)') def test_correct_autoescaping(self): """ Make sure that non-field readonly elements are properly autoescaped (#24461) """ section = Section.objects.create(name='<a>evil</a>') response = self.client.get(reverse('admin:admin_views_section_change', args=(section.pk,))) self.assertNotContains(response, "<a>evil</a>", status_code=200) self.assertContains(response, "&lt;a&gt;evil&lt;/a&gt;", status_code=200) def test_label_suffix_translated(self): pizza = Pizza.objects.create(name='Americano') url = reverse('admin:admin_views_pizza_change', args=(pizza.pk,)) with self.settings(LANGUAGE_CODE='fr'): response = self.client.get(url) self.assertContains(response, '<label>Toppings\u00A0:</label>', html=True) @override_settings(ROOT_URLCONF='admin_views.urls') class LimitChoicesToInAdminTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_limit_choices_to_as_callable(self): """Test for ticket 2445 changes to admin.""" threepwood = Character.objects.create( username='threepwood', last_action=datetime.datetime.today() + datetime.timedelta(days=1), ) marley = Character.objects.create( username='marley', last_action=datetime.datetime.today() - datetime.timedelta(days=1), ) response = self.client.get(reverse('admin:admin_views_stumpjoke_add')) # The allowed option should appear twice; the limited option should not appear. self.assertContains(response, threepwood.username, count=2) self.assertNotContains(response, marley.username) @override_settings(ROOT_URLCONF='admin_views.urls') class RawIdFieldsTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_limit_choices_to(self): """Regression test for 14880""" actor = Actor.objects.create(name="Palin", age=27) Inquisition.objects.create(expected=True, leader=actor, country="England") Inquisition.objects.create(expected=False, leader=actor, country="Spain") response = self.client.get(reverse('admin:admin_views_sketch_add')) # Find the link m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content) self.assertTrue(m) # Got a match popup_url = m[1].decode().replace('&amp;', '&') # Handle relative links popup_url = urljoin(response.request['PATH_INFO'], popup_url) # Get the popup and verify the correct objects show up in the resulting # page. 
This step also tests integers, strings and booleans in the # lookup query string; in model we define inquisition field to have a # limit_choices_to option that includes a filter on a string field # (inquisition__actor__name), a filter on an integer field # (inquisition__actor__age), and a filter on a boolean field # (inquisition__expected). response2 = self.client.get(popup_url) self.assertContains(response2, "Spain") self.assertNotContains(response2, "England") def test_limit_choices_to_isnull_false(self): """Regression test for 20182""" Actor.objects.create(name="Palin", age=27) Actor.objects.create(name="Kilbraken", age=50, title="Judge") response = self.client.get(reverse('admin:admin_views_sketch_add')) # Find the link m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content) self.assertTrue(m) # Got a match popup_url = m[1].decode().replace('&amp;', '&') # Handle relative links popup_url = urljoin(response.request['PATH_INFO'], popup_url) # Get the popup and verify the correct objects show up in the resulting # page. This step tests field__isnull=0 gets parsed correctly from the # lookup query string; in model we define defendant0 field to have a # limit_choices_to option that includes "actor__title__isnull=False". response2 = self.client.get(popup_url) self.assertContains(response2, "Kilbraken") self.assertNotContains(response2, "Palin") def test_limit_choices_to_isnull_true(self): """Regression test for 20182""" Actor.objects.create(name="Palin", age=27) Actor.objects.create(name="Kilbraken", age=50, title="Judge") response = self.client.get(reverse('admin:admin_views_sketch_add')) # Find the link m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content) self.assertTrue(m) # Got a match popup_url = m[1].decode().replace('&amp;', '&') # Handle relative links popup_url = urljoin(response.request['PATH_INFO'], popup_url) # Get the popup and verify the correct objects show up in the resulting # page. This step tests field__isnull=1 gets parsed correctly from the # lookup query string; in model we define defendant1 field to have a # limit_choices_to option that includes "actor__title__isnull=True". response2 = self.client.get(popup_url) self.assertNotContains(response2, "Kilbraken") self.assertContains(response2, "Palin") def test_list_display_method_same_name_as_reverse_accessor(self): """ Should be able to use a ModelAdmin method in list_display that has the same name as a reverse model field ("sketch" in this case). """ actor = Actor.objects.create(name="Palin", age=27) Inquisition.objects.create(expected=True, leader=actor, country="England") response = self.client.get(reverse('admin:admin_views_inquisition_changelist')) self.assertContains(response, 'list-display-sketch') @override_settings(ROOT_URLCONF='admin_views.urls') class UserAdminTest(TestCase): """ Tests user CRUD functionality. 
""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True) cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False) cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True) def setUp(self): self.client.force_login(self.superuser) def test_save_button(self): user_count = User.objects.count() response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', }) new_user = User.objects.get(username='newuser') self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,))) self.assertEqual(User.objects.count(), user_count + 1) self.assertTrue(new_user.has_usable_password()) def test_save_continue_editing_button(self): user_count = User.objects.count() response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', '_continue': '1', }) new_user = User.objects.get(username='newuser') new_user_url = reverse('admin:auth_user_change', args=(new_user.pk,)) self.assertRedirects(response, new_user_url, fetch_redirect_response=False) self.assertEqual(User.objects.count(), user_count + 1) self.assertTrue(new_user.has_usable_password()) response = self.client.get(new_user_url) self.assertContains( response, '<li class="success">The user “<a href="%s">' '%s</a>” was added successfully. 
You may edit it again below.</li>' % (new_user_url, new_user), html=True, ) def test_password_mismatch(self): response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'mismatch', }) self.assertEqual(response.status_code, 200) self.assertFormError(response, 'adminform', 'password', []) self.assertFormError(response, 'adminform', 'password2', ['The two password fields didn’t match.']) def test_user_fk_add_popup(self): """User addition through a FK popup should return the appropriate JavaScript response.""" response = self.client.get(reverse('admin:admin_views_album_add')) self.assertContains(response, reverse('admin:auth_user_add')) self.assertContains(response, 'class="related-widget-wrapper-link add-related" id="add_id_owner"') response = self.client.get(reverse('admin:auth_user_add') + '?_popup=1') self.assertNotContains(response, 'name="_continue"') self.assertNotContains(response, 'name="_addanother"') data = { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', '_popup': '1', '_save': '1', } response = self.client.post(reverse('admin:auth_user_add') + '?_popup=1', data, follow=True) self.assertContains(response, '&quot;obj&quot;: &quot;newuser&quot;') def test_user_fk_change_popup(self): """User change through a FK popup should return the appropriate JavaScript response.""" response = self.client.get(reverse('admin:admin_views_album_add')) self.assertContains(response, reverse('admin:auth_user_change', args=('__fk__',))) self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"') user = User.objects.get(username='changeuser') url = reverse('admin:auth_user_change', args=(user.pk,)) + '?_popup=1' response = self.client.get(url) self.assertNotContains(response, 'name="_continue"') self.assertNotContains(response, 'name="_addanother"') data = { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', 'last_login_0': '2007-05-30', 'last_login_1': '13:20:10', 'date_joined_0': '2007-05-30', 'date_joined_1': '13:20:10', '_popup': '1', '_save': '1', } response = self.client.post(url, data, follow=True) self.assertContains(response, '&quot;obj&quot;: &quot;newuser&quot;') self.assertContains(response, '&quot;action&quot;: &quot;change&quot;') def test_user_fk_delete_popup(self): """User deletion through a FK popup should return the appropriate JavaScript response.""" response = self.client.get(reverse('admin:admin_views_album_add')) self.assertContains(response, reverse('admin:auth_user_delete', args=('__fk__',))) self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"') user = User.objects.get(username='changeuser') url = reverse('admin:auth_user_delete', args=(user.pk,)) + '?_popup=1' response = self.client.get(url) self.assertEqual(response.status_code, 200) data = { 'post': 'yes', '_popup': '1', } response = self.client.post(url, data, follow=True) self.assertContains(response, '&quot;action&quot;: &quot;delete&quot;') def test_save_add_another_button(self): user_count = User.objects.count() response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', '_addanother': '1', }) new_user = User.objects.order_by('-id')[0] self.assertRedirects(response, reverse('admin:auth_user_add')) self.assertEqual(User.objects.count(), user_count + 1) self.assertTrue(new_user.has_usable_password()) def 
test_user_permission_performance(self): u = User.objects.all()[0] # Don't depend on a warm cache, see #17377. ContentType.objects.clear_cache() with self.assertNumQueries(10): response = self.client.get(reverse('admin:auth_user_change', args=(u.pk,))) self.assertEqual(response.status_code, 200) def test_form_url_present_in_context(self): u = User.objects.all()[0] response = self.client.get(reverse('admin3:auth_user_password_change', args=(u.pk,))) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['form_url'], 'pony') @override_settings(ROOT_URLCONF='admin_views.urls') class GroupAdminTest(TestCase): """ Tests group CRUD functionality. """ @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_save_button(self): group_count = Group.objects.count() response = self.client.post(reverse('admin:auth_group_add'), { 'name': 'newgroup', }) Group.objects.order_by('-id')[0] self.assertRedirects(response, reverse('admin:auth_group_changelist')) self.assertEqual(Group.objects.count(), group_count + 1) def test_group_permission_performance(self): g = Group.objects.create(name="test_group") # Ensure no queries are skipped due to cached content type for Group. ContentType.objects.clear_cache() with self.assertNumQueries(8): response = self.client.get(reverse('admin:auth_group_change', args=(g.pk,))) self.assertEqual(response.status_code, 200) @override_settings(ROOT_URLCONF='admin_views.urls') class CSSTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def setUp(self): self.client.force_login(self.superuser) def test_field_prefix_css_classes(self): """ Fields have a CSS class name with a 'field-' prefix. """ response = self.client.get(reverse('admin:admin_views_post_add')) # The main form self.assertContains(response, 'class="form-row field-title"') self.assertContains(response, 'class="form-row field-content"') self.assertContains(response, 'class="form-row field-public"') self.assertContains(response, 'class="form-row field-awesomeness_level"') self.assertContains(response, 'class="form-row field-coolness"') self.assertContains(response, 'class="form-row field-value"') self.assertContains(response, 'class="form-row"') # The lambda function # The tabular inline self.assertContains(response, '<td class="field-url">') self.assertContains(response, '<td class="field-posted">') def test_index_css_classes(self): """ CSS class names are used for each app and model on the admin index pages (#17050). 
""" # General index page response = self.client.get(reverse('admin:index')) self.assertContains(response, '<div class="app-admin_views module') self.assertContains(response, '<tr class="model-actor">') self.assertContains(response, '<tr class="model-album">') # App index page response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertContains(response, '<div class="app-admin_views module') self.assertContains(response, '<tr class="model-actor">') self.assertContains(response, '<tr class="model-album">') def test_app_model_in_form_body_class(self): """ Ensure app and model tag are correctly read by change_form template """ response = self.client.get(reverse('admin:admin_views_section_add')) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_app_model_in_list_body_class(self): """ Ensure app and model tag are correctly read by change_list template """ response = self.client.get(reverse('admin:admin_views_section_changelist')) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_app_model_in_delete_confirmation_body_class(self): """ Ensure app and model tag are correctly read by delete_confirmation template """ response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_app_model_in_app_index_body_class(self): """ Ensure app and model tag are correctly read by app_index template """ response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertContains(response, '<body class=" dashboard app-admin_views') def test_app_model_in_delete_selected_confirmation_body_class(self): """ Ensure app and model tag are correctly read by delete_selected_confirmation template """ action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk], 'action': 'delete_selected', 'index': 0, } response = self.client.post(reverse('admin:admin_views_section_changelist'), action_data) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_changelist_field_classes(self): """ Cells of the change list table should contain the field name in their class attribute Refs #11195. 
""" Podcast.objects.create(name="Django Dose", release_date=datetime.date.today()) response = self.client.get(reverse('admin:admin_views_podcast_changelist')) self.assertContains(response, '<th class="field-name">') self.assertContains(response, '<td class="field-release_date nowrap">') self.assertContains(response, '<td class="action-checkbox">') try: import docutils except ImportError: docutils = None @unittest.skipUnless(docutils, "no docutils installed.") @override_settings(ROOT_URLCONF='admin_views.urls') @modify_settings(INSTALLED_APPS={'append': ['django.contrib.admindocs', 'django.contrib.flatpages']}) class AdminDocsTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_tags(self): response = self.client.get(reverse('django-admindocs-tags')) # The builtin tag group exists self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True) # A builtin tag exists in both the index and detail self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True) self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True) # An app tag exists in both the index and detail self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True) self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True) # The admin list tag group exists self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True) # An admin list tag exists in both the index and detail self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True) self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True) def test_filters(self): response = self.client.get(reverse('django-admindocs-filters')) # The builtin filter group exists self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True) # A builtin filter exists in both the index and detail self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True) self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True) @override_settings( ROOT_URLCONF='admin_views.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }], USE_I18N=False, ) class ValidXHTMLTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_lang_name_present(self): response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertNotContains(response, ' lang=""') self.assertNotContains(response, ' xml:lang=""') @override_settings(ROOT_URLCONF='admin_views.urls', USE_THOUSAND_SEPARATOR=True, USE_L10N=True) class DateHierarchyTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def assert_non_localized_year(self, response, year): """ 
The year is not localized with USE_THOUSAND_SEPARATOR (#15234). """ self.assertNotContains(response, formats.number_format(year)) def assert_contains_year_link(self, response, date): self.assertContains(response, '?release_date__year=%d"' % date.year) def assert_contains_month_link(self, response, date): self.assertContains( response, '?release_date__month=%d&amp;release_date__year=%d"' % ( date.month, date.year)) def assert_contains_day_link(self, response, date): self.assertContains( response, '?release_date__day=%d&amp;' 'release_date__month=%d&amp;release_date__year=%d"' % ( date.day, date.month, date.year)) def test_empty(self): """ No date hierarchy links display with empty changelist. """ response = self.client.get( reverse('admin:admin_views_podcast_changelist')) self.assertNotContains(response, 'release_date__year=') self.assertNotContains(response, 'release_date__month=') self.assertNotContains(response, 'release_date__day=') def test_single(self): """ Single day-level date hierarchy appears for single object. """ DATE = datetime.date(2000, 6, 30) Podcast.objects.create(release_date=DATE) url = reverse('admin:admin_views_podcast_changelist') response = self.client.get(url) self.assert_contains_day_link(response, DATE) self.assert_non_localized_year(response, 2000) def test_within_month(self): """ day-level links appear for changelist within single month. """ DATES = (datetime.date(2000, 6, 30), datetime.date(2000, 6, 15), datetime.date(2000, 6, 3)) for date in DATES: Podcast.objects.create(release_date=date) url = reverse('admin:admin_views_podcast_changelist') response = self.client.get(url) for date in DATES: self.assert_contains_day_link(response, date) self.assert_non_localized_year(response, 2000) def test_within_year(self): """ month-level links appear for changelist within single year. """ DATES = (datetime.date(2000, 1, 30), datetime.date(2000, 3, 15), datetime.date(2000, 5, 3)) for date in DATES: Podcast.objects.create(release_date=date) url = reverse('admin:admin_views_podcast_changelist') response = self.client.get(url) # no day-level links self.assertNotContains(response, 'release_date__day=') for date in DATES: self.assert_contains_month_link(response, date) self.assert_non_localized_year(response, 2000) def test_multiple_years(self): """ year-level links appear for year-spanning changelist. 
""" DATES = (datetime.date(2001, 1, 30), datetime.date(2003, 3, 15), datetime.date(2005, 5, 3)) for date in DATES: Podcast.objects.create(release_date=date) response = self.client.get( reverse('admin:admin_views_podcast_changelist')) # no day/month-level links self.assertNotContains(response, 'release_date__day=') self.assertNotContains(response, 'release_date__month=') for date in DATES: self.assert_contains_year_link(response, date) # and make sure GET parameters still behave correctly for date in DATES: url = '%s?release_date__year=%d' % ( reverse('admin:admin_views_podcast_changelist'), date.year) response = self.client.get(url) self.assert_contains_month_link(response, date) self.assert_non_localized_year(response, 2000) self.assert_non_localized_year(response, 2003) self.assert_non_localized_year(response, 2005) url = '%s?release_date__year=%d&release_date__month=%d' % ( reverse('admin:admin_views_podcast_changelist'), date.year, date.month) response = self.client.get(url) self.assert_contains_day_link(response, date) self.assert_non_localized_year(response, 2000) self.assert_non_localized_year(response, 2003) self.assert_non_localized_year(response, 2005) def test_related_field(self): questions_data = ( # (posted data, number of answers), (datetime.date(2001, 1, 30), 0), (datetime.date(2003, 3, 15), 1), (datetime.date(2005, 5, 3), 2), ) for date, answer_count in questions_data: question = Question.objects.create(posted=date) for i in range(answer_count): question.answer_set.create() response = self.client.get(reverse('admin:admin_views_answer_changelist')) for date, answer_count in questions_data: link = '?question__posted__year=%d"' % date.year if answer_count > 0: self.assertContains(response, link) else: self.assertNotContains(response, link) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminCustomSaveRelatedTests(TestCase): """ One can easily customize the way related objects are saved. Refs #16115. 
""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_should_be_able_to_edit_related_objects_on_add_view(self): post = { 'child_set-TOTAL_FORMS': '3', 'child_set-INITIAL_FORMS': '0', 'name': 'Josh Stone', 'child_set-0-name': 'Paul', 'child_set-1-name': 'Catherine', } self.client.post(reverse('admin:admin_views_parent_add'), post) self.assertEqual(1, Parent.objects.count()) self.assertEqual(2, Child.objects.count()) children_names = list(Child.objects.order_by('name').values_list('name', flat=True)) self.assertEqual('Josh Stone', Parent.objects.latest('id').name) self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names) def test_should_be_able_to_edit_related_objects_on_change_view(self): parent = Parent.objects.create(name='Josh Stone') paul = Child.objects.create(parent=parent, name='Paul') catherine = Child.objects.create(parent=parent, name='Catherine') post = { 'child_set-TOTAL_FORMS': '5', 'child_set-INITIAL_FORMS': '2', 'name': 'Josh Stone', 'child_set-0-name': 'Paul', 'child_set-0-id': paul.id, 'child_set-1-name': 'Catherine', 'child_set-1-id': catherine.id, } self.client.post(reverse('admin:admin_views_parent_change', args=(parent.id,)), post) children_names = list(Child.objects.order_by('name').values_list('name', flat=True)) self.assertEqual('Josh Stone', Parent.objects.latest('id').name) self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names) def test_should_be_able_to_edit_related_objects_on_changelist_view(self): parent = Parent.objects.create(name='Josh Rock') Child.objects.create(parent=parent, name='Paul') Child.objects.create(parent=parent, name='Catherine') post = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '0', 'form-0-id': parent.id, 'form-0-name': 'Josh Stone', '_save': 'Save' } self.client.post(reverse('admin:admin_views_parent_changelist'), post) children_names = list(Child.objects.order_by('name').values_list('name', flat=True)) self.assertEqual('Josh Stone', Parent.objects.latest('id').name) self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewLogoutTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def test_logout(self): self.client.force_login(self.superuser) response = self.client.get(reverse('admin:logout')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'registration/logged_out.html') self.assertEqual(response.request['PATH_INFO'], reverse('admin:logout')) self.assertFalse(response.context['has_permission']) self.assertNotContains(response, 'user-tools') # user-tools div shouldn't visible. def test_client_logout_url_can_be_used_to_login(self): response = self.client.get(reverse('admin:logout')) self.assertEqual(response.status_code, 302) # we should be redirected to the login page. # follow the redirect and test results. 
response = self.client.get(reverse('admin:logout'), follow=True) self.assertContains( response, '<input type="hidden" name="next" value="%s">' % reverse('admin:index'), ) self.assertTemplateUsed(response, 'admin/login.html') self.assertEqual(response.request['PATH_INFO'], reverse('admin:login')) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminUserMessageTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def send_message(self, level): """ Helper that sends a post to the dummy test methods and asserts that a message with the level has appeared in the response. """ action_data = { ACTION_CHECKBOX_NAME: [1], 'action': 'message_%s' % level, 'index': 0, } response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'), action_data, follow=True) self.assertContains(response, '<li class="%s">Test %s</li>' % (level, level), html=True) @override_settings(MESSAGE_LEVEL=10) # Set to DEBUG for this request def test_message_debug(self): self.send_message('debug') def test_message_info(self): self.send_message('info') def test_message_success(self): self.send_message('success') def test_message_warning(self): self.send_message('warning') def test_message_error(self): self.send_message('error') def test_message_extra_tags(self): action_data = { ACTION_CHECKBOX_NAME: [1], 'action': 'message_extra_tags', 'index': 0, } response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'), action_data, follow=True) self.assertContains(response, '<li class="extra_tag info">Test tags</li>', html=True) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminKeepChangeListFiltersTests(TestCase): admin_site = site @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret') def setUp(self): self.client.force_login(self.superuser) def assertURLEqual(self, url1, url2, msg_prefix=''): """ Assert that two URLs are equal despite the ordering of their querystring. Refs #22360. """ parsed_url1 = urlparse(url1) path1 = parsed_url1.path parsed_qs1 = dict(parse_qsl(parsed_url1.query)) parsed_url2 = urlparse(url2) path2 = parsed_url2.path parsed_qs2 = dict(parse_qsl(parsed_url2.query)) for parsed_qs in [parsed_qs1, parsed_qs2]: if '_changelist_filters' in parsed_qs: changelist_filters = parsed_qs['_changelist_filters'] parsed_filters = dict(parse_qsl(changelist_filters)) parsed_qs['_changelist_filters'] = parsed_filters self.assertEqual(path1, path2) self.assertEqual(parsed_qs1, parsed_qs2) def test_assert_url_equal(self): # Test equality. change_user_url = reverse('admin:auth_user_change', args=(self.joepublicuser.pk,)) self.assertURLEqual( 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ), 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ) ) # Test inequality. with self.assertRaises(AssertionError): self.assertURLEqual( 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ), 'http://testserver{}?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'.format( change_user_url ) ) # Ignore scheme and host. 
self.assertURLEqual( 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ), '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url) ) # Ignore ordering of querystring. self.assertURLEqual( '{}?is_staff__exact=0&is_superuser__exact=0'.format(reverse('admin:auth_user_changelist')), '{}?is_superuser__exact=0&is_staff__exact=0'.format(reverse('admin:auth_user_changelist')) ) # Ignore ordering of _changelist_filters. self.assertURLEqual( '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url), '{}?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'.format(change_user_url) ) def get_changelist_filters(self): return { 'is_superuser__exact': 0, 'is_staff__exact': 0, } def get_changelist_filters_querystring(self): return urlencode(self.get_changelist_filters()) def get_preserved_filters_querystring(self): return urlencode({ '_changelist_filters': self.get_changelist_filters_querystring() }) def get_sample_user_id(self): return self.joepublicuser.pk def get_changelist_url(self): return '%s?%s' % ( reverse('admin:auth_user_changelist', current_app=self.admin_site.name), self.get_changelist_filters_querystring(), ) def get_add_url(self, add_preserved_filters=True): url = reverse('admin:auth_user_add', current_app=self.admin_site.name) if add_preserved_filters: url = '%s?%s' % (url, self.get_preserved_filters_querystring()) return url def get_change_url(self, user_id=None, add_preserved_filters=True): if user_id is None: user_id = self.get_sample_user_id() url = reverse('admin:auth_user_change', args=(user_id,), current_app=self.admin_site.name) if add_preserved_filters: url = '%s?%s' % (url, self.get_preserved_filters_querystring()) return url def get_history_url(self, user_id=None): if user_id is None: user_id = self.get_sample_user_id() return "%s?%s" % ( reverse('admin:auth_user_history', args=(user_id,), current_app=self.admin_site.name), self.get_preserved_filters_querystring(), ) def get_delete_url(self, user_id=None): if user_id is None: user_id = self.get_sample_user_id() return "%s?%s" % ( reverse('admin:auth_user_delete', args=(user_id,), current_app=self.admin_site.name), self.get_preserved_filters_querystring(), ) def test_changelist_view(self): response = self.client.get(self.get_changelist_url()) self.assertEqual(response.status_code, 200) # Check the `change_view` link has the correct querystring. detail_link = re.search( '<a href="(.*?)">{}</a>'.format(self.joepublicuser.username), response.content.decode() ) self.assertURLEqual(detail_link[1], self.get_change_url()) def test_change_view(self): # Get the `change_view`. response = self.client.get(self.get_change_url()) self.assertEqual(response.status_code, 200) # Check the form action. form_action = re.search( '<form action="(.*?)" method="post" id="user_form" novalidate>', response.content.decode() ) self.assertURLEqual(form_action[1], '?%s' % self.get_preserved_filters_querystring()) # Check the history link. history_link = re.search( '<a href="(.*?)" class="historylink">History</a>', response.content.decode() ) self.assertURLEqual(history_link[1], self.get_history_url()) # Check the delete link. delete_link = re.search( '<a href="(.*?)" class="deletelink">Delete</a>', response.content.decode() ) self.assertURLEqual(delete_link[1], self.get_delete_url()) # Test redirect on "Save". 
post_data = { 'username': 'joepublic', 'last_login_0': '2007-05-30', 'last_login_1': '13:20:10', 'date_joined_0': '2007-05-30', 'date_joined_1': '13:20:10', } post_data['_save'] = 1 response = self.client.post(self.get_change_url(), data=post_data) self.assertRedirects(response, self.get_changelist_url()) post_data.pop('_save') # Test redirect on "Save and continue". post_data['_continue'] = 1 response = self.client.post(self.get_change_url(), data=post_data) self.assertRedirects(response, self.get_change_url()) post_data.pop('_continue') # Test redirect on "Save and add new". post_data['_addanother'] = 1 response = self.client.post(self.get_change_url(), data=post_data) self.assertRedirects(response, self.get_add_url()) post_data.pop('_addanother') def test_change_view_without_preserved_filters(self): response = self.client.get(self.get_change_url(add_preserved_filters=False)) # The action attribute is omitted. self.assertContains(response, '<form method="post" id="user_form" novalidate>') def test_add_view(self): # Get the `add_view`. response = self.client.get(self.get_add_url()) self.assertEqual(response.status_code, 200) # Check the form action. form_action = re.search( '<form action="(.*?)" method="post" id="user_form" novalidate>', response.content.decode() ) self.assertURLEqual(form_action[1], '?%s' % self.get_preserved_filters_querystring()) post_data = { 'username': 'dummy', 'password1': 'test', 'password2': 'test', } # Test redirect on "Save". post_data['_save'] = 1 response = self.client.post(self.get_add_url(), data=post_data) self.assertRedirects(response, self.get_change_url(User.objects.get(username='dummy').pk)) post_data.pop('_save') # Test redirect on "Save and continue". post_data['username'] = 'dummy2' post_data['_continue'] = 1 response = self.client.post(self.get_add_url(), data=post_data) self.assertRedirects(response, self.get_change_url(User.objects.get(username='dummy2').pk)) post_data.pop('_continue') # Test redirect on "Save and add new". post_data['username'] = 'dummy3' post_data['_addanother'] = 1 response = self.client.post(self.get_add_url(), data=post_data) self.assertRedirects(response, self.get_add_url()) post_data.pop('_addanother') def test_add_view_without_preserved_filters(self): response = self.client.get(self.get_add_url(add_preserved_filters=False)) # The action attribute is omitted. self.assertContains(response, '<form method="post" id="user_form" novalidate>') def test_delete_view(self): # Test redirect on "Delete". response = self.client.post(self.get_delete_url(), {'post': 'yes'}) self.assertRedirects(response, self.get_changelist_url()) def test_url_prefix(self): context = { 'preserved_filters': self.get_preserved_filters_querystring(), 'opts': User._meta, } prefixes = ('', '/prefix/', '/後台/') for prefix in prefixes: with self.subTest(prefix=prefix), override_script_prefix(prefix): url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name) self.assertURLEqual( self.get_changelist_url(), add_preserved_filters(context, url), ) class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests): admin_site = site2 @override_settings(ROOT_URLCONF='admin_views.urls') class TestLabelVisibility(TestCase): """ #11277 -Labels of hidden fields in admin were not hidden. 
""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_all_fields_visible(self): response = self.client.get(reverse('admin:admin_views_emptymodelvisible_add')) self.assert_fieldline_visible(response) self.assert_field_visible(response, 'first') self.assert_field_visible(response, 'second') def test_all_fields_hidden(self): response = self.client.get(reverse('admin:admin_views_emptymodelhidden_add')) self.assert_fieldline_hidden(response) self.assert_field_hidden(response, 'first') self.assert_field_hidden(response, 'second') def test_mixin(self): response = self.client.get(reverse('admin:admin_views_emptymodelmixin_add')) self.assert_fieldline_visible(response) self.assert_field_hidden(response, 'first') self.assert_field_visible(response, 'second') def assert_field_visible(self, response, field_name): self.assertContains(response, '<div class="fieldBox field-%s">' % field_name) def assert_field_hidden(self, response, field_name): self.assertContains(response, '<div class="fieldBox field-%s hidden">' % field_name) def assert_fieldline_visible(self, response): self.assertContains(response, '<div class="form-row field-first field-second">') def assert_fieldline_hidden(self, response): self.assertContains(response, '<div class="form-row hidden') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewOnSiteTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = State.objects.create(name='New York') cls.s2 = State.objects.create(name='Illinois') cls.s3 = State.objects.create(name='California') cls.c1 = City.objects.create(state=cls.s1, name='New York') cls.c2 = City.objects.create(state=cls.s2, name='Chicago') cls.c3 = City.objects.create(state=cls.s3, name='San Francisco') cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza') cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard') cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner') cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels') cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away') cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant') cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi') cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi') cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe') def setUp(self): self.client.force_login(self.superuser) def test_add_view_form_and_formsets_run_validation(self): """ Issue #20522 Verifying that if the parent form fails validation, the inlines also run validation even if validation is contingent on parent form data. Also, assertFormError() and assertFormsetError() is usable for admin forms and formsets. 
""" # The form validation should fail because 'some_required_info' is # not included on the parent form, and the family_name of the parent # does not match that of the child post_data = { 'family_name': 'Test1', 'dependentchild_set-TOTAL_FORMS': '1', 'dependentchild_set-INITIAL_FORMS': '0', 'dependentchild_set-MAX_NUM_FORMS': '1', 'dependentchild_set-0-id': '', 'dependentchild_set-0-parent': '', 'dependentchild_set-0-family_name': 'Test2', } response = self.client.post(reverse('admin:admin_views_parentwithdependentchildren_add'), post_data) self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.']) msg = "The form 'adminform' in context 0 does not contain the non-field error 'Error'" with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'adminform', None, ['Error']) self.assertFormsetError( response, 'inline_admin_formset', 0, None, ['Children must share a family name with their parents in this contrived test case'] ) msg = "The formset 'inline_admin_formset' in context 12 does not contain any non-form errors." with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(response, 'inline_admin_formset', None, None, ['Error']) def test_change_view_form_and_formsets_run_validation(self): """ Issue #20522 Verifying that if the parent form fails validation, the inlines also run validation even if validation is contingent on parent form data """ pwdc = ParentWithDependentChildren.objects.create(some_required_info=6, family_name='Test1') # The form validation should fail because 'some_required_info' is # not included on the parent form, and the family_name of the parent # does not match that of the child post_data = { 'family_name': 'Test2', 'dependentchild_set-TOTAL_FORMS': '1', 'dependentchild_set-INITIAL_FORMS': '0', 'dependentchild_set-MAX_NUM_FORMS': '1', 'dependentchild_set-0-id': '', 'dependentchild_set-0-parent': str(pwdc.id), 'dependentchild_set-0-family_name': 'Test1', } response = self.client.post( reverse('admin:admin_views_parentwithdependentchildren_change', args=(pwdc.id,)), post_data ) self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.']) self.assertFormsetError( response, 'inline_admin_formset', 0, None, ['Children must share a family name with their parents in this contrived test case'] ) def test_check(self): "The view_on_site value is either a boolean or a callable" try: admin = CityAdmin(City, AdminSite()) CityAdmin.view_on_site = True self.assertEqual(admin.check(), []) CityAdmin.view_on_site = False self.assertEqual(admin.check(), []) CityAdmin.view_on_site = lambda obj: obj.get_absolute_url() self.assertEqual(admin.check(), []) CityAdmin.view_on_site = [] self.assertEqual(admin.check(), [ Error( "The value of 'view_on_site' must be a callable or a boolean value.", obj=CityAdmin, id='admin.E025', ), ]) finally: # Restore the original values for the benefit of other tests. 
CityAdmin.view_on_site = True def test_false(self): "The 'View on site' button is not displayed if view_on_site is False" response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,))) content_type_pk = ContentType.objects.get_for_model(Restaurant).pk self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1))) def test_true(self): "The default behavior is followed if view_on_site is True" response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,))) content_type_pk = ContentType.objects.get_for_model(City).pk self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk))) def test_callable(self): "The right link is displayed if view_on_site is a callable" response = self.client.get(reverse('admin:admin_views_worker_change', args=(self.w1.pk,))) self.assertContains(response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name)) def test_missing_get_absolute_url(self): "None is returned if model doesn't have get_absolute_url" model_admin = ModelAdmin(Worker, None) self.assertIsNone(model_admin.get_view_on_site_url(Worker())) @override_settings(ROOT_URLCONF='admin_views.urls') class InlineAdminViewOnSiteTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = State.objects.create(name='New York') cls.s2 = State.objects.create(name='Illinois') cls.s3 = State.objects.create(name='California') cls.c1 = City.objects.create(state=cls.s1, name='New York') cls.c2 = City.objects.create(state=cls.s2, name='Chicago') cls.c3 = City.objects.create(state=cls.s3, name='San Francisco') cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza') cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard') cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner') cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels') cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away') cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant') cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi') cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi') cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe') def setUp(self): self.client.force_login(self.superuser) def test_false(self): "The 'View on site' button is not displayed if view_on_site is False" response = self.client.get(reverse('admin:admin_views_state_change', args=(self.s1.pk,))) content_type_pk = ContentType.objects.get_for_model(City).pk self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk))) def test_true(self): "The 'View on site' button is displayed if view_on_site is True" response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,))) content_type_pk = ContentType.objects.get_for_model(Restaurant).pk self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.r1.pk))) def test_callable(self): "The right link is displayed if view_on_site is a callable" response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,))) self.assertContains(response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name)) @override_settings(ROOT_URLCONF='admin_views.urls') class GetFormsetsWithInlinesArgumentTest(TestCase): """ #23934 - When adding a new model instance in the admin, 
    the 'obj' argument of get_formsets_with_inlines() should be None. When
    changing, it should be equal to the existing model instance.
    The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
    if obj is not None during add_view or obj is None during change_view.
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_explicitly_provided_pk(self):
        post_data = {'name': '1'}
        response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_add'), post_data)
        self.assertEqual(response.status_code, 302)

        post_data = {'name': '2'}
        response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_change', args=(1,)), post_data)
        self.assertEqual(response.status_code, 302)

    def test_implicitly_generated_pk(self):
        post_data = {'name': '1'}
        response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_add'), post_data)
        self.assertEqual(response.status_code, 302)

        post_data = {'name': '2'}
        response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_change', args=(1,)), post_data)
        self.assertEqual(response.status_code, 302)


@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminSiteFinalCatchAllPatternTests(TestCase):
    """
    Verifies the behaviour of the admin catch-all view.

    * Anonymous/non-staff users are redirected to login for all URLs, whether
      otherwise valid or not.
    * APPEND_SLASH is applied for staff if needed.
    * Otherwise Http404.
    * Catch-all view disabled via AdminSite.final_catch_all_view.
    """
    def test_unknown_url_redirects_login_if_not_authenticated(self):
        unknown_url = '/test_admin/admin/unknown/'
        response = self.client.get(unknown_url)
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), unknown_url))

    def test_unknown_url_404_if_authenticated(self):
        superuser = User.objects.create_superuser(
            username='super',
            password='secret',
            email='[email protected]',
        )
        self.client.force_login(superuser)
        unknown_url = '/test_admin/admin/unknown/'
        response = self.client.get(unknown_url)
        self.assertEqual(response.status_code, 404)

    def test_known_url_redirects_login_if_not_authenticated(self):
        known_url = reverse('admin:admin_views_article_changelist')
        response = self.client.get(known_url)
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), known_url))

    def test_known_url_missing_slash_redirects_login_if_not_authenticated(self):
        known_url = reverse('admin:admin_views_article_changelist')[:-1]
        response = self.client.get(known_url)
        # Redirects with the next URL also missing the slash.
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), known_url))

    def test_non_admin_url_shares_url_prefix(self):
        url = reverse('non_admin')[:-1]
        response = self.client.get(url)
        # Redirects with the next URL also missing the slash.
self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), url)) def test_url_without_trailing_slash_if_not_authenticated(self): url = reverse('admin:article_extra_json') response = self.client.get(url) self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), url)) def test_unkown_url_without_trailing_slash_if_not_authenticated(self): url = reverse('admin:article_extra_json')[:-1] response = self.client.get(url) self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), url)) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_unknown_url(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) unknown_url = '/test_admin/admin/unknown/' response = self.client.get(unknown_url[:-1]) self.assertEqual(response.status_code, 404) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects(response, known_url, status_code=301, target_status_code=403) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_non_staff_user(self): user = User.objects.create_user( username='user', password='secret', email='[email protected]', is_staff=False, ) self.client.force_login(user) known_url = reverse('admin:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects(response, '/test_admin/admin/login/?next=/test_admin/admin/admin_views/article') @override_settings(APPEND_SLASH=False) def test_missing_slash_append_slash_false(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertEqual(response.status_code, 404) @override_settings(APPEND_SLASH=True) def test_single_model_no_append_slash(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin9:admin_views_actor_changelist') response = self.client.get(known_url[:-1]) self.assertEqual(response.status_code, 404) # Same tests above with final_catch_all_view=False. 
def test_unknown_url_404_if_not_authenticated_without_final_catch_all_view(self): unknown_url = '/test_admin/admin10/unknown/' response = self.client.get(unknown_url) self.assertEqual(response.status_code, 404) def test_unknown_url_404_if_authenticated_without_final_catch_all_view(self): superuser = User.objects.create_superuser( username='super', password='secret', email='[email protected]', ) self.client.force_login(superuser) unknown_url = '/test_admin/admin10/unknown/' response = self.client.get(unknown_url) self.assertEqual(response.status_code, 404) def test_known_url_redirects_login_if_not_authenticated_without_final_catch_all_view(self): known_url = reverse('admin10:admin_views_article_changelist') response = self.client.get(known_url) self.assertRedirects(response, '%s?next=%s' % (reverse('admin10:login'), known_url)) def test_known_url_missing_slash_redirects_with_slash_if_not_authenticated_without_final_catch_all_view(self): known_url = reverse('admin10:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects(response, known_url, status_code=301, fetch_redirect_response=False) def test_non_admin_url_shares_url_prefix_without_final_catch_all_view(self): url = reverse('non_admin10') response = self.client.get(url[:-1]) self.assertRedirects(response, url, status_code=301) def test_url_without_trailing_slash_if_not_authenticated_without_final_catch_all_view(self): url = reverse('admin10:article_extra_json') response = self.client.get(url) self.assertRedirects(response, '%s?next=%s' % (reverse('admin10:login'), url)) def test_unkown_url_without_trailing_slash_if_not_authenticated_without_final_catch_all_view(self): url = reverse('admin10:article_extra_json')[:-1] response = self.client.get(url) # Matches test_admin/admin10/admin_views/article/<path:object_id>/ self.assertRedirects(response, url + '/', status_code=301, fetch_redirect_response=False) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_unknown_url_without_final_catch_all_view(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) unknown_url = '/test_admin/admin10/unknown/' response = self.client.get(unknown_url[:-1]) self.assertEqual(response.status_code, 404) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_without_final_catch_all_view(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin10:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects(response, known_url, status_code=301, target_status_code=403) @override_settings(APPEND_SLASH=False) def test_missing_slash_append_slash_false_without_final_catch_all_view(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin10:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertEqual(response.status_code, 404) # Outside admin. def test_non_admin_url_404_if_not_authenticated(self): unknown_url = '/unknown/' response = self.client.get(unknown_url) # Does not redirect to the admin login. self.assertEqual(response.status_code, 404)
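

# Illustrative sketch (not part of the original test suite): a standalone
# version of the querystring-order-insensitive URL comparison implemented by
# AdminKeepChangeListFiltersTests.assertURLEqual above. The helper name
# `urls_equivalent` is hypothetical and exists only to show the technique.
from urllib.parse import parse_qsl, urlparse


def urls_equivalent(url1, url2):
    """
    Return True if both URLs share a path and an equivalent querystring,
    ignoring parameter ordering, including inside '_changelist_filters'.
    Scheme and host are ignored, as in assertURLEqual.
    """
    def normalize(url):
        parsed = urlparse(url)
        query = dict(parse_qsl(parsed.query))
        if '_changelist_filters' in query:
            # The preserved changelist filters are themselves an encoded
            # querystring, so compare them as a dict as well.
            query['_changelist_filters'] = dict(parse_qsl(query['_changelist_filters']))
        return parsed.path, query
    return normalize(url1) == normalize(url2)


# Example usage: these differ only in parameter order, so they compare equal.
# urls_equivalent(
#     '/admin/auth/user/?is_staff__exact=0&is_superuser__exact=0',
#     '/admin/auth/user/?is_superuser__exact=0&is_staff__exact=0',
# )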
import datetime import pickle import sys import unittest from operator import attrgetter from threading import Lock from django.core.exceptions import EmptyResultSet, FieldError from django.db import DEFAULT_DB_ALIAS, connection from django.db.models import Count, Exists, F, OuterRef, Q from django.db.models.expressions import RawSQL from django.db.models.sql.constants import LOUTER from django.db.models.sql.where import NothingNode, WhereNode from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from django.test.utils import CaptureQueriesContext from .models import ( FK1, Annotation, Article, Author, BaseA, Book, CategoryItem, CategoryRelationship, Celebrity, Channel, Chapter, Child, ChildObjectA, Classroom, CommonMixedCaseForeignKeys, Company, Cover, CustomPk, CustomPkTag, DateTimePK, Detail, DumbCategory, Eaten, Employment, ExtraInfo, Fan, Food, Identifier, Individual, Item, Job, JobResponsibilities, Join, LeafA, LeafB, LoopX, LoopZ, ManagedModel, Member, MixedCaseDbColumnCategoryItem, MixedCaseFieldCategoryItem, ModelA, ModelB, ModelC, ModelD, MyObject, NamedCategory, Node, Note, NullableName, Number, ObjectA, ObjectB, ObjectC, OneToOneCategory, Order, OrderItem, Page, Paragraph, Person, Plaything, PointerA, Program, ProxyCategory, ProxyObjectA, ProxyObjectB, Ranking, Related, RelatedIndividual, RelatedObject, Report, ReportComment, ReservedName, Responsibility, School, SharedConnection, SimpleCategory, SingleObject, SpecialCategory, Staff, StaffUser, Student, Tag, Task, Teacher, Ticket21203Child, Ticket21203Parent, Ticket23605A, Ticket23605B, Ticket23605C, TvChef, Valid, X, ) class Queries1Tests(TestCase): @classmethod def setUpTestData(cls): cls.nc1 = generic = NamedCategory.objects.create(name="Generic") cls.t1 = Tag.objects.create(name='t1', category=generic) cls.t2 = Tag.objects.create(name='t2', parent=cls.t1, category=generic) cls.t3 = Tag.objects.create(name='t3', parent=cls.t1) cls.t4 = Tag.objects.create(name='t4', parent=cls.t3) cls.t5 = Tag.objects.create(name='t5', parent=cls.t3) cls.n1 = Note.objects.create(note='n1', misc='foo', id=1) cls.n2 = Note.objects.create(note='n2', misc='bar', id=2) cls.n3 = Note.objects.create(note='n3', misc='foo', id=3, negate=False) cls.ann1 = Annotation.objects.create(name='a1', tag=cls.t1) cls.ann1.notes.add(cls.n1) ann2 = Annotation.objects.create(name='a2', tag=cls.t4) ann2.notes.add(cls.n2, cls.n3) # Create these out of order so that sorting by 'id' will be different to sorting # by 'info'. Helps detect some problems later. 
cls.e2 = ExtraInfo.objects.create(info='e2', note=cls.n2, value=41, filterable=False) e1 = ExtraInfo.objects.create(info='e1', note=cls.n1, value=42) cls.a1 = Author.objects.create(name='a1', num=1001, extra=e1) cls.a2 = Author.objects.create(name='a2', num=2002, extra=e1) cls.a3 = Author.objects.create(name='a3', num=3003, extra=cls.e2) cls.a4 = Author.objects.create(name='a4', num=4004, extra=cls.e2) cls.time1 = datetime.datetime(2007, 12, 19, 22, 25, 0) cls.time2 = datetime.datetime(2007, 12, 19, 21, 0, 0) time3 = datetime.datetime(2007, 12, 20, 22, 25, 0) time4 = datetime.datetime(2007, 12, 20, 21, 0, 0) cls.i1 = Item.objects.create(name='one', created=cls.time1, modified=cls.time1, creator=cls.a1, note=cls.n3) cls.i1.tags.set([cls.t1, cls.t2]) cls.i2 = Item.objects.create(name='two', created=cls.time2, creator=cls.a2, note=cls.n2) cls.i2.tags.set([cls.t1, cls.t3]) cls.i3 = Item.objects.create(name='three', created=time3, creator=cls.a2, note=cls.n3) cls.i4 = Item.objects.create(name='four', created=time4, creator=cls.a4, note=cls.n3) cls.i4.tags.set([cls.t4]) cls.r1 = Report.objects.create(name='r1', creator=cls.a1) cls.r2 = Report.objects.create(name='r2', creator=cls.a3) cls.r3 = Report.objects.create(name='r3') # Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the Meta.ordering # will be rank3, rank2, rank1. cls.rank1 = Ranking.objects.create(rank=2, author=cls.a2) cls.c1 = Cover.objects.create(title="first", item=cls.i4) cls.c2 = Cover.objects.create(title="second", item=cls.i2) def test_subquery_condition(self): qs1 = Tag.objects.filter(pk__lte=0) qs2 = Tag.objects.filter(parent__in=qs1) qs3 = Tag.objects.filter(parent__in=qs2) self.assertEqual(qs3.query.subq_aliases, {'T', 'U', 'V'}) self.assertIn('v0', str(qs3.query).lower()) qs4 = qs3.filter(parent__in=qs1) self.assertEqual(qs4.query.subq_aliases, {'T', 'U', 'V'}) # It is possible to reuse U for the second subquery, no need to use W. self.assertNotIn('w0', str(qs4.query).lower()) # So, 'U0."id"' is referenced in SELECT and WHERE twice. self.assertEqual(str(qs4.query).lower().count('u0.'), 4) def test_ticket1050(self): self.assertSequenceEqual( Item.objects.filter(tags__isnull=True), [self.i3], ) self.assertSequenceEqual( Item.objects.filter(tags__id__isnull=True), [self.i3], ) def test_ticket1801(self): self.assertSequenceEqual( Author.objects.filter(item=self.i2), [self.a2], ) self.assertSequenceEqual( Author.objects.filter(item=self.i3), [self.a2], ) self.assertSequenceEqual( Author.objects.filter(item=self.i2) & Author.objects.filter(item=self.i3), [self.a2], ) def test_ticket2306(self): # Checking that no join types are "left outer" joins. query = Item.objects.filter(tags=self.t2).query self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()]) self.assertSequenceEqual( Item.objects.filter(Q(tags=self.t1)).order_by('name'), [self.i1, self.i2], ) self.assertSequenceEqual( Item.objects.filter(Q(tags=self.t1)).filter(Q(tags=self.t2)), [self.i1], ) self.assertSequenceEqual( Item.objects.filter(Q(tags=self.t1)).filter(Q(creator__name='fred') | Q(tags=self.t2)), [self.i1], ) # Each filter call is processed "at once" against a single table, so this is # different from the previous example as it tries to find tags that are two # things at once (rather than two tags). 
        self.assertSequenceEqual(
            Item.objects.filter(Q(tags=self.t1) & Q(tags=self.t2)),
            []
        )
        self.assertSequenceEqual(
            Item.objects.filter(Q(tags=self.t1), Q(creator__name='fred') | Q(tags=self.t2)),
            []
        )
        qs = Author.objects.filter(ranking__rank=2, ranking__id=self.rank1.id)
        self.assertSequenceEqual(list(qs), [self.a2])
        self.assertEqual(qs.query.count_active_tables(), 2)
        qs = Author.objects.filter(ranking__rank=2).filter(ranking__id=self.rank1.id)
        self.assertEqual(qs.query.count_active_tables(), 3)

    def test_ticket4464(self):
        self.assertSequenceEqual(
            Item.objects.filter(tags=self.t1).filter(tags=self.t2),
            [self.i1],
        )
        self.assertSequenceEqual(
            Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name'),
            [self.i1, self.i2],
        )
        self.assertSequenceEqual(
            Item.objects.filter(tags__in=[self.t1, self.t2]).filter(tags=self.t3),
            [self.i2],
        )
        # Make sure .distinct() works with slicing (this was broken in Oracle).
        self.assertSequenceEqual(
            Item.objects.filter(tags__in=[self.t1, self.t2]).order_by('name')[:3],
            [self.i1, self.i1, self.i2],
        )
        self.assertSequenceEqual(
            Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name')[:3],
            [self.i1, self.i2],
        )

    def test_tickets_2080_3592(self):
        self.assertSequenceEqual(
            Author.objects.filter(item__name='one') | Author.objects.filter(name='a3'),
            [self.a1, self.a3],
        )
        self.assertSequenceEqual(
            Author.objects.filter(Q(item__name='one') | Q(name='a3')),
            [self.a1, self.a3],
        )
        self.assertSequenceEqual(
            Author.objects.filter(Q(name='a3') | Q(item__name='one')),
            [self.a1, self.a3],
        )
        self.assertSequenceEqual(
            Author.objects.filter(Q(item__name='three') | Q(report__name='r3')),
            [self.a2],
        )

    def test_ticket6074(self):
        # Merging two empty result sets shouldn't leave a queryset with no
        # constraints (which would match everything).
        self.assertSequenceEqual(Author.objects.filter(Q(id__in=[])), [])
        self.assertSequenceEqual(
            Author.objects.filter(Q(id__in=[]) | Q(id__in=[])),
            []
        )

    def test_tickets_1878_2939(self):
        self.assertEqual(Item.objects.values('creator').distinct().count(), 3)

        # Create something with a duplicate 'name' so that we can test multi-column
        # cases (which require some tricky SQL transformations under the covers).
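        # The "tricky" part: COUNT(DISTINCT col1, col2) isn't portable, so for
        # a multi-column distinct count the ORM generally wraps the query in a
        # subquery, roughly (illustrative only):
        #     SELECT COUNT(*) FROM (SELECT DISTINCT creator_id, name FROM queries_item ...) subquery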
xx = Item(name='four', created=self.time1, creator=self.a2, note=self.n1) xx.save() self.assertEqual( Item.objects.exclude(name='two').values('creator', 'name').distinct().count(), 4 ) self.assertEqual( ( Item.objects .exclude(name='two') .extra(select={'foo': '%s'}, select_params=(1,)) .values('creator', 'name', 'foo') .distinct() .count() ), 4 ) self.assertEqual( ( Item.objects .exclude(name='two') .extra(select={'foo': '%s'}, select_params=(1,)) .values('creator', 'name') .distinct() .count() ), 4 ) xx.delete() def test_ticket7323(self): self.assertEqual(Item.objects.values('creator', 'name').count(), 4) def test_ticket2253(self): q1 = Item.objects.order_by('name') q2 = Item.objects.filter(id=self.i1.id) self.assertSequenceEqual(q1, [self.i4, self.i1, self.i3, self.i2]) self.assertSequenceEqual(q2, [self.i1]) self.assertSequenceEqual( (q1 | q2).order_by('name'), [self.i4, self.i1, self.i3, self.i2], ) self.assertSequenceEqual((q1 & q2).order_by('name'), [self.i1]) q1 = Item.objects.filter(tags=self.t1) q2 = Item.objects.filter(note=self.n3, tags=self.t2) q3 = Item.objects.filter(creator=self.a4) self.assertSequenceEqual( ((q1 & q2) | q3).order_by('name'), [self.i4, self.i1], ) def test_order_by_tables(self): q1 = Item.objects.order_by('name') q2 = Item.objects.filter(id=self.i1.id) list(q2) combined_query = (q1 & q2).order_by('name').query self.assertEqual(len([ t for t in combined_query.alias_map if combined_query.alias_refcount[t] ]), 1) def test_order_by_join_unref(self): """ This test is related to the above one, testing that there aren't old JOINs in the query. """ qs = Celebrity.objects.order_by('greatest_fan__fan_of') self.assertIn('OUTER JOIN', str(qs.query)) qs = qs.order_by('id') self.assertNotIn('OUTER JOIN', str(qs.query)) def test_get_clears_ordering(self): """ get() should clear ordering for optimization purposes. """ with CaptureQueriesContext(connection) as captured_queries: Author.objects.order_by('name').get(pk=self.a1.pk) self.assertNotIn('order by', captured_queries[0]['sql'].lower()) def test_tickets_4088_4306(self): self.assertSequenceEqual(Report.objects.filter(creator=1001), [self.r1]) self.assertSequenceEqual( Report.objects.filter(creator__num=1001), [self.r1] ) self.assertSequenceEqual(Report.objects.filter(creator__id=1001), []) self.assertSequenceEqual( Report.objects.filter(creator__id=self.a1.id), [self.r1] ) self.assertSequenceEqual( Report.objects.filter(creator__name='a1'), [self.r1] ) def test_ticket4510(self): self.assertSequenceEqual( Author.objects.filter(report__name='r1'), [self.a1], ) def test_ticket7378(self): self.assertSequenceEqual(self.a1.report_set.all(), [self.r1]) def test_tickets_5324_6704(self): self.assertSequenceEqual( Item.objects.filter(tags__name='t4'), [self.i4], ) self.assertSequenceEqual( Item.objects.exclude(tags__name='t4').order_by('name').distinct(), [self.i1, self.i3, self.i2], ) self.assertSequenceEqual( Item.objects.exclude(tags__name='t4').order_by('name').distinct().reverse(), [self.i2, self.i3, self.i1], ) self.assertSequenceEqual( Author.objects.exclude(item__name='one').distinct().order_by('name'), [self.a2, self.a3, self.a4], ) # Excluding across a m2m relation when there is more than one related # object associated was problematic. self.assertSequenceEqual( Item.objects.exclude(tags__name='t1').order_by('name'), [self.i4, self.i3], ) self.assertSequenceEqual( Item.objects.exclude(tags__name='t1').exclude(tags__name='t4'), [self.i3], ) # Excluding from a relation that cannot be NULL should not use outer joins. 
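        # Item.creator is a non-nullable ForeignKey, so every Item row has a
        # creator; the exclusion can therefore be expressed directly against
        # the creator_id column, and no LEFT OUTER JOIN is needed to avoid
        # dropping rows.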
query = Item.objects.exclude(creator__in=[self.a1, self.a2]).query self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()]) # Similarly, when one of the joins cannot possibly, ever, involve NULL # values (Author -> ExtraInfo, in the following), it should never be # promoted to a left outer join. So the following query should only # involve one "left outer" join (Author -> Item is 0-to-many). qs = Author.objects.filter(id=self.a1.id).filter(Q(extra__note=self.n1) | Q(item__note=self.n3)) self.assertEqual( len([ x for x in qs.query.alias_map.values() if x.join_type == LOUTER and qs.query.alias_refcount[x.table_alias] ]), 1 ) # The previous changes shouldn't affect nullable foreign key joins. self.assertSequenceEqual( Tag.objects.filter(parent__isnull=True).order_by('name'), [self.t1] ) self.assertSequenceEqual( Tag.objects.exclude(parent__isnull=True).order_by('name'), [self.t2, self.t3, self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.exclude(Q(parent__name='t1') | Q(parent__isnull=True)).order_by('name'), [self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.exclude(Q(parent__isnull=True) | Q(parent__name='t1')).order_by('name'), [self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.exclude(Q(parent__parent__isnull=True)).order_by('name'), [self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.filter(~Q(parent__parent__isnull=True)).order_by('name'), [self.t4, self.t5], ) def test_ticket2091(self): t = Tag.objects.get(name='t4') self.assertSequenceEqual(Item.objects.filter(tags__in=[t]), [self.i4]) def test_avoid_infinite_loop_on_too_many_subqueries(self): x = Tag.objects.filter(pk=1) local_recursion_limit = sys.getrecursionlimit() // 16 msg = 'Maximum recursion depth exceeded: too many subqueries.' with self.assertRaisesMessage(RecursionError, msg): for i in range(local_recursion_limit + 2): x = Tag.objects.filter(pk__in=x) def test_reasonable_number_of_subq_aliases(self): x = Tag.objects.filter(pk=1) for _ in range(20): x = Tag.objects.filter(pk__in=x) self.assertEqual( x.query.subq_aliases, { 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'AA', 'AB', 'AC', 'AD', 'AE', 'AF', 'AG', 'AH', 'AI', 'AJ', 'AK', 'AL', 'AM', 'AN', } ) def test_heterogeneous_qs_combination(self): # Combining querysets built on different models should behave in a well-defined # fashion. We raise an error. with self.assertRaisesMessage(AssertionError, 'Cannot combine queries on two different base models.'): Author.objects.all() & Tag.objects.all() with self.assertRaisesMessage(AssertionError, 'Cannot combine queries on two different base models.'): Author.objects.all() | Tag.objects.all() def test_ticket3141(self): self.assertEqual(Author.objects.extra(select={'foo': '1'}).count(), 4) self.assertEqual( Author.objects.extra(select={'foo': '%s'}, select_params=(1,)).count(), 4 ) def test_ticket2400(self): self.assertSequenceEqual( Author.objects.filter(item__isnull=True), [self.a3], ) self.assertSequenceEqual( Tag.objects.filter(item__isnull=True), [self.t5], ) def test_ticket2496(self): self.assertSequenceEqual( Item.objects.extra(tables=['queries_author']).select_related().order_by('name')[:1], [self.i4], ) def test_error_raised_on_filter_with_dictionary(self): with self.assertRaisesMessage(FieldError, 'Cannot parse keyword query as dict'): Note.objects.filter({'note': 'n1', 'misc': 'foo'}) def test_tickets_2076_7256(self): # Ordering on related tables should be possible, even if the table is # not otherwise involved. 
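        # "Not otherwise involved" means the Note table is joined purely so
        # the query can ORDER BY queries_note.note; no filter or selected
        # column needs it.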
self.assertSequenceEqual( Item.objects.order_by('note__note', 'name'), [self.i2, self.i4, self.i1, self.i3], ) # Ordering on a related field should use the remote model's default # ordering as a final step. self.assertSequenceEqual( Author.objects.order_by('extra', '-name'), [self.a2, self.a1, self.a4, self.a3], ) # Using remote model default ordering can span multiple models (in this # case, Cover is ordered by Item's default, which uses Note's default). self.assertSequenceEqual(Cover.objects.all(), [self.c1, self.c2]) # If the remote model does not have a default ordering, we order by its 'id' # field. self.assertSequenceEqual( Item.objects.order_by('creator', 'name'), [self.i1, self.i3, self.i2, self.i4], ) # Ordering by a many-valued attribute (e.g. a many-to-many or reverse # ForeignKey) is legal, but the results might not make sense. That # isn't Django's problem. Garbage in, garbage out. self.assertSequenceEqual( Item.objects.filter(tags__isnull=False).order_by('tags', 'id'), [self.i1, self.i2, self.i1, self.i2, self.i4], ) # If we replace the default ordering, Django adjusts the required # tables automatically. Item normally requires a join with Note to do # the default ordering, but that isn't needed here. qs = Item.objects.order_by('name') self.assertSequenceEqual(qs, [self.i4, self.i1, self.i3, self.i2]) self.assertEqual(len(qs.query.alias_map), 1) def test_tickets_2874_3002(self): qs = Item.objects.select_related().order_by('note__note', 'name') self.assertQuerysetEqual(qs, [self.i2, self.i4, self.i1, self.i3]) # This is also a good select_related() test because there are multiple # Note entries in the SQL. The two Note items should be different. self.assertEqual(repr(qs[0].note), '<Note: n2>') self.assertEqual(repr(qs[0].creator.extra.note), '<Note: n1>') def test_ticket3037(self): self.assertSequenceEqual( Item.objects.filter(Q(creator__name='a3', name='two') | Q(creator__name='a4', name='four')), [self.i4], ) def test_tickets_5321_7070(self): # Ordering columns must be included in the output columns. Note that # this means results that might otherwise be distinct are not (if there # are multiple values in the ordering cols), as in this example. This # isn't a bug; it's a warning to be careful with the selection of # ordering columns. self.assertSequenceEqual( Note.objects.values('misc').distinct().order_by('note', '-misc'), [{'misc': 'foo'}, {'misc': 'bar'}, {'misc': 'foo'}] ) def test_ticket4358(self): # If you don't pass any fields to values(), relation fields are # returned as "foo_id" keys, not "foo". For consistency, you should be # able to pass "foo_id" in the fields list and have it work, too. We # actually allow both "foo" and "foo_id". # The *_id version is returned by default. self.assertIn('note_id', ExtraInfo.objects.values()[0]) # You can also pass it in explicitly. self.assertSequenceEqual(ExtraInfo.objects.values('note_id'), [{'note_id': 1}, {'note_id': 2}]) # ...or use the field name. self.assertSequenceEqual(ExtraInfo.objects.values('note'), [{'note': 1}, {'note': 2}]) def test_ticket6154(self): # Multiple filter statements are joined using "AND" all the time. 
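        # Each .filter() call contributes its own constraints (and, across
        # multi-valued relations, its own joins), and the resulting WHERE
        # fragments are always combined with AND, never OR.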
self.assertSequenceEqual( Author.objects.filter(id=self.a1.id).filter(Q(extra__note=self.n1) | Q(item__note=self.n3)), [self.a1], ) self.assertSequenceEqual( Author.objects.filter(Q(extra__note=self.n1) | Q(item__note=self.n3)).filter(id=self.a1.id), [self.a1], ) def test_ticket6981(self): self.assertSequenceEqual( Tag.objects.select_related('parent').order_by('name'), [self.t1, self.t2, self.t3, self.t4, self.t5], ) def test_ticket9926(self): self.assertSequenceEqual( Tag.objects.select_related("parent", "category").order_by('name'), [self.t1, self.t2, self.t3, self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.select_related('parent', "parent__category").order_by('name'), [self.t1, self.t2, self.t3, self.t4, self.t5], ) def test_tickets_6180_6203(self): # Dates with limits and/or counts self.assertEqual(Item.objects.count(), 4) self.assertEqual(Item.objects.datetimes('created', 'month').count(), 1) self.assertEqual(Item.objects.datetimes('created', 'day').count(), 2) self.assertEqual(len(Item.objects.datetimes('created', 'day')), 2) self.assertEqual(Item.objects.datetimes('created', 'day')[0], datetime.datetime(2007, 12, 19, 0, 0)) def test_tickets_7087_12242(self): # Dates with extra select columns self.assertSequenceEqual( Item.objects.datetimes('created', 'day').extra(select={'a': 1}), [datetime.datetime(2007, 12, 19, 0, 0), datetime.datetime(2007, 12, 20, 0, 0)], ) self.assertSequenceEqual( Item.objects.extra(select={'a': 1}).datetimes('created', 'day'), [datetime.datetime(2007, 12, 19, 0, 0), datetime.datetime(2007, 12, 20, 0, 0)], ) name = "one" self.assertSequenceEqual( Item.objects.datetimes('created', 'day').extra(where=['name=%s'], params=[name]), [datetime.datetime(2007, 12, 19, 0, 0)], ) self.assertSequenceEqual( Item.objects.extra(where=['name=%s'], params=[name]).datetimes('created', 'day'), [datetime.datetime(2007, 12, 19, 0, 0)], ) def test_ticket7155(self): # Nullable dates self.assertSequenceEqual( Item.objects.datetimes('modified', 'day'), [datetime.datetime(2007, 12, 19, 0, 0)], ) def test_order_by_rawsql(self): self.assertSequenceEqual( Item.objects.values('note__note').order_by( RawSQL('queries_note.note', ()), 'id', ), [ {'note__note': 'n2'}, {'note__note': 'n3'}, {'note__note': 'n3'}, {'note__note': 'n3'}, ], ) def test_ticket7096(self): # Make sure exclude() with multiple conditions continues to work. self.assertSequenceEqual( Tag.objects.filter(parent=self.t1, name='t3').order_by('name'), [self.t3], ) self.assertSequenceEqual( Tag.objects.exclude(parent=self.t1, name='t3').order_by('name'), [self.t1, self.t2, self.t4, self.t5], ) self.assertSequenceEqual( Item.objects.exclude(tags__name='t1', name='one').order_by('name').distinct(), [self.i4, self.i3, self.i2], ) self.assertSequenceEqual( Item.objects.filter(name__in=['three', 'four']).exclude(tags__name='t1').order_by('name'), [self.i4, self.i3], ) # More twisted cases, involving nested negations. self.assertSequenceEqual( Item.objects.exclude(~Q(tags__name='t1', name='one')), [self.i1], ) self.assertSequenceEqual( Item.objects.filter(~Q(tags__name='t1', name='one'), name='two'), [self.i2], ) self.assertSequenceEqual( Item.objects.exclude(~Q(tags__name='t1', name='one'), name='two'), [self.i4, self.i1, self.i3], ) def test_tickets_7204_7506(self): # Make sure querysets with related fields can be pickled. If this # doesn't crash, it's a Good Thing. pickle.dumps(Item.objects.all()) def test_ticket7813(self): # We should also be able to pickle things that use select_related(). 
# The only tricky thing here is to ensure that we do the related # selections properly after unpickling. qs = Item.objects.select_related() query = qs.query.get_compiler(qs.db).as_sql()[0] query2 = pickle.loads(pickle.dumps(qs.query)) self.assertEqual( query2.get_compiler(qs.db).as_sql()[0], query ) def test_deferred_load_qs_pickling(self): # Check pickling of deferred-loading querysets qs = Item.objects.defer('name', 'creator') q2 = pickle.loads(pickle.dumps(qs)) self.assertEqual(list(qs), list(q2)) q3 = pickle.loads(pickle.dumps(qs, pickle.HIGHEST_PROTOCOL)) self.assertEqual(list(qs), list(q3)) def test_ticket7277(self): self.assertSequenceEqual( self.n1.annotation_set.filter( Q(tag=self.t5) | Q(tag__children=self.t5) | Q(tag__children__children=self.t5) ), [self.ann1], ) def test_tickets_7448_7707(self): # Complex objects should be converted to strings before being used in # lookups. self.assertSequenceEqual( Item.objects.filter(created__in=[self.time1, self.time2]), [self.i1, self.i2], ) def test_ticket7235(self): # An EmptyQuerySet should not raise exceptions if it is filtered. Eaten.objects.create(meal='m') q = Eaten.objects.none() with self.assertNumQueries(0): self.assertQuerysetEqual(q.all(), []) self.assertQuerysetEqual(q.filter(meal='m'), []) self.assertQuerysetEqual(q.exclude(meal='m'), []) self.assertQuerysetEqual(q.complex_filter({'pk': 1}), []) self.assertQuerysetEqual(q.select_related('food'), []) self.assertQuerysetEqual(q.annotate(Count('food')), []) self.assertQuerysetEqual(q.order_by('meal', 'food'), []) self.assertQuerysetEqual(q.distinct(), []) self.assertQuerysetEqual( q.extra(select={'foo': "1"}), [] ) self.assertQuerysetEqual(q.reverse(), []) q.query.low_mark = 1 msg = 'Cannot change a query once a slice has been taken.' with self.assertRaisesMessage(TypeError, msg): q.extra(select={'foo': "1"}) self.assertQuerysetEqual(q.defer('meal'), []) self.assertQuerysetEqual(q.only('meal'), []) def test_ticket7791(self): # There were "issues" when ordering and distinct-ing on fields related # via ForeignKeys. self.assertEqual( len(Note.objects.order_by('extrainfo__info').distinct()), 3 ) # Pickling of QuerySets using datetimes() should work. qs = Item.objects.datetimes('created', 'month') pickle.loads(pickle.dumps(qs)) def test_ticket9997(self): # If a ValuesList or Values queryset is passed as an inner query, we # make sure it's only requesting a single value and use that as the # thing to select. self.assertSequenceEqual( Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values('name')), [self.t2, self.t3], ) # Multi-valued values() and values_list() querysets should raise errors. with self.assertRaisesMessage(TypeError, 'Cannot use multi-field values as a filter value.'): Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values('name', 'id')) with self.assertRaisesMessage(TypeError, 'Cannot use multi-field values as a filter value.'): Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values_list('name', 'id')) def test_ticket9985(self): # qs.values_list(...).values(...) combinations should work. 
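        # values() and values_list() both just set the column mask on the
        # underlying query, so a later call simply replaces the earlier
        # selection (here: select "note" first, then only "id").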
self.assertSequenceEqual( Note.objects.values_list("note", flat=True).values("id").order_by("id"), [{'id': 1}, {'id': 2}, {'id': 3}] ) self.assertSequenceEqual( Annotation.objects.filter(notes__in=Note.objects.filter(note="n1").values_list('note').values('id')), [self.ann1], ) def test_ticket10205(self): # When bailing out early because of an empty "__in" filter, we need # to set things up correctly internally so that subqueries can continue properly. self.assertEqual(Tag.objects.filter(name__in=()).update(name="foo"), 0) def test_ticket10432(self): # Testing an empty "__in" filter with a generator as the value. def f(): return iter([]) n_obj = Note.objects.all()[0] def g(): yield n_obj.pk self.assertQuerysetEqual(Note.objects.filter(pk__in=f()), []) self.assertEqual(list(Note.objects.filter(pk__in=g())), [n_obj]) def test_ticket10742(self): # Queries used in an __in clause don't execute subqueries subq = Author.objects.filter(num__lt=3000) qs = Author.objects.filter(pk__in=subq) self.assertSequenceEqual(qs, [self.a1, self.a2]) # The subquery result cache should not be populated self.assertIsNone(subq._result_cache) subq = Author.objects.filter(num__lt=3000) qs = Author.objects.exclude(pk__in=subq) self.assertSequenceEqual(qs, [self.a3, self.a4]) # The subquery result cache should not be populated self.assertIsNone(subq._result_cache) subq = Author.objects.filter(num__lt=3000) self.assertSequenceEqual( Author.objects.filter(Q(pk__in=subq) & Q(name='a1')), [self.a1], ) # The subquery result cache should not be populated self.assertIsNone(subq._result_cache) def test_ticket7076(self): # Excluding shouldn't eliminate NULL entries. self.assertSequenceEqual( Item.objects.exclude(modified=self.time1).order_by('name'), [self.i4, self.i3, self.i2], ) self.assertSequenceEqual( Tag.objects.exclude(parent__name=self.t1.name), [self.t1, self.t4, self.t5], ) def test_ticket7181(self): # Ordering by related tables should accommodate nullable fields (this # test is a little tricky, since NULL ordering is database dependent. # Instead, we just count the number of results). self.assertEqual(len(Tag.objects.order_by('parent__name')), 5) # Empty querysets can be merged with others. self.assertSequenceEqual( Note.objects.none() | Note.objects.all(), [self.n1, self.n2, self.n3], ) self.assertSequenceEqual( Note.objects.all() | Note.objects.none(), [self.n1, self.n2, self.n3], ) self.assertSequenceEqual(Note.objects.none() & Note.objects.all(), []) self.assertSequenceEqual(Note.objects.all() & Note.objects.none(), []) def test_ticket8439(self): # Complex combinations of conjunctions, disjunctions and nullable # relations. 
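        # With a disjunction a row may qualify through either branch, so joins
        # needed by only one branch have to be promoted to LEFT OUTER joins;
        # otherwise rows that satisfy the other branch would be filtered out
        # by the join itself.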
self.assertSequenceEqual( Author.objects.filter(Q(item__note__extrainfo=self.e2) | Q(report=self.r1, name='xyz')), [self.a2], ) self.assertSequenceEqual( Author.objects.filter(Q(report=self.r1, name='xyz') | Q(item__note__extrainfo=self.e2)), [self.a2], ) self.assertSequenceEqual( Annotation.objects.filter(Q(tag__parent=self.t1) | Q(notes__note='n1', name='a1')), [self.ann1], ) xx = ExtraInfo.objects.create(info='xx', note=self.n3) self.assertSequenceEqual( Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)), [self.n1, self.n3], ) q = Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)).query self.assertEqual( len([x for x in q.alias_map.values() if x.join_type == LOUTER and q.alias_refcount[x.table_alias]]), 1 ) def test_ticket17429(self): """ Meta.ordering=None works the same as Meta.ordering=[] """ original_ordering = Tag._meta.ordering Tag._meta.ordering = None try: self.assertCountEqual( Tag.objects.all(), [self.t1, self.t2, self.t3, self.t4, self.t5], ) finally: Tag._meta.ordering = original_ordering def test_exclude(self): self.assertQuerysetEqual( Item.objects.exclude(tags__name='t4'), Item.objects.filter(~Q(tags__name='t4'))) self.assertQuerysetEqual( Item.objects.exclude(Q(tags__name='t4') | Q(tags__name='t3')), Item.objects.filter(~(Q(tags__name='t4') | Q(tags__name='t3')))) self.assertQuerysetEqual( Item.objects.exclude(Q(tags__name='t4') | ~Q(tags__name='t3')), Item.objects.filter(~(Q(tags__name='t4') | ~Q(tags__name='t3')))) def test_nested_exclude(self): self.assertQuerysetEqual( Item.objects.exclude(~Q(tags__name='t4')), Item.objects.filter(~~Q(tags__name='t4'))) def test_double_exclude(self): self.assertQuerysetEqual( Item.objects.filter(Q(tags__name='t4')), Item.objects.filter(~~Q(tags__name='t4'))) self.assertQuerysetEqual( Item.objects.filter(Q(tags__name='t4')), Item.objects.filter(~Q(~Q(tags__name='t4')))) def test_exclude_in(self): self.assertQuerysetEqual( Item.objects.exclude(Q(tags__name__in=['t4', 't3'])), Item.objects.filter(~Q(tags__name__in=['t4', 't3']))) self.assertQuerysetEqual( Item.objects.filter(Q(tags__name__in=['t4', 't3'])), Item.objects.filter(~~Q(tags__name__in=['t4', 't3']))) def test_ticket_10790_1(self): # Querying direct fields with isnull should trim the left outer join. # It also should not create INNER JOIN. q = Tag.objects.filter(parent__isnull=True) self.assertSequenceEqual(q, [self.t1]) self.assertNotIn('JOIN', str(q.query)) q = Tag.objects.filter(parent__isnull=False) self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5]) self.assertNotIn('JOIN', str(q.query)) q = Tag.objects.exclude(parent__isnull=True) self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5]) self.assertNotIn('JOIN', str(q.query)) q = Tag.objects.exclude(parent__isnull=False) self.assertSequenceEqual(q, [self.t1]) self.assertNotIn('JOIN', str(q.query)) q = Tag.objects.exclude(parent__parent__isnull=False) self.assertSequenceEqual(q, [self.t1, self.t2, self.t3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1) self.assertNotIn('INNER JOIN', str(q.query)) def test_ticket_10790_2(self): # Querying across several tables should strip only the last outer join, # while preserving the preceding inner joins. q = Tag.objects.filter(parent__parent__isnull=False) self.assertSequenceEqual(q, [self.t4, self.t5]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 1) # Querying without isnull should not convert anything to left outer join. 
q = Tag.objects.filter(parent__parent=self.t1) self.assertSequenceEqual(q, [self.t4, self.t5]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 1) def test_ticket_10790_3(self): # Querying via indirect fields should populate the left outer join q = NamedCategory.objects.filter(tag__isnull=True) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1) # join to dumbcategory ptr_id self.assertEqual(str(q.query).count('INNER JOIN'), 1) self.assertSequenceEqual(q, []) # Querying across several tables should strip only the last join, while # preserving the preceding left outer joins. q = NamedCategory.objects.filter(tag__parent__isnull=True) self.assertEqual(str(q.query).count('INNER JOIN'), 1) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1) self.assertSequenceEqual(q, [self.nc1]) def test_ticket_10790_4(self): # Querying across m2m field should not strip the m2m table from join. q = Author.objects.filter(item__tags__isnull=True) self.assertSequenceEqual(q, [self.a2, self.a3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 2) self.assertNotIn('INNER JOIN', str(q.query)) q = Author.objects.filter(item__tags__parent__isnull=True) self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 3) self.assertNotIn('INNER JOIN', str(q.query)) def test_ticket_10790_5(self): # Querying with isnull=False across m2m field should not create outer joins q = Author.objects.filter(item__tags__isnull=False) self.assertSequenceEqual(q, [self.a1, self.a1, self.a2, self.a2, self.a4]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 2) q = Author.objects.filter(item__tags__parent__isnull=False) self.assertSequenceEqual(q, [self.a1, self.a2, self.a4]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 3) q = Author.objects.filter(item__tags__parent__parent__isnull=False) self.assertSequenceEqual(q, [self.a4]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 4) def test_ticket_10790_6(self): # Querying with isnull=True across m2m field should not create inner joins # and strip last outer join q = Author.objects.filter(item__tags__parent__parent__isnull=True) self.assertSequenceEqual( q, [self.a1, self.a1, self.a2, self.a2, self.a2, self.a3], ) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 4) self.assertEqual(str(q.query).count('INNER JOIN'), 0) q = Author.objects.filter(item__tags__parent__isnull=True) self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 3) self.assertEqual(str(q.query).count('INNER JOIN'), 0) def test_ticket_10790_7(self): # Reverse querying with isnull should not strip the join q = Author.objects.filter(item__isnull=True) self.assertSequenceEqual(q, [self.a3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1) self.assertEqual(str(q.query).count('INNER JOIN'), 0) q = Author.objects.filter(item__isnull=False) self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a4]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 1) def test_ticket_10790_8(self): # Querying with combined q-objects should also strip the left outer join q = Tag.objects.filter(Q(parent__isnull=True) | Q(parent=self.t1)) self.assertSequenceEqual(q, [self.t1, 
self.t2, self.t3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 0) def test_ticket_10790_combine(self): # Combining queries should not re-populate the left outer join q1 = Tag.objects.filter(parent__isnull=True) q2 = Tag.objects.filter(parent__isnull=False) q3 = q1 | q2 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3, self.t4, self.t5]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q3 = q1 & q2 self.assertSequenceEqual(q3, []) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q2 = Tag.objects.filter(parent=self.t1) q3 = q1 | q2 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q3 = q2 | q1 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q1 = Tag.objects.filter(parent__isnull=True) q2 = Tag.objects.filter(parent__parent__isnull=True) q3 = q1 | q2 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 1) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q3 = q2 | q1 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 1) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) def test_ticket19672(self): self.assertSequenceEqual( Report.objects.filter(Q(creator__isnull=False) & ~Q(creator__extra__value=41)), [self.r1], ) def test_ticket_20250(self): # A negated Q along with an annotated queryset failed in Django 1.4 qs = Author.objects.annotate(Count('item')) qs = qs.filter(~Q(extra__value=0)).order_by('name') self.assertIn('SELECT', str(qs.query)) self.assertSequenceEqual(qs, [self.a1, self.a2, self.a3, self.a4]) def test_lookup_constraint_fielderror(self): msg = ( "Cannot resolve keyword 'unknown_field' into field. Choices are: " "annotation, category, category_id, children, id, item, " "managedmodel, name, note, parent, parent_id" ) with self.assertRaisesMessage(FieldError, msg): Tag.objects.filter(unknown_field__name='generic') def test_common_mixed_case_foreign_keys(self): """ Valid query should be generated when fields fetched from joined tables include FKs whose names only differ by case. 
""" c1 = SimpleCategory.objects.create(name='c1') c2 = SimpleCategory.objects.create(name='c2') c3 = SimpleCategory.objects.create(name='c3') category = CategoryItem.objects.create(category=c1) mixed_case_field_category = MixedCaseFieldCategoryItem.objects.create(CaTeGoRy=c2) mixed_case_db_column_category = MixedCaseDbColumnCategoryItem.objects.create(category=c3) CommonMixedCaseForeignKeys.objects.create( category=category, mixed_case_field_category=mixed_case_field_category, mixed_case_db_column_category=mixed_case_db_column_category, ) qs = CommonMixedCaseForeignKeys.objects.values( 'category', 'mixed_case_field_category', 'mixed_case_db_column_category', 'category__category', 'mixed_case_field_category__CaTeGoRy', 'mixed_case_db_column_category__category', ) self.assertTrue(qs.first()) def test_excluded_intermediary_m2m_table_joined(self): self.assertSequenceEqual( Note.objects.filter(~Q(tag__annotation__name=F('note'))), [self.n1, self.n2, self.n3], ) self.assertSequenceEqual( Note.objects.filter(tag__annotation__name='a1').filter(~Q(tag__annotation__name=F('note'))), [], ) def test_field_with_filterable(self): self.assertSequenceEqual( Author.objects.filter(extra=self.e2), [self.a3, self.a4], ) def test_negate_field(self): self.assertSequenceEqual( Note.objects.filter(negate=True), [self.n1, self.n2], ) self.assertSequenceEqual(Note.objects.exclude(negate=True), [self.n3]) class Queries2Tests(TestCase): @classmethod def setUpTestData(cls): cls.num4 = Number.objects.create(num=4) cls.num8 = Number.objects.create(num=8) cls.num12 = Number.objects.create(num=12) def test_ticket4289(self): # A slight variation on the restricting the filtering choices by the # lookup constraints. self.assertSequenceEqual(Number.objects.filter(num__lt=4), []) self.assertSequenceEqual(Number.objects.filter(num__gt=8, num__lt=12), []) self.assertSequenceEqual( Number.objects.filter(num__gt=8, num__lt=13), [self.num12], ) self.assertSequenceEqual( Number.objects.filter(Q(num__lt=4) | Q(num__gt=8, num__lt=12)), [] ) self.assertSequenceEqual( Number.objects.filter(Q(num__gt=8, num__lt=12) | Q(num__lt=4)), [] ) self.assertSequenceEqual( Number.objects.filter(Q(num__gt=8) & Q(num__lt=12) | Q(num__lt=4)), [] ) self.assertSequenceEqual( Number.objects.filter(Q(num__gt=7) & Q(num__lt=12) | Q(num__lt=4)), [self.num8], ) def test_ticket12239(self): # Custom lookups are registered to round float values correctly on gte # and lt IntegerField queries. 
self.assertSequenceEqual( Number.objects.filter(num__gt=11.9), [self.num12], ) self.assertSequenceEqual(Number.objects.filter(num__gt=12), []) self.assertSequenceEqual(Number.objects.filter(num__gt=12.0), []) self.assertSequenceEqual(Number.objects.filter(num__gt=12.1), []) self.assertCountEqual( Number.objects.filter(num__lt=12), [self.num4, self.num8], ) self.assertCountEqual( Number.objects.filter(num__lt=12.0), [self.num4, self.num8], ) self.assertCountEqual( Number.objects.filter(num__lt=12.1), [self.num4, self.num8, self.num12], ) self.assertCountEqual( Number.objects.filter(num__gte=11.9), [self.num12], ) self.assertCountEqual( Number.objects.filter(num__gte=12), [self.num12], ) self.assertCountEqual( Number.objects.filter(num__gte=12.0), [self.num12], ) self.assertSequenceEqual(Number.objects.filter(num__gte=12.1), []) self.assertSequenceEqual(Number.objects.filter(num__gte=12.9), []) self.assertCountEqual( Number.objects.filter(num__lte=11.9), [self.num4, self.num8], ) self.assertCountEqual( Number.objects.filter(num__lte=12), [self.num4, self.num8, self.num12], ) self.assertCountEqual( Number.objects.filter(num__lte=12.0), [self.num4, self.num8, self.num12], ) self.assertCountEqual( Number.objects.filter(num__lte=12.1), [self.num4, self.num8, self.num12], ) self.assertCountEqual( Number.objects.filter(num__lte=12.9), [self.num4, self.num8, self.num12], ) def test_ticket7759(self): # Count should work with a partially read result set. count = Number.objects.count() qs = Number.objects.all() def run(): for obj in qs: return qs.count() == count self.assertTrue(run()) class Queries3Tests(TestCase): def test_ticket7107(self): # This shouldn't create an infinite loop. self.assertQuerysetEqual(Valid.objects.all(), []) def test_ticket8683(self): # An error should be raised when QuerySet.datetimes() is passed the # wrong type of field. 
with self.assertRaisesMessage(AssertionError, "'name' isn't a DateField, TimeField, or DateTimeField."): Item.objects.datetimes('name', 'month') def test_ticket22023(self): with self.assertRaisesMessage(TypeError, "Cannot call only() after .values() or .values_list()"): Valid.objects.values().only() with self.assertRaisesMessage(TypeError, "Cannot call defer() after .values() or .values_list()"): Valid.objects.values().defer() class Queries4Tests(TestCase): @classmethod def setUpTestData(cls): generic = NamedCategory.objects.create(name="Generic") cls.t1 = Tag.objects.create(name='t1', category=generic) n1 = Note.objects.create(note='n1', misc='foo') n2 = Note.objects.create(note='n2', misc='bar') e1 = ExtraInfo.objects.create(info='e1', note=n1) e2 = ExtraInfo.objects.create(info='e2', note=n2) cls.a1 = Author.objects.create(name='a1', num=1001, extra=e1) cls.a3 = Author.objects.create(name='a3', num=3003, extra=e2) cls.r1 = Report.objects.create(name='r1', creator=cls.a1) cls.r2 = Report.objects.create(name='r2', creator=cls.a3) cls.r3 = Report.objects.create(name='r3') cls.i1 = Item.objects.create(name='i1', created=datetime.datetime.now(), note=n1, creator=cls.a1) cls.i2 = Item.objects.create(name='i2', created=datetime.datetime.now(), note=n1, creator=cls.a3) def test_ticket24525(self): tag = Tag.objects.create() anth100 = tag.note_set.create(note='ANTH', misc='100') math101 = tag.note_set.create(note='MATH', misc='101') s1 = tag.annotation_set.create(name='1') s2 = tag.annotation_set.create(name='2') s1.notes.set([math101, anth100]) s2.notes.set([math101]) result = math101.annotation_set.all() & tag.annotation_set.exclude(notes__in=[anth100]) self.assertEqual(list(result), [s2]) def test_ticket11811(self): unsaved_category = NamedCategory(name="Other") msg = 'Unsaved model instance <NamedCategory: Other> cannot be used in an ORM query.' with self.assertRaisesMessage(ValueError, msg): Tag.objects.filter(pk=self.t1.pk).update(category=unsaved_category) def test_ticket14876(self): # Note: when combining the query we need to have information available # about the join type of the trimmed "creator__isnull" join. If we # don't have that information, then the join is created as INNER JOIN # and results will be incorrect. 
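        # Report.creator is nullable, so creator__isnull=True can be answered
        # on the Report table alone and the creator join gets trimmed. When
        # that half is OR-ed with a path that does join through creator, the
        # remembered LOUTER join type has to survive the combine, or r3 (which
        # has no creator) would vanish from the results.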
q1 = Report.objects.filter(Q(creator__isnull=True) | Q(creator__extra__info='e1')) q2 = Report.objects.filter(Q(creator__isnull=True)) | Report.objects.filter(Q(creator__extra__info='e1')) self.assertCountEqual(q1, [self.r1, self.r3]) self.assertEqual(str(q1.query), str(q2.query)) q1 = Report.objects.filter(Q(creator__extra__info='e1') | Q(creator__isnull=True)) q2 = Report.objects.filter(Q(creator__extra__info='e1')) | Report.objects.filter(Q(creator__isnull=True)) self.assertCountEqual(q1, [self.r1, self.r3]) self.assertEqual(str(q1.query), str(q2.query)) q1 = Item.objects.filter(Q(creator=self.a1) | Q(creator__report__name='r1')).order_by() q2 = ( Item.objects .filter(Q(creator=self.a1)).order_by() | Item.objects.filter(Q(creator__report__name='r1')) .order_by() ) self.assertCountEqual(q1, [self.i1]) self.assertEqual(str(q1.query), str(q2.query)) q1 = Item.objects.filter(Q(creator__report__name='e1') | Q(creator=self.a1)).order_by() q2 = ( Item.objects.filter(Q(creator__report__name='e1')).order_by() | Item.objects.filter(Q(creator=self.a1)).order_by() ) self.assertCountEqual(q1, [self.i1]) self.assertEqual(str(q1.query), str(q2.query)) def test_combine_join_reuse(self): # Joins having identical connections are correctly recreated in the # rhs query, in case the query is ORed together (#18748). Report.objects.create(name='r4', creator=self.a1) q1 = Author.objects.filter(report__name='r5') q2 = Author.objects.filter(report__name='r4').filter(report__name='r1') combined = q1 | q2 self.assertEqual(str(combined.query).count('JOIN'), 2) self.assertEqual(len(combined), 1) self.assertEqual(combined[0].name, 'a1') def test_join_reuse_order(self): # Join aliases are reused in order. This shouldn't raise AssertionError # because change_map contains a circular reference (#26522). s1 = School.objects.create() s2 = School.objects.create() s3 = School.objects.create() t1 = Teacher.objects.create() otherteachers = Teacher.objects.exclude(pk=t1.pk).exclude(friends=t1) qs1 = otherteachers.filter(schools=s1).filter(schools=s2) qs2 = otherteachers.filter(schools=s1).filter(schools=s3) self.assertQuerysetEqual(qs1 | qs2, []) def test_ticket7095(self): # Updates that are filtered on the model being updated are somewhat # tricky in MySQL. ManagedModel.objects.create(data='mm1', tag=self.t1, public=True) self.assertEqual(ManagedModel.objects.update(data='mm'), 1) # A values() or values_list() query across joined models must use outer # joins appropriately. # Note: In Oracle, we expect a null CharField to return '' instead of # None. if connection.features.interprets_empty_strings_as_nulls: expected_null_charfield_repr = '' else: expected_null_charfield_repr = None self.assertSequenceEqual( Report.objects.values_list("creator__extra__info", flat=True).order_by("name"), ['e1', 'e2', expected_null_charfield_repr], ) # Similarly for select_related(), joins beyond an initial nullable join # must use outer joins so that all results are included. self.assertSequenceEqual( Report.objects.select_related("creator", "creator__extra").order_by("name"), [self.r1, self.r2, self.r3] ) # When there are multiple paths to a table from another table, we have # to be careful not to accidentally reuse an inappropriate join when # using select_related(). We used to return the parent's Detail record # here by mistake. 
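        # Child reaches Member along two different paths here (parent and
        # person), and each Member has its own Detail. select_related on
        # "person__details" must follow the person join; reusing the join
        # created for the parent lookup would hand back m1's detail (d1)
        # instead of m2's (d2).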
d1 = Detail.objects.create(data="d1") d2 = Detail.objects.create(data="d2") m1 = Member.objects.create(name="m1", details=d1) m2 = Member.objects.create(name="m2", details=d2) Child.objects.create(person=m2, parent=m1) obj = m1.children.select_related("person__details")[0] self.assertEqual(obj.person.details.data, 'd2') def test_order_by_resetting(self): # Calling order_by() with no parameters removes any existing ordering on the # model. But it should still be possible to add new ordering after that. qs = Author.objects.order_by().order_by('name') self.assertIn('ORDER BY', qs.query.get_compiler(qs.db).as_sql()[0]) def test_order_by_reverse_fk(self): # It is possible to order by reverse of foreign key, although that can lead # to duplicate results. c1 = SimpleCategory.objects.create(name="category1") c2 = SimpleCategory.objects.create(name="category2") CategoryItem.objects.create(category=c1) CategoryItem.objects.create(category=c2) CategoryItem.objects.create(category=c1) self.assertSequenceEqual(SimpleCategory.objects.order_by('categoryitem', 'pk'), [c1, c2, c1]) def test_filter_reverse_non_integer_pk(self): date_obj = DateTimePK.objects.create() extra_obj = ExtraInfo.objects.create(info='extra', date=date_obj) self.assertEqual( DateTimePK.objects.filter(extrainfo=extra_obj).get(), date_obj, ) def test_ticket10181(self): # Avoid raising an EmptyResultSet if an inner query is probably # empty (and hence, not executed). self.assertQuerysetEqual( Tag.objects.filter(id__in=Tag.objects.filter(id__in=[])), [] ) def test_ticket15316_filter_false(self): c1 = SimpleCategory.objects.create(name="category1") c2 = SpecialCategory.objects.create(name="named category1", special_name="special1") c3 = SpecialCategory.objects.create(name="named category2", special_name="special2") CategoryItem.objects.create(category=c1) ci2 = CategoryItem.objects.create(category=c2) ci3 = CategoryItem.objects.create(category=c3) qs = CategoryItem.objects.filter(category__specialcategory__isnull=False) self.assertEqual(qs.count(), 2) self.assertSequenceEqual(qs, [ci2, ci3]) def test_ticket15316_exclude_false(self): c1 = SimpleCategory.objects.create(name="category1") c2 = SpecialCategory.objects.create(name="named category1", special_name="special1") c3 = SpecialCategory.objects.create(name="named category2", special_name="special2") ci1 = CategoryItem.objects.create(category=c1) CategoryItem.objects.create(category=c2) CategoryItem.objects.create(category=c3) qs = CategoryItem.objects.exclude(category__specialcategory__isnull=False) self.assertEqual(qs.count(), 1) self.assertSequenceEqual(qs, [ci1]) def test_ticket15316_filter_true(self): c1 = SimpleCategory.objects.create(name="category1") c2 = SpecialCategory.objects.create(name="named category1", special_name="special1") c3 = SpecialCategory.objects.create(name="named category2", special_name="special2") ci1 = CategoryItem.objects.create(category=c1) CategoryItem.objects.create(category=c2) CategoryItem.objects.create(category=c3) qs = CategoryItem.objects.filter(category__specialcategory__isnull=True) self.assertEqual(qs.count(), 1) self.assertSequenceEqual(qs, [ci1]) def test_ticket15316_exclude_true(self): c1 = SimpleCategory.objects.create(name="category1") c2 = SpecialCategory.objects.create(name="named category1", special_name="special1") c3 = SpecialCategory.objects.create(name="named category2", special_name="special2") CategoryItem.objects.create(category=c1) ci2 = CategoryItem.objects.create(category=c2) ci3 = CategoryItem.objects.create(category=c3) qs = 
CategoryItem.objects.exclude(category__specialcategory__isnull=True) self.assertEqual(qs.count(), 2) self.assertSequenceEqual(qs, [ci2, ci3]) def test_ticket15316_one2one_filter_false(self): c = SimpleCategory.objects.create(name="cat") c0 = SimpleCategory.objects.create(name="cat0") c1 = SimpleCategory.objects.create(name="category1") OneToOneCategory.objects.create(category=c1, new_name="new1") OneToOneCategory.objects.create(category=c0, new_name="new2") CategoryItem.objects.create(category=c) ci2 = CategoryItem.objects.create(category=c0) ci3 = CategoryItem.objects.create(category=c1) qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=False).order_by('pk') self.assertEqual(qs.count(), 2) self.assertSequenceEqual(qs, [ci2, ci3]) def test_ticket15316_one2one_exclude_false(self): c = SimpleCategory.objects.create(name="cat") c0 = SimpleCategory.objects.create(name="cat0") c1 = SimpleCategory.objects.create(name="category1") OneToOneCategory.objects.create(category=c1, new_name="new1") OneToOneCategory.objects.create(category=c0, new_name="new2") ci1 = CategoryItem.objects.create(category=c) CategoryItem.objects.create(category=c0) CategoryItem.objects.create(category=c1) qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=False) self.assertEqual(qs.count(), 1) self.assertSequenceEqual(qs, [ci1]) def test_ticket15316_one2one_filter_true(self): c = SimpleCategory.objects.create(name="cat") c0 = SimpleCategory.objects.create(name="cat0") c1 = SimpleCategory.objects.create(name="category1") OneToOneCategory.objects.create(category=c1, new_name="new1") OneToOneCategory.objects.create(category=c0, new_name="new2") ci1 = CategoryItem.objects.create(category=c) CategoryItem.objects.create(category=c0) CategoryItem.objects.create(category=c1) qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=True) self.assertEqual(qs.count(), 1) self.assertSequenceEqual(qs, [ci1]) def test_ticket15316_one2one_exclude_true(self): c = SimpleCategory.objects.create(name="cat") c0 = SimpleCategory.objects.create(name="cat0") c1 = SimpleCategory.objects.create(name="category1") OneToOneCategory.objects.create(category=c1, new_name="new1") OneToOneCategory.objects.create(category=c0, new_name="new2") CategoryItem.objects.create(category=c) ci2 = CategoryItem.objects.create(category=c0) ci3 = CategoryItem.objects.create(category=c1) qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=True).order_by('pk') self.assertEqual(qs.count(), 2) self.assertSequenceEqual(qs, [ci2, ci3]) class Queries5Tests(TestCase): @classmethod def setUpTestData(cls): # Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the # Meta.ordering will be rank3, rank2, rank1. cls.n1 = Note.objects.create(note='n1', misc='foo', id=1) cls.n2 = Note.objects.create(note='n2', misc='bar', id=2) e1 = ExtraInfo.objects.create(info='e1', note=cls.n1) e2 = ExtraInfo.objects.create(info='e2', note=cls.n2) a1 = Author.objects.create(name='a1', num=1001, extra=e1) a2 = Author.objects.create(name='a2', num=2002, extra=e1) a3 = Author.objects.create(name='a3', num=3003, extra=e2) cls.rank2 = Ranking.objects.create(rank=2, author=a2) cls.rank1 = Ranking.objects.create(rank=1, author=a3) cls.rank3 = Ranking.objects.create(rank=3, author=a1) def test_ordering(self): # Cross model ordering is possible in Meta, too. 
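        # "Cross model" because Ranking's default ordering (its Meta.ordering,
        # presumably spanning the author ForeignKey) sorts by fields on the
        # related Author, which is why the unordered queryset comes back as
        # rank3, rank2, rank1: the ranks owned by a1, a2 and a3 respectively.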
self.assertSequenceEqual( Ranking.objects.all(), [self.rank3, self.rank2, self.rank1], ) self.assertSequenceEqual( Ranking.objects.all().order_by('rank'), [self.rank1, self.rank2, self.rank3], ) # Ordering of extra() pieces is possible, too and you can mix extra # fields and model fields in the ordering. self.assertSequenceEqual( Ranking.objects.extra(tables=['django_site'], order_by=['-django_site.id', 'rank']), [self.rank1, self.rank2, self.rank3], ) sql = 'case when %s > 2 then 1 else 0 end' % connection.ops.quote_name('rank') qs = Ranking.objects.extra(select={'good': sql}) self.assertEqual( [o.good for o in qs.extra(order_by=('-good',))], [True, False, False] ) self.assertSequenceEqual( qs.extra(order_by=('-good', 'id')), [self.rank3, self.rank2, self.rank1], ) # Despite having some extra aliases in the query, we can still omit # them in a values() query. dicts = qs.values('id', 'rank').order_by('id') self.assertEqual( [d['rank'] for d in dicts], [2, 1, 3] ) def test_ticket7256(self): # An empty values() call includes all aliases, including those from an # extra() sql = 'case when %s > 2 then 1 else 0 end' % connection.ops.quote_name('rank') qs = Ranking.objects.extra(select={'good': sql}) dicts = qs.values().order_by('id') for d in dicts: del d['id'] del d['author_id'] self.assertEqual( [sorted(d.items()) for d in dicts], [[('good', 0), ('rank', 2)], [('good', 0), ('rank', 1)], [('good', 1), ('rank', 3)]] ) def test_ticket7045(self): # Extra tables used to crash SQL construction on the second use. qs = Ranking.objects.extra(tables=['django_site']) qs.query.get_compiler(qs.db).as_sql() # test passes if this doesn't raise an exception. qs.query.get_compiler(qs.db).as_sql() def test_ticket9848(self): # Make sure that updates which only filter on sub-tables don't # inadvertently update the wrong records (bug #9848). author_start = Author.objects.get(name='a1') ranking_start = Ranking.objects.get(author__name='a1') # Make sure that the IDs from different tables don't happen to match. self.assertSequenceEqual( Ranking.objects.filter(author__name='a1'), [self.rank3], ) self.assertEqual( Ranking.objects.filter(author__name='a1').update(rank=4636), 1 ) r = Ranking.objects.get(author__name='a1') self.assertEqual(r.id, ranking_start.id) self.assertEqual(r.author.id, author_start.id) self.assertEqual(r.rank, 4636) r.rank = 3 r.save() self.assertSequenceEqual( Ranking.objects.all(), [self.rank3, self.rank2, self.rank1], ) def test_ticket5261(self): # Test different empty excludes. self.assertSequenceEqual( Note.objects.exclude(Q()), [self.n1, self.n2], ) self.assertSequenceEqual( Note.objects.filter(~Q()), [self.n1, self.n2], ) self.assertSequenceEqual( Note.objects.filter(~Q() | ~Q()), [self.n1, self.n2], ) self.assertSequenceEqual( Note.objects.exclude(~Q() & ~Q()), [self.n1, self.n2], ) def test_extra_select_literal_percent_s(self): # Allow %%s to escape select clauses self.assertEqual( Note.objects.extra(select={'foo': "'%%s'"})[0].foo, '%s' ) self.assertEqual( Note.objects.extra(select={'foo': "'%%s bar %%s'"})[0].foo, '%s bar %s' ) self.assertEqual( Note.objects.extra(select={'foo': "'bar %%s'"})[0].foo, 'bar %s' ) class SelectRelatedTests(TestCase): def test_tickets_3045_3288(self): # Once upon a time, select_related() with circular relations would loop # infinitely if you forgot to specify "depth". Now we set an arbitrary # default upper bound. 
self.assertQuerysetEqual(X.objects.all(), []) self.assertQuerysetEqual(X.objects.select_related(), []) class SubclassFKTests(TestCase): def test_ticket7778(self): # Model subclasses could not be deleted if a nullable foreign key # relates to a model that relates back. num_celebs = Celebrity.objects.count() tvc = TvChef.objects.create(name="Huey") self.assertEqual(Celebrity.objects.count(), num_celebs + 1) Fan.objects.create(fan_of=tvc) Fan.objects.create(fan_of=tvc) tvc.delete() # The parent object should have been deleted as well. self.assertEqual(Celebrity.objects.count(), num_celebs) class CustomPkTests(TestCase): def test_ticket7371(self): self.assertQuerysetEqual(Related.objects.order_by('custom'), []) class NullableRelOrderingTests(TestCase): def test_ticket10028(self): # Ordering by model related to nullable relations(!) should use outer # joins, so that all results are included. p1 = Plaything.objects.create(name="p1") self.assertSequenceEqual(Plaything.objects.all(), [p1]) def test_join_already_in_query(self): # Ordering by model related to nullable relations should not change # the join type of already existing joins. Plaything.objects.create(name="p1") s = SingleObject.objects.create(name='s') r = RelatedObject.objects.create(single=s, f=1) p2 = Plaything.objects.create(name="p2", others=r) qs = Plaything.objects.all().filter(others__isnull=False).order_by('pk') self.assertNotIn('JOIN', str(qs.query)) qs = Plaything.objects.all().filter(others__f__isnull=False).order_by('pk') self.assertIn('INNER', str(qs.query)) qs = qs.order_by('others__single__name') # The ordering by others__single__pk will add one new join (to single) # and that join must be LEFT join. The already existing join to related # objects must be kept INNER. So, we have both an INNER and a LEFT join # in the query. self.assertEqual(str(qs.query).count('LEFT'), 1) self.assertEqual(str(qs.query).count('INNER'), 1) self.assertSequenceEqual(qs, [p2]) class DisjunctiveFilterTests(TestCase): @classmethod def setUpTestData(cls): cls.n1 = Note.objects.create(note='n1', misc='foo', id=1) cls.e1 = ExtraInfo.objects.create(info='e1', note=cls.n1) def test_ticket7872(self): # Another variation on the disjunctive filtering theme. # For the purposes of this regression test, it's important that there is no # Join object related to the LeafA we create. l1 = LeafA.objects.create(data='first') self.assertSequenceEqual(LeafA.objects.all(), [l1]) self.assertSequenceEqual( LeafA.objects.filter(Q(data='first') | Q(join__b__data='second')), [l1], ) def test_ticket8283(self): # Checking that applying filters after a disjunction works correctly. 
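        # After q_a | q_b, a further .filter() is AND-ed onto the combined
        # query as a whole, so it constrains both branches of the OR rather
        # than only the branch that was written last.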
self.assertSequenceEqual( (ExtraInfo.objects.filter(note=self.n1) | ExtraInfo.objects.filter(info='e2')).filter(note=self.n1), [self.e1], ) self.assertSequenceEqual( (ExtraInfo.objects.filter(info='e2') | ExtraInfo.objects.filter(note=self.n1)).filter(note=self.n1), [self.e1], ) class Queries6Tests(TestCase): @classmethod def setUpTestData(cls): generic = NamedCategory.objects.create(name="Generic") cls.t1 = Tag.objects.create(name='t1', category=generic) cls.t2 = Tag.objects.create(name='t2', parent=cls.t1, category=generic) cls.t3 = Tag.objects.create(name='t3', parent=cls.t1) cls.t4 = Tag.objects.create(name='t4', parent=cls.t3) cls.t5 = Tag.objects.create(name='t5', parent=cls.t3) n1 = Note.objects.create(note='n1', misc='foo', id=1) cls.ann1 = Annotation.objects.create(name='a1', tag=cls.t1) cls.ann1.notes.add(n1) cls.ann2 = Annotation.objects.create(name='a2', tag=cls.t4) def test_parallel_iterators(self): # Parallel iterators work. qs = Tag.objects.all() i1, i2 = iter(qs), iter(qs) self.assertEqual(repr(next(i1)), '<Tag: t1>') self.assertEqual(repr(next(i1)), '<Tag: t2>') self.assertEqual(repr(next(i2)), '<Tag: t1>') self.assertEqual(repr(next(i2)), '<Tag: t2>') self.assertEqual(repr(next(i2)), '<Tag: t3>') self.assertEqual(repr(next(i1)), '<Tag: t3>') qs = X.objects.all() self.assertFalse(qs) self.assertFalse(qs) def test_nested_queries_sql(self): # Nested queries should not evaluate the inner query as part of constructing the # SQL (so we should see a nested query here, indicated by two "SELECT" calls). qs = Annotation.objects.filter(notes__in=Note.objects.filter(note="xyzzy")) self.assertEqual( qs.query.get_compiler(qs.db).as_sql()[0].count('SELECT'), 2 ) def test_tickets_8921_9188(self): # Incorrect SQL was being generated for certain types of exclude() # queries that crossed multi-valued relations (#8921, #9188 and some # preemptively discovered cases). self.assertSequenceEqual( PointerA.objects.filter(connection__pointerb__id=1), [] ) self.assertSequenceEqual( PointerA.objects.exclude(connection__pointerb__id=1), [] ) self.assertSequenceEqual( Tag.objects.exclude(children=None), [self.t1, self.t3], ) # This example is tricky because the parent could be NULL, so only checking # parents with annotations omits some results (tag t1, in this case). self.assertSequenceEqual( Tag.objects.exclude(parent__annotation__name="a1"), [self.t1, self.t4, self.t5], ) # The annotation->tag link is single values and tag->children links is # multi-valued. So we have to split the exclude filter in the middle # and then optimize the inner query without losing results. self.assertSequenceEqual( Annotation.objects.exclude(tag__children__name="t2"), [self.ann2], ) # Nested queries are possible (although should be used with care, since # they have performance problems on backends like MySQL. self.assertSequenceEqual( Annotation.objects.filter(notes__in=Note.objects.filter(note="n1")), [self.ann1], ) def test_ticket3739(self): # The all() method on querysets returns a copy of the queryset. 
q1 = Tag.objects.order_by('name') self.assertIsNot(q1, q1.all()) def test_ticket_11320(self): qs = Tag.objects.exclude(category=None).exclude(category__name='foo') self.assertEqual(str(qs.query).count(' INNER JOIN '), 1) def test_distinct_ordered_sliced_subquery_aggregation(self): self.assertEqual(Tag.objects.distinct().order_by('category__name')[:3].count(), 3) def test_multiple_columns_with_the_same_name_slice(self): self.assertEqual( list(Tag.objects.order_by('name').values_list('name', 'category__name')[:2]), [('t1', 'Generic'), ('t2', 'Generic')], ) self.assertSequenceEqual( Tag.objects.order_by('name').select_related('category')[:2], [self.t1, self.t2], ) self.assertEqual( list(Tag.objects.order_by('-name').values_list('name', 'parent__name')[:2]), [('t5', 't3'), ('t4', 't3')], ) self.assertSequenceEqual( Tag.objects.order_by('-name').select_related('parent')[:2], [self.t5, self.t4], ) class RawQueriesTests(TestCase): @classmethod def setUpTestData(cls): Note.objects.create(note='n1', misc='foo', id=1) def test_ticket14729(self): # Test representation of raw query with one or few parameters passed as list query = "SELECT * FROM queries_note WHERE note = %s" params = ['n1'] qs = Note.objects.raw(query, params=params) self.assertEqual(repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1>") query = "SELECT * FROM queries_note WHERE note = %s and misc = %s" params = ['n1', 'foo'] qs = Note.objects.raw(query, params=params) self.assertEqual(repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1 and misc = foo>") class GeneratorExpressionTests(SimpleTestCase): def test_ticket10432(self): # Using an empty iterator as the rvalue for an "__in" # lookup is legal. self.assertCountEqual(Note.objects.filter(pk__in=iter(())), []) class ComparisonTests(TestCase): @classmethod def setUpTestData(cls): cls.n1 = Note.objects.create(note='n1', misc='foo', id=1) e1 = ExtraInfo.objects.create(info='e1', note=cls.n1) cls.a2 = Author.objects.create(name='a2', num=2002, extra=e1) def test_ticket8597(self): # Regression tests for case-insensitive comparisons item_ab = Item.objects.create(name="a_b", created=datetime.datetime.now(), creator=self.a2, note=self.n1) item_xy = Item.objects.create(name="x%y", created=datetime.datetime.now(), creator=self.a2, note=self.n1) self.assertSequenceEqual( Item.objects.filter(name__iexact="A_b"), [item_ab], ) self.assertSequenceEqual( Item.objects.filter(name__iexact="x%Y"), [item_xy], ) self.assertSequenceEqual( Item.objects.filter(name__istartswith="A_b"), [item_ab], ) self.assertSequenceEqual( Item.objects.filter(name__iendswith="A_b"), [item_ab], ) class ExistsSql(TestCase): def test_exists(self): with CaptureQueriesContext(connection) as captured_queries: self.assertFalse(Tag.objects.exists()) # Ok - so the exist query worked - but did it include too many columns? 
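        # exists() should issue a single trimmed query that selects no model
        # columns, so neither the quoted 'id' nor 'name' column should appear
        # in the captured SQL.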
self.assertEqual(len(captured_queries), 1) qstr = captured_queries[0]['sql'] id, name = connection.ops.quote_name('id'), connection.ops.quote_name('name') self.assertNotIn(id, qstr) self.assertNotIn(name, qstr) def test_ticket_18414(self): Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='two', created=datetime.datetime.now()) self.assertTrue(Article.objects.exists()) self.assertTrue(Article.objects.distinct().exists()) self.assertTrue(Article.objects.distinct()[1:3].exists()) self.assertFalse(Article.objects.distinct()[1:1].exists()) @skipUnlessDBFeature('can_distinct_on_fields') def test_ticket_18414_distinct_on(self): Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='two', created=datetime.datetime.now()) self.assertTrue(Article.objects.distinct('name').exists()) self.assertTrue(Article.objects.distinct('name')[1:2].exists()) self.assertFalse(Article.objects.distinct('name')[2:3].exists()) class QuerysetOrderedTests(unittest.TestCase): """ Tests for the Queryset.ordered attribute. """ def test_no_default_or_explicit_ordering(self): self.assertIs(Annotation.objects.all().ordered, False) def test_cleared_default_ordering(self): self.assertIs(Tag.objects.all().ordered, True) self.assertIs(Tag.objects.all().order_by().ordered, False) def test_explicit_ordering(self): self.assertIs(Annotation.objects.all().order_by('id').ordered, True) def test_empty_queryset(self): self.assertIs(Annotation.objects.none().ordered, True) def test_order_by_extra(self): self.assertIs(Annotation.objects.all().extra(order_by=['id']).ordered, True) def test_annotated_ordering(self): qs = Annotation.objects.annotate(num_notes=Count('notes')) self.assertIs(qs.ordered, False) self.assertIs(qs.order_by('num_notes').ordered, True) def test_annotated_default_ordering(self): qs = Tag.objects.annotate(num_notes=Count('pk')) self.assertIs(qs.ordered, False) self.assertIs(qs.order_by('name').ordered, True) def test_annotated_values_default_ordering(self): qs = Tag.objects.values('name').annotate(num_notes=Count('pk')) self.assertIs(qs.ordered, False) self.assertIs(qs.order_by('name').ordered, True) @skipUnlessDBFeature('allow_sliced_subqueries_with_in') class SubqueryTests(TestCase): @classmethod def setUpTestData(cls): NamedCategory.objects.create(id=1, name='first') NamedCategory.objects.create(id=2, name='second') NamedCategory.objects.create(id=3, name='third') NamedCategory.objects.create(id=4, name='fourth') def test_ordered_subselect(self): "Subselects honor any manual ordering" query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:2]) self.assertEqual(set(query.values_list('id', flat=True)), {3, 4}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[:2]) self.assertEqual(set(query.values_list('id', flat=True)), {3, 4}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:2]) self.assertEqual(set(query.values_list('id', flat=True)), {3}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[2:]) self.assertEqual(set(query.values_list('id', flat=True)), {1, 2}) def test_slice_subquery_and_query(self): """ Slice a query that has a sliced subquery """ query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:2])[0:2] self.assertEqual({x.id for x in query}, {3, 
4}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:3])[1:3] self.assertEqual({x.id for x in query}, {3}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[2:])[1:] self.assertEqual({x.id for x in query}, {2}) def test_related_sliced_subquery(self): """ Related objects constraints can safely contain sliced subqueries. refs #22434 """ generic = NamedCategory.objects.create(id=5, name="Generic") t1 = Tag.objects.create(name='t1', category=generic) t2 = Tag.objects.create(name='t2', category=generic) ManagedModel.objects.create(data='mm1', tag=t1, public=True) mm2 = ManagedModel.objects.create(data='mm2', tag=t2, public=True) query = ManagedModel.normal_manager.filter( tag__in=Tag.objects.order_by('-id')[:1] ) self.assertEqual({x.id for x in query}, {mm2.id}) def test_sliced_delete(self): "Delete queries can safely contain sliced subqueries" DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:1]).delete() self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {1, 2, 3}) DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:2]).delete() self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {1, 3}) DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:]).delete() self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {3}) def test_distinct_ordered_sliced_subquery(self): # Implicit values('id'). self.assertSequenceEqual( NamedCategory.objects.filter( id__in=NamedCategory.objects.distinct().order_by('name')[0:2], ).order_by('name').values_list('name', flat=True), ['first', 'fourth'] ) # Explicit values('id'). self.assertSequenceEqual( NamedCategory.objects.filter( id__in=NamedCategory.objects.distinct().order_by('-name').values('id')[0:2], ).order_by('name').values_list('name', flat=True), ['second', 'third'] ) # Annotated value. 
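        # The inner queryset selects the annotated double_id values of the two
        # lowest ids (1 and 2), so the outer id__in lookup matches ids 2 and 4.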
self.assertSequenceEqual( DumbCategory.objects.filter( id__in=DumbCategory.objects.annotate( double_id=F('id') * 2 ).order_by('id').distinct().values('double_id')[0:2], ).order_by('id').values_list('id', flat=True), [2, 4] ) class QuerySetBitwiseOperationTests(TestCase): @classmethod def setUpTestData(cls): cls.school = School.objects.create() cls.room_1 = Classroom.objects.create(school=cls.school, has_blackboard=False, name='Room 1') cls.room_2 = Classroom.objects.create(school=cls.school, has_blackboard=True, name='Room 2') cls.room_3 = Classroom.objects.create(school=cls.school, has_blackboard=True, name='Room 3') cls.room_4 = Classroom.objects.create(school=cls.school, has_blackboard=False, name='Room 4') @skipUnlessDBFeature('allow_sliced_subqueries_with_in') def test_or_with_rhs_slice(self): qs1 = Classroom.objects.filter(has_blackboard=True) qs2 = Classroom.objects.filter(has_blackboard=False)[:1] self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_3]) @skipUnlessDBFeature('allow_sliced_subqueries_with_in') def test_or_with_lhs_slice(self): qs1 = Classroom.objects.filter(has_blackboard=True)[:1] qs2 = Classroom.objects.filter(has_blackboard=False) self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_4]) @skipUnlessDBFeature('allow_sliced_subqueries_with_in') def test_or_with_both_slice(self): qs1 = Classroom.objects.filter(has_blackboard=False)[:1] qs2 = Classroom.objects.filter(has_blackboard=True)[:1] self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2]) @skipUnlessDBFeature('allow_sliced_subqueries_with_in') def test_or_with_both_slice_and_ordering(self): qs1 = Classroom.objects.filter(has_blackboard=False).order_by('-pk')[:1] qs2 = Classroom.objects.filter(has_blackboard=True).order_by('-name')[:1] self.assertCountEqual(qs1 | qs2, [self.room_3, self.room_4]) def test_subquery_aliases(self): combined = School.objects.filter(pk__isnull=False) & School.objects.filter( Exists(Classroom.objects.filter( has_blackboard=True, school=OuterRef('pk'), )), ) self.assertSequenceEqual(combined, [self.school]) nested_combined = School.objects.filter(pk__in=combined.values('pk')) self.assertSequenceEqual(nested_combined, [self.school]) class CloneTests(TestCase): def test_evaluated_queryset_as_argument(self): "#13227 -- If a queryset is already evaluated, it can still be used as a query arg" n = Note(note='Test1', misc='misc') n.save() e = ExtraInfo(info='good', note=n) e.save() n_list = Note.objects.all() # Evaluate the Note queryset, populating the query cache list(n_list) # Make one of cached results unpickable. n_list._result_cache[0].lock = Lock() with self.assertRaises(TypeError): pickle.dumps(n_list) # Use the note queryset in a query, and evaluate # that query in a way that involves cloning. self.assertEqual(ExtraInfo.objects.filter(note__in=n_list)[0].info, 'good') def test_no_model_options_cloning(self): """ Cloning a queryset does not get out of hand. While complete testing is impossible, this is a sanity check against invalid use of deepcopy. refs #16759. """ opts_class = type(Note._meta) note_deepcopy = getattr(opts_class, "__deepcopy__", None) opts_class.__deepcopy__ = lambda obj, memo: self.fail("Model options shouldn't be cloned.") try: Note.objects.filter(pk__lte=F('pk') + 1).all() finally: if note_deepcopy is None: delattr(opts_class, "__deepcopy__") else: opts_class.__deepcopy__ = note_deepcopy def test_no_fields_cloning(self): """ Cloning a queryset does not get out of hand. 
While complete testing is impossible, this is a sanity check against invalid use of deepcopy. refs #16759. """ opts_class = type(Note._meta.get_field("misc")) note_deepcopy = getattr(opts_class, "__deepcopy__", None) opts_class.__deepcopy__ = lambda obj, memo: self.fail("Model fields shouldn't be cloned") try: Note.objects.filter(note=F('misc')).all() finally: if note_deepcopy is None: delattr(opts_class, "__deepcopy__") else: opts_class.__deepcopy__ = note_deepcopy class EmptyQuerySetTests(SimpleTestCase): def test_emptyqueryset_values(self): # #14366 -- Calling .values() on an empty QuerySet and then cloning # that should not cause an error self.assertCountEqual(Number.objects.none().values('num').order_by('num'), []) def test_values_subquery(self): self.assertCountEqual(Number.objects.filter(pk__in=Number.objects.none().values('pk')), []) self.assertCountEqual(Number.objects.filter(pk__in=Number.objects.none().values_list('pk')), []) def test_ticket_19151(self): # #19151 -- Calling .values() or .values_list() on an empty QuerySet # should return an empty QuerySet and not cause an error. q = Author.objects.none() self.assertCountEqual(q.values(), []) self.assertCountEqual(q.values_list(), []) class ValuesQuerysetTests(TestCase): @classmethod def setUpTestData(cls): Number.objects.create(num=72) def test_flat_values_list(self): qs = Number.objects.values_list("num") qs = qs.values_list("num", flat=True) self.assertSequenceEqual(qs, [72]) def test_extra_values(self): # testing for ticket 14930 issues qs = Number.objects.extra(select={'value_plus_x': 'num+%s', 'value_minus_x': 'num-%s'}, select_params=(1, 2)) qs = qs.order_by('value_minus_x') qs = qs.values('num') self.assertSequenceEqual(qs, [{'num': 72}]) def test_extra_values_order_twice(self): # testing for ticket 14930 issues qs = Number.objects.extra(select={'value_plus_one': 'num+1', 'value_minus_one': 'num-1'}) qs = qs.order_by('value_minus_one').order_by('value_plus_one') qs = qs.values('num') self.assertSequenceEqual(qs, [{'num': 72}]) def test_extra_values_order_multiple(self): # Postgres doesn't allow constants in order by, so check for that. 
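        # Ordering by several extra() selections, one of them a constant,
        # should still build valid SQL and return the single row.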
        qs = Number.objects.extra(select={
            'value_plus_one': 'num+1',
            'value_minus_one': 'num-1',
            'constant_value': '1'
        })
        qs = qs.order_by('value_plus_one', 'value_minus_one', 'constant_value')
        qs = qs.values('num')
        self.assertSequenceEqual(qs, [{'num': 72}])

    def test_extra_values_order_in_extra(self):
        # testing for ticket 14930 issues
        qs = Number.objects.extra(
            select={'value_plus_one': 'num+1', 'value_minus_one': 'num-1'},
            order_by=['value_minus_one'],
        )
        qs = qs.values('num')
        self.assertSequenceEqual(qs, [{'num': 72}])

    def test_extra_select_params_values_order_in_extra(self):
        # testing for ticket 23259 issues
        qs = Number.objects.extra(
            select={'value_plus_x': 'num+%s'},
            select_params=[1],
            order_by=['value_plus_x'],
        )
        qs = qs.filter(num=72)
        qs = qs.values('num')
        self.assertSequenceEqual(qs, [{'num': 72}])

    def test_extra_multiple_select_params_values_order_by(self):
        # testing for ticket 23259 issues
        qs = Number.objects.extra(select={'value_plus_x': 'num+%s', 'value_minus_x': 'num-%s'}, select_params=(72, 72))
        qs = qs.order_by('value_minus_x')
        qs = qs.filter(num=1)
        qs = qs.values('num')
        self.assertSequenceEqual(qs, [])

    def test_extra_values_list(self):
        # testing for ticket 14930 issues
        qs = Number.objects.extra(select={'value_plus_one': 'num+1'})
        qs = qs.order_by('value_plus_one')
        qs = qs.values_list('num')
        self.assertSequenceEqual(qs, [(72,)])

    def test_flat_extra_values_list(self):
        # testing for ticket 14930 issues
        qs = Number.objects.extra(select={'value_plus_one': 'num+1'})
        qs = qs.order_by('value_plus_one')
        qs = qs.values_list('num', flat=True)
        self.assertSequenceEqual(qs, [72])

    def test_field_error_values_list(self):
        # see #23443
        msg = "Cannot resolve keyword %r into field. Join on 'name' not permitted." % 'foo'
        with self.assertRaisesMessage(FieldError, msg):
            Tag.objects.values_list('name__foo')

    def test_named_values_list_flat(self):
        msg = "'flat' and 'named' can't be used together."
with self.assertRaisesMessage(TypeError, msg): Number.objects.values_list('num', flat=True, named=True) def test_named_values_list_bad_field_name(self): msg = "Type names and field names must be valid identifiers: '1'" with self.assertRaisesMessage(ValueError, msg): Number.objects.extra(select={'1': 'num+1'}).values_list('1', named=True).first() def test_named_values_list_with_fields(self): qs = Number.objects.extra(select={'num2': 'num+1'}).annotate(Count('id')) values = qs.values_list('num', 'num2', named=True).first() self.assertEqual(type(values).__name__, 'Row') self.assertEqual(values._fields, ('num', 'num2')) self.assertEqual(values.num, 72) self.assertEqual(values.num2, 73) def test_named_values_list_without_fields(self): qs = Number.objects.extra(select={'num2': 'num+1'}).annotate(Count('id')) values = qs.values_list(named=True).first() self.assertEqual(type(values).__name__, 'Row') self.assertEqual( values._fields, ('num2', 'id', 'num', 'other_num', 'another_num', 'id__count'), ) self.assertEqual(values.num, 72) self.assertEqual(values.num2, 73) self.assertEqual(values.id__count, 1) def test_named_values_list_expression_with_default_alias(self): expr = Count('id') values = Number.objects.annotate(id__count1=expr).values_list(expr, 'id__count1', named=True).first() self.assertEqual(values._fields, ('id__count2', 'id__count1')) def test_named_values_list_expression(self): expr = F('num') + 1 qs = Number.objects.annotate(combinedexpression1=expr).values_list(expr, 'combinedexpression1', named=True) values = qs.first() self.assertEqual(values._fields, ('combinedexpression2', 'combinedexpression1')) def test_named_values_pickle(self): value = Number.objects.values_list('num', 'other_num', named=True).get() self.assertEqual(value, (72, None)) self.assertEqual(pickle.loads(pickle.dumps(value)), value) class QuerySetSupportsPythonIdioms(TestCase): @classmethod def setUpTestData(cls): some_date = datetime.datetime(2014, 5, 16, 12, 1) cls.articles = [ Article.objects.create(name=f'Article {i}', created=some_date) for i in range(1, 8) ] def get_ordered_articles(self): return Article.objects.all().order_by('name') def test_can_get_items_using_index_and_slice_notation(self): self.assertEqual(self.get_ordered_articles()[0].name, 'Article 1') self.assertSequenceEqual( self.get_ordered_articles()[1:3], [self.articles[1], self.articles[2]], ) def test_slicing_with_steps_can_be_used(self): self.assertSequenceEqual( self.get_ordered_articles()[::2], [ self.articles[0], self.articles[2], self.articles[4], self.articles[6], ] ) def test_slicing_without_step_is_lazy(self): with self.assertNumQueries(0): self.get_ordered_articles()[0:5] def test_slicing_with_tests_is_not_lazy(self): with self.assertNumQueries(1): self.get_ordered_articles()[0:5:3] def test_slicing_can_slice_again_after_slicing(self): self.assertSequenceEqual( self.get_ordered_articles()[0:5][0:2], [self.articles[0], self.articles[1]], ) self.assertSequenceEqual(self.get_ordered_articles()[0:5][4:], [self.articles[4]]) self.assertSequenceEqual(self.get_ordered_articles()[0:5][5:], []) # Some more tests! self.assertSequenceEqual( self.get_ordered_articles()[2:][0:2], [self.articles[2], self.articles[3]], ) self.assertSequenceEqual( self.get_ordered_articles()[2:][:2], [self.articles[2], self.articles[3]], ) self.assertSequenceEqual(self.get_ordered_articles()[2:][2:3], [self.articles[4]]) # Using an offset without a limit is also possible. 
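        # Slicing with only a start index ([5:]) queries from offset 5
        # onwards, so the remaining two articles are returned.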
self.assertSequenceEqual( self.get_ordered_articles()[5:], [self.articles[5], self.articles[6]], ) def test_slicing_cannot_filter_queryset_once_sliced(self): msg = 'Cannot filter a query once a slice has been taken.' with self.assertRaisesMessage(TypeError, msg): Article.objects.all()[0:5].filter(id=1) def test_slicing_cannot_reorder_queryset_once_sliced(self): msg = 'Cannot reorder a query once a slice has been taken.' with self.assertRaisesMessage(TypeError, msg): Article.objects.all()[0:5].order_by('id') def test_slicing_cannot_combine_queries_once_sliced(self): msg = 'Cannot combine queries once a slice has been taken.' with self.assertRaisesMessage(TypeError, msg): Article.objects.all()[0:1] & Article.objects.all()[4:5] def test_slicing_negative_indexing_not_supported_for_single_element(self): """hint: inverting your ordering might do what you need""" with self.assertRaisesMessage(AssertionError, "Negative indexing is not supported."): Article.objects.all()[-1] def test_slicing_negative_indexing_not_supported_for_range(self): """hint: inverting your ordering might do what you need""" with self.assertRaisesMessage(AssertionError, "Negative indexing is not supported."): Article.objects.all()[0:-5] def test_invalid_index(self): msg = 'QuerySet indices must be integers or slices, not str.' with self.assertRaisesMessage(TypeError, msg): Article.objects.all()['foo'] def test_can_get_number_of_items_in_queryset_using_standard_len(self): self.assertEqual(len(Article.objects.filter(name__exact='Article 1')), 1) def test_can_combine_queries_using_and_and_or_operators(self): s1 = Article.objects.filter(name__exact='Article 1') s2 = Article.objects.filter(name__exact='Article 2') self.assertSequenceEqual( (s1 | s2).order_by('name'), [self.articles[0], self.articles[1]], ) self.assertSequenceEqual(s1 & s2, []) class WeirdQuerysetSlicingTests(TestCase): @classmethod def setUpTestData(cls): Number.objects.create(num=1) Number.objects.create(num=2) Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='two', created=datetime.datetime.now()) Article.objects.create(name='three', created=datetime.datetime.now()) Article.objects.create(name='four', created=datetime.datetime.now()) food = Food.objects.create(name='spam') Eaten.objects.create(meal='spam with eggs', food=food) def test_tickets_7698_10202(self): # People like to slice with '0' as the high-water mark. self.assertQuerysetEqual(Article.objects.all()[0:0], []) self.assertQuerysetEqual(Article.objects.all()[0:0][:10], []) self.assertEqual(Article.objects.all()[:0].count(), 0) msg = 'Cannot change a query once a slice has been taken.' 
with self.assertRaisesMessage(TypeError, msg): Article.objects.all()[:0].latest('created') def test_empty_resultset_sql(self): # ticket #12192 self.assertNumQueries(0, lambda: list(Number.objects.all()[1:1])) def test_empty_sliced_subquery(self): self.assertEqual(Eaten.objects.filter(food__in=Food.objects.all()[0:0]).count(), 0) def test_empty_sliced_subquery_exclude(self): self.assertEqual(Eaten.objects.exclude(food__in=Food.objects.all()[0:0]).count(), 1) def test_zero_length_values_slicing(self): n = 42 with self.assertNumQueries(0): self.assertQuerysetEqual(Article.objects.values()[n:n], []) self.assertQuerysetEqual(Article.objects.values_list()[n:n], []) class EscapingTests(TestCase): def test_ticket_7302(self): # Reserved names are appropriately escaped r_a = ReservedName.objects.create(name='a', order=42) r_b = ReservedName.objects.create(name='b', order=37) self.assertSequenceEqual( ReservedName.objects.all().order_by('order'), [r_b, r_a], ) self.assertSequenceEqual( ReservedName.objects.extra(select={'stuff': 'name'}, order_by=('order', 'stuff')), [r_b, r_a], ) class ToFieldTests(TestCase): def test_in_query(self): apple = Food.objects.create(name="apple") pear = Food.objects.create(name="pear") lunch = Eaten.objects.create(food=apple, meal="lunch") dinner = Eaten.objects.create(food=pear, meal="dinner") self.assertEqual( set(Eaten.objects.filter(food__in=[apple, pear])), {lunch, dinner}, ) def test_in_subquery(self): apple = Food.objects.create(name="apple") lunch = Eaten.objects.create(food=apple, meal="lunch") self.assertEqual( set(Eaten.objects.filter(food__in=Food.objects.filter(name='apple'))), {lunch} ) self.assertEqual( set(Eaten.objects.filter(food__in=Food.objects.filter(name='apple').values('eaten__meal'))), set() ) self.assertEqual( set(Food.objects.filter(eaten__in=Eaten.objects.filter(meal='lunch'))), {apple} ) def test_nested_in_subquery(self): extra = ExtraInfo.objects.create() author = Author.objects.create(num=42, extra=extra) report = Report.objects.create(creator=author) comment = ReportComment.objects.create(report=report) comments = ReportComment.objects.filter( report__in=Report.objects.filter( creator__in=extra.author_set.all(), ), ) self.assertSequenceEqual(comments, [comment]) def test_reverse_in(self): apple = Food.objects.create(name="apple") pear = Food.objects.create(name="pear") lunch_apple = Eaten.objects.create(food=apple, meal="lunch") lunch_pear = Eaten.objects.create(food=pear, meal="dinner") self.assertEqual( set(Food.objects.filter(eaten__in=[lunch_apple, lunch_pear])), {apple, pear} ) def test_single_object(self): apple = Food.objects.create(name="apple") lunch = Eaten.objects.create(food=apple, meal="lunch") dinner = Eaten.objects.create(food=apple, meal="dinner") self.assertEqual( set(Eaten.objects.filter(food=apple)), {lunch, dinner} ) def test_single_object_reverse(self): apple = Food.objects.create(name="apple") lunch = Eaten.objects.create(food=apple, meal="lunch") self.assertEqual( set(Food.objects.filter(eaten=lunch)), {apple} ) def test_recursive_fk(self): node1 = Node.objects.create(num=42) node2 = Node.objects.create(num=1, parent=node1) self.assertEqual( list(Node.objects.filter(parent=node1)), [node2] ) def test_recursive_fk_reverse(self): node1 = Node.objects.create(num=42) node2 = Node.objects.create(num=1, parent=node1) self.assertEqual( list(Node.objects.filter(node=node2)), [node1] ) class IsNullTests(TestCase): def test_primary_key(self): custom = CustomPk.objects.create(name='pk') null = Related.objects.create() notnull = 
Related.objects.create(custom=custom) self.assertSequenceEqual(Related.objects.filter(custom__isnull=False), [notnull]) self.assertSequenceEqual(Related.objects.filter(custom__isnull=True), [null]) def test_to_field(self): apple = Food.objects.create(name="apple") e1 = Eaten.objects.create(food=apple, meal="lunch") e2 = Eaten.objects.create(meal="lunch") self.assertSequenceEqual( Eaten.objects.filter(food__isnull=False), [e1], ) self.assertSequenceEqual( Eaten.objects.filter(food__isnull=True), [e2], ) class ConditionalTests(TestCase): """Tests whose execution depend on different environment conditions like Python version or DB backend features""" @classmethod def setUpTestData(cls): generic = NamedCategory.objects.create(name="Generic") t1 = Tag.objects.create(name='t1', category=generic) Tag.objects.create(name='t2', parent=t1, category=generic) t3 = Tag.objects.create(name='t3', parent=t1) Tag.objects.create(name='t4', parent=t3) Tag.objects.create(name='t5', parent=t3) def test_infinite_loop(self): # If you're not careful, it's possible to introduce infinite loops via # default ordering on foreign keys in a cycle. We detect that. with self.assertRaisesMessage(FieldError, 'Infinite loop caused by ordering.'): list(LoopX.objects.all()) # Force queryset evaluation with list() with self.assertRaisesMessage(FieldError, 'Infinite loop caused by ordering.'): list(LoopZ.objects.all()) # Force queryset evaluation with list() # Note that this doesn't cause an infinite loop, since the default # ordering on the Tag model is empty (and thus defaults to using "id" # for the related field). self.assertEqual(len(Tag.objects.order_by('parent')), 5) # ... but you can still order in a non-recursive fashion among linked # fields (the previous test failed because the default ordering was # recursive). self.assertQuerysetEqual( LoopX.objects.all().order_by('y__x__y__x__id'), [] ) # When grouping without specifying ordering, we add an explicit "ORDER BY NULL" # portion in MySQL to prevent unnecessary sorting. @skipUnlessDBFeature('requires_explicit_null_ordering_when_grouping') def test_null_ordering_added(self): query = Tag.objects.values_list('parent_id', flat=True).order_by().query query.group_by = ['parent_id'] sql = query.get_compiler(DEFAULT_DB_ALIAS).as_sql()[0] fragment = "ORDER BY " pos = sql.find(fragment) self.assertEqual(sql.find(fragment, pos + 1), -1) self.assertEqual(sql.find("NULL", pos + len(fragment)), pos + len(fragment)) def test_in_list_limit(self): # The "in" lookup works with lists of 1000 items or more. # The numbers amount is picked to force three different IN batches # for Oracle, yet to be less than 2100 parameter limit for MSSQL. numbers = list(range(2050)) max_query_params = connection.features.max_query_params if max_query_params is None or max_query_params >= len(numbers): Number.objects.bulk_create(Number(num=num) for num in numbers) for number in [1000, 1001, 2000, len(numbers)]: with self.subTest(number=number): self.assertEqual(Number.objects.filter(num__in=numbers[:number]).count(), number) class UnionTests(unittest.TestCase): """ Tests for the union of two querysets. Bug #12252. 
""" @classmethod def setUpTestData(cls): objectas = [] objectbs = [] objectcs = [] a_info = ['one', 'two', 'three'] for name in a_info: o = ObjectA(name=name) o.save() objectas.append(o) b_info = [('un', 1, objectas[0]), ('deux', 2, objectas[0]), ('trois', 3, objectas[2])] for name, number, objecta in b_info: o = ObjectB(name=name, num=number, objecta=objecta) o.save() objectbs.append(o) c_info = [('ein', objectas[2], objectbs[2]), ('zwei', objectas[1], objectbs[1])] for name, objecta, objectb in c_info: o = ObjectC(name=name, objecta=objecta, objectb=objectb) o.save() objectcs.append(o) def check_union(self, model, Q1, Q2): filter = model.objects.filter self.assertEqual(set(filter(Q1) | filter(Q2)), set(filter(Q1 | Q2))) self.assertEqual(set(filter(Q2) | filter(Q1)), set(filter(Q1 | Q2))) def test_A_AB(self): Q1 = Q(name='two') Q2 = Q(objectb__name='deux') self.check_union(ObjectA, Q1, Q2) def test_A_AB2(self): Q1 = Q(name='two') Q2 = Q(objectb__name='deux', objectb__num=2) self.check_union(ObjectA, Q1, Q2) def test_AB_ACB(self): Q1 = Q(objectb__name='deux') Q2 = Q(objectc__objectb__name='deux') self.check_union(ObjectA, Q1, Q2) def test_BAB_BAC(self): Q1 = Q(objecta__objectb__name='deux') Q2 = Q(objecta__objectc__name='ein') self.check_union(ObjectB, Q1, Q2) def test_BAB_BACB(self): Q1 = Q(objecta__objectb__name='deux') Q2 = Q(objecta__objectc__objectb__name='trois') self.check_union(ObjectB, Q1, Q2) def test_BA_BCA__BAB_BAC_BCA(self): Q1 = Q(objecta__name='one', objectc__objecta__name='two') Q2 = Q(objecta__objectc__name='ein', objectc__objecta__name='three', objecta__objectb__name='trois') self.check_union(ObjectB, Q1, Q2) class DefaultValuesInsertTest(TestCase): def test_no_extra_params(self): """ Can create an instance of a model with only the PK field (#17056)." 
""" DumbCategory.objects.create() class ExcludeTests(TestCase): @classmethod def setUpTestData(cls): f1 = Food.objects.create(name='apples') cls.f2 = Food.objects.create(name='oranges') Eaten.objects.create(food=f1, meal='dinner') cls.j1 = Job.objects.create(name='Manager') cls.r1 = Responsibility.objects.create(description='Playing golf') cls.j2 = Job.objects.create(name='Programmer') cls.r2 = Responsibility.objects.create(description='Programming') JobResponsibilities.objects.create(job=cls.j1, responsibility=cls.r1) JobResponsibilities.objects.create(job=cls.j2, responsibility=cls.r2) def test_to_field(self): self.assertSequenceEqual( Food.objects.exclude(eaten__meal='dinner'), [self.f2], ) self.assertSequenceEqual( Job.objects.exclude(responsibilities__description='Playing golf'), [self.j2], ) self.assertSequenceEqual( Responsibility.objects.exclude(jobs__name='Manager'), [self.r2], ) def test_ticket14511(self): alex = Person.objects.get_or_create(name='Alex')[0] jane = Person.objects.get_or_create(name='Jane')[0] oracle = Company.objects.get_or_create(name='Oracle')[0] google = Company.objects.get_or_create(name='Google')[0] microsoft = Company.objects.get_or_create(name='Microsoft')[0] intel = Company.objects.get_or_create(name='Intel')[0] def employ(employer, employee, title): Employment.objects.get_or_create(employee=employee, employer=employer, title=title) employ(oracle, alex, 'Engineer') employ(oracle, alex, 'Developer') employ(google, alex, 'Engineer') employ(google, alex, 'Manager') employ(microsoft, alex, 'Manager') employ(intel, alex, 'Manager') employ(microsoft, jane, 'Developer') employ(intel, jane, 'Manager') alex_tech_employers = alex.employers.filter( employment__title__in=('Engineer', 'Developer')).distinct().order_by('name') self.assertSequenceEqual(alex_tech_employers, [google, oracle]) alex_nontech_employers = alex.employers.exclude( employment__title__in=('Engineer', 'Developer')).distinct().order_by('name') self.assertSequenceEqual(alex_nontech_employers, [google, intel, microsoft]) def test_exclude_reverse_fk_field_ref(self): tag = Tag.objects.create() Note.objects.create(tag=tag, note='note') annotation = Annotation.objects.create(name='annotation', tag=tag) self.assertEqual(Annotation.objects.exclude(tag__note__note=F('name')).get(), annotation) def test_exclude_with_circular_fk_relation(self): self.assertEqual(ObjectB.objects.exclude(objecta__objectb__name=F('name')).count(), 0) def test_subquery_exclude_outerref(self): qs = JobResponsibilities.objects.filter( Exists(Responsibility.objects.exclude(jobs=OuterRef('job'))), ) self.assertTrue(qs.exists()) self.r1.delete() self.assertFalse(qs.exists()) def test_exclude_nullable_fields(self): number = Number.objects.create(num=1, other_num=1) Number.objects.create(num=2, other_num=2, another_num=2) self.assertSequenceEqual( Number.objects.exclude(other_num=F('another_num')), [number], ) self.assertSequenceEqual( Number.objects.exclude(num=F('another_num')), [number], ) def test_exclude_multivalued_exists(self): with CaptureQueriesContext(connection) as captured_queries: self.assertSequenceEqual( Job.objects.exclude(responsibilities__description='Programming'), [self.j1], ) self.assertIn('exists', captured_queries[0]['sql'].lower()) def test_exclude_subquery(self): subquery = JobResponsibilities.objects.filter( responsibility__description='bar', ) | JobResponsibilities.objects.exclude( job__responsibilities__description='foo', ) self.assertCountEqual( Job.objects.annotate( responsibility=subquery.filter( 
                    job=OuterRef('name'),
                ).values('id')[:1]
            ),
            [self.j1, self.j2],
        )


class ExcludeTest17600(TestCase):
    """
    Some regression tests for ticket #17600. Some of these likely duplicate
    other existing tests.
    """
    @classmethod
    def setUpTestData(cls):
        # Create a few Orders.
        cls.o1 = Order.objects.create(pk=1)
        cls.o2 = Order.objects.create(pk=2)
        cls.o3 = Order.objects.create(pk=3)

        # Create some OrderItems for the first order with homogeneous
        # status_id values
        cls.oi1 = OrderItem.objects.create(order=cls.o1, status=1)
        cls.oi2 = OrderItem.objects.create(order=cls.o1, status=1)
        cls.oi3 = OrderItem.objects.create(order=cls.o1, status=1)

        # Create some OrderItems for the second order with heterogeneous
        # status_id values
        cls.oi4 = OrderItem.objects.create(order=cls.o2, status=1)
        cls.oi5 = OrderItem.objects.create(order=cls.o2, status=2)
        cls.oi6 = OrderItem.objects.create(order=cls.o2, status=3)

        # Create some OrderItems for the third order with heterogeneous
        # status_id values
        cls.oi7 = OrderItem.objects.create(order=cls.o3, status=2)
        cls.oi8 = OrderItem.objects.create(order=cls.o3, status=3)
        cls.oi9 = OrderItem.objects.create(order=cls.o3, status=4)

    def test_exclude_plain(self):
        """
        This should exclude Orders which have some items with status 1
        """
        self.assertSequenceEqual(
            Order.objects.exclude(items__status=1),
            [self.o3],
        )

    def test_exclude_plain_distinct(self):
        """
        This should exclude Orders which have some items with status 1
        """
        self.assertSequenceEqual(
            Order.objects.exclude(items__status=1).distinct(),
            [self.o3],
        )

    def test_exclude_with_q_object_distinct(self):
        """
        This should exclude Orders which have some items with status 1
        """
        self.assertSequenceEqual(
            Order.objects.exclude(Q(items__status=1)).distinct(),
            [self.o3],
        )

    def test_exclude_with_q_object_no_distinct(self):
        """
        This should exclude Orders which have some items with status 1
        """
        self.assertSequenceEqual(
            Order.objects.exclude(Q(items__status=1)),
            [self.o3],
        )

    def test_exclude_with_q_is_equal_to_plain_exclude(self):
        """
        Using exclude(condition) and exclude(Q(condition)) should yield the
        same QuerySet
        """
        self.assertEqual(
            list(Order.objects.exclude(items__status=1).distinct()),
            list(Order.objects.exclude(Q(items__status=1)).distinct()))

    def test_exclude_with_q_is_equal_to_plain_exclude_variation(self):
        """
        Using exclude(condition) and exclude(Q(condition)) should yield the
        same QuerySet
        """
        self.assertEqual(
            list(Order.objects.exclude(items__status=1)),
            list(Order.objects.exclude(Q(items__status=1)).distinct()))

    @unittest.expectedFailure
    def test_only_orders_with_all_items_having_status_1(self):
        """
        This should only return orders having ALL items set to status 1, or
        those orders not having any items at all. The correct way to write
        this query in SQL seems to be using two nested subqueries.
""" self.assertQuerysetEqual( Order.objects.exclude(~Q(items__status=1)).distinct(), [self.o1], ) class Exclude15786(TestCase): """Regression test for #15786""" def test_ticket15786(self): c1 = SimpleCategory.objects.create(name='c1') c2 = SimpleCategory.objects.create(name='c2') OneToOneCategory.objects.create(category=c1) OneToOneCategory.objects.create(category=c2) rel = CategoryRelationship.objects.create(first=c1, second=c2) self.assertEqual( CategoryRelationship.objects.exclude( first__onetoonecategory=F('second__onetoonecategory') ).get(), rel ) class NullInExcludeTest(TestCase): @classmethod def setUpTestData(cls): NullableName.objects.create(name='i1') NullableName.objects.create() def test_null_in_exclude_qs(self): none_val = '' if connection.features.interprets_empty_strings_as_nulls else None self.assertQuerysetEqual( NullableName.objects.exclude(name__in=[]), ['i1', none_val], attrgetter('name')) self.assertQuerysetEqual( NullableName.objects.exclude(name__in=['i1']), [none_val], attrgetter('name')) self.assertQuerysetEqual( NullableName.objects.exclude(name__in=['i3']), ['i1', none_val], attrgetter('name')) inner_qs = NullableName.objects.filter(name='i1').values_list('name') self.assertQuerysetEqual( NullableName.objects.exclude(name__in=inner_qs), [none_val], attrgetter('name')) # The inner queryset wasn't executed - it should be turned # into subquery above self.assertIs(inner_qs._result_cache, None) @unittest.expectedFailure def test_col_not_in_list_containing_null(self): """ The following case is not handled properly because SQL's COL NOT IN (list containing null) handling is too weird to abstract away. """ self.assertQuerysetEqual( NullableName.objects.exclude(name__in=[None]), ['i1'], attrgetter('name')) def test_double_exclude(self): self.assertEqual( list(NullableName.objects.filter(~~Q(name='i1'))), list(NullableName.objects.filter(Q(name='i1')))) self.assertNotIn( 'IS NOT NULL', str(NullableName.objects.filter(~~Q(name='i1')).query)) class EmptyStringsAsNullTest(TestCase): """ Filtering on non-null character fields works as expected. The reason for these tests is that Oracle treats '' as NULL, and this can cause problems in query construction. Refs #17957. """ @classmethod def setUpTestData(cls): cls.nc = NamedCategory.objects.create(name='') def test_direct_exclude(self): self.assertQuerysetEqual( NamedCategory.objects.exclude(name__in=['nonexistent']), [self.nc.pk], attrgetter('pk') ) def test_joined_exclude(self): self.assertQuerysetEqual( DumbCategory.objects.exclude(namedcategory__name__in=['nonexistent']), [self.nc.pk], attrgetter('pk') ) def test_21001(self): foo = NamedCategory.objects.create(name='foo') self.assertQuerysetEqual( NamedCategory.objects.exclude(name=''), [foo.pk], attrgetter('pk') ) class ProxyQueryCleanupTest(TestCase): def test_evaluated_proxy_count(self): """ Generating the query string doesn't alter the query's state in irreversible ways. Refs #18248. 
""" ProxyCategory.objects.create() qs = ProxyCategory.objects.all() self.assertEqual(qs.count(), 1) str(qs.query) self.assertEqual(qs.count(), 1) class WhereNodeTest(SimpleTestCase): class DummyNode: def as_sql(self, compiler, connection): return 'dummy', [] class MockCompiler: def compile(self, node): return node.as_sql(self, connection) def __call__(self, name): return connection.ops.quote_name(name) def test_empty_full_handling_conjunction(self): compiler = WhereNodeTest.MockCompiler() w = WhereNode(children=[NothingNode()]) with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('', [])) w = WhereNode(children=[self.DummyNode(), self.DummyNode()]) self.assertEqual(w.as_sql(compiler, connection), ('(dummy AND dummy)', [])) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy AND dummy)', [])) w = WhereNode(children=[NothingNode(), self.DummyNode()]) with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('', [])) def test_empty_full_handling_disjunction(self): compiler = WhereNodeTest.MockCompiler() w = WhereNode(children=[NothingNode()], connector='OR') with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('', [])) w = WhereNode(children=[self.DummyNode(), self.DummyNode()], connector='OR') self.assertEqual(w.as_sql(compiler, connection), ('(dummy OR dummy)', [])) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy OR dummy)', [])) w = WhereNode(children=[NothingNode(), self.DummyNode()], connector='OR') self.assertEqual(w.as_sql(compiler, connection), ('dummy', [])) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy)', [])) def test_empty_nodes(self): compiler = WhereNodeTest.MockCompiler() empty_w = WhereNode() w = WhereNode(children=[empty_w, empty_w]) self.assertEqual(w.as_sql(compiler, connection), ('', [])) w.negate() with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.connector = 'OR' with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('', [])) w = WhereNode(children=[empty_w, NothingNode()], connector='OR') self.assertEqual(w.as_sql(compiler, connection), ('', [])) w = WhereNode(children=[empty_w, NothingNode()], connector='AND') with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) class QuerySetExceptionTests(SimpleTestCase): def test_iter_exceptions(self): qs = ExtraInfo.objects.only('author') msg = "'ManyToOneRel' object has no attribute 'attname'" with self.assertRaisesMessage(AttributeError, msg): list(qs) def test_invalid_order_by(self): msg = ( "Cannot resolve keyword '*' into field. Choices are: created, id, " "name" ) with self.assertRaisesMessage(FieldError, msg): Article.objects.order_by('*') def test_invalid_order_by_raw_column_alias(self): msg = ( "Cannot resolve keyword 'queries_author.name' into field. Choices " "are: cover, created, creator, creator_id, id, modified, name, " "note, note_id, tags" ) with self.assertRaisesMessage(FieldError, msg): Item.objects.values('creator__name').order_by('queries_author.name') def test_invalid_queryset_model(self): msg = 'Cannot use QuerySet for "Article": Use a QuerySet for "ExtraInfo".' 
with self.assertRaisesMessage(ValueError, msg): list(Author.objects.filter(extra=Article.objects.all())) class NullJoinPromotionOrTest(TestCase): @classmethod def setUpTestData(cls): cls.d1 = ModelD.objects.create(name='foo') d2 = ModelD.objects.create(name='bar') cls.a1 = ModelA.objects.create(name='a1', d=cls.d1) c = ModelC.objects.create(name='c') b = ModelB.objects.create(name='b', c=c) cls.a2 = ModelA.objects.create(name='a2', b=b, d=d2) def test_ticket_17886(self): # The first Q-object is generating the match, the rest of the filters # should not remove the match even if they do not match anything. The # problem here was that b__name generates a LOUTER JOIN, then # b__c__name generates join to c, which the ORM tried to promote but # failed as that join isn't nullable. q_obj = ( Q(d__name='foo') | Q(b__name='foo') | Q(b__c__name='foo') ) qset = ModelA.objects.filter(q_obj) self.assertEqual(list(qset), [self.a1]) # We generate one INNER JOIN to D. The join is direct and not nullable # so we can use INNER JOIN for it. However, we can NOT use INNER JOIN # for the b->c join, as a->b is nullable. self.assertEqual(str(qset.query).count('INNER JOIN'), 1) def test_isnull_filter_promotion(self): qs = ModelA.objects.filter(Q(b__name__isnull=True)) self.assertEqual(str(qs.query).count('LEFT OUTER'), 1) self.assertEqual(list(qs), [self.a1]) qs = ModelA.objects.filter(~Q(b__name__isnull=True)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(list(qs), [self.a2]) qs = ModelA.objects.filter(~~Q(b__name__isnull=True)) self.assertEqual(str(qs.query).count('LEFT OUTER'), 1) self.assertEqual(list(qs), [self.a1]) qs = ModelA.objects.filter(Q(b__name__isnull=False)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(list(qs), [self.a2]) qs = ModelA.objects.filter(~Q(b__name__isnull=False)) self.assertEqual(str(qs.query).count('LEFT OUTER'), 1) self.assertEqual(list(qs), [self.a1]) qs = ModelA.objects.filter(~~Q(b__name__isnull=False)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(list(qs), [self.a2]) def test_null_join_demotion(self): qs = ModelA.objects.filter(Q(b__name__isnull=False) & Q(b__name__isnull=True)) self.assertIn(' INNER JOIN ', str(qs.query)) qs = ModelA.objects.filter(Q(b__name__isnull=True) & Q(b__name__isnull=False)) self.assertIn(' INNER JOIN ', str(qs.query)) qs = ModelA.objects.filter(Q(b__name__isnull=False) | Q(b__name__isnull=True)) self.assertIn(' LEFT OUTER JOIN ', str(qs.query)) qs = ModelA.objects.filter(Q(b__name__isnull=True) | Q(b__name__isnull=False)) self.assertIn(' LEFT OUTER JOIN ', str(qs.query)) def test_ticket_21366(self): n = Note.objects.create(note='n', misc='m') e = ExtraInfo.objects.create(info='info', note=n) a = Author.objects.create(name='Author1', num=1, extra=e) Ranking.objects.create(rank=1, author=a) r1 = Report.objects.create(name='Foo', creator=a) r2 = Report.objects.create(name='Bar') Report.objects.create(name='Bar', creator=a) qs = Report.objects.filter( Q(creator__ranking__isnull=True) | Q(creator__ranking__rank=1, name='Foo') ) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) self.assertEqual(str(qs.query).count(' JOIN '), 2) self.assertSequenceEqual(qs.order_by('name'), [r2, r1]) def test_ticket_21748(self): i1 = Identifier.objects.create(name='i1') i2 = Identifier.objects.create(name='i2') i3 = Identifier.objects.create(name='i3') Program.objects.create(identifier=i1) Channel.objects.create(identifier=i1) Program.objects.create(identifier=i2) 
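        # Only i3 has neither a related Program nor a related Channel, so
        # matching it requires LEFT OUTER joins on both reverse relations.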
self.assertSequenceEqual(Identifier.objects.filter(program=None, channel=None), [i3]) self.assertSequenceEqual(Identifier.objects.exclude(program=None, channel=None).order_by('name'), [i1, i2]) def test_ticket_21748_double_negated_and(self): i1 = Identifier.objects.create(name='i1') i2 = Identifier.objects.create(name='i2') Identifier.objects.create(name='i3') p1 = Program.objects.create(identifier=i1) c1 = Channel.objects.create(identifier=i1) Program.objects.create(identifier=i2) # Check the ~~Q() (or equivalently .exclude(~Q)) works like Q() for # join promotion. qs1_doubleneg = Identifier.objects.exclude(~Q(program__id=p1.id, channel__id=c1.id)).order_by('pk') qs1_filter = Identifier.objects.filter(program__id=p1.id, channel__id=c1.id).order_by('pk') self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x) self.assertEqual(str(qs1_filter.query).count('JOIN'), str(qs1_doubleneg.query).count('JOIN')) self.assertEqual(2, str(qs1_doubleneg.query).count('INNER JOIN')) self.assertEqual(str(qs1_filter.query).count('INNER JOIN'), str(qs1_doubleneg.query).count('INNER JOIN')) def test_ticket_21748_double_negated_or(self): i1 = Identifier.objects.create(name='i1') i2 = Identifier.objects.create(name='i2') Identifier.objects.create(name='i3') p1 = Program.objects.create(identifier=i1) c1 = Channel.objects.create(identifier=i1) p2 = Program.objects.create(identifier=i2) # Test OR + doubleneg. The expected result is that channel is LOUTER # joined, program INNER joined qs1_filter = Identifier.objects.filter( Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id) ).order_by('pk') qs1_doubleneg = Identifier.objects.exclude( ~Q(Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id)) ).order_by('pk') self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x) self.assertEqual(str(qs1_filter.query).count('JOIN'), str(qs1_doubleneg.query).count('JOIN')) self.assertEqual(1, str(qs1_doubleneg.query).count('INNER JOIN')) self.assertEqual(str(qs1_filter.query).count('INNER JOIN'), str(qs1_doubleneg.query).count('INNER JOIN')) def test_ticket_21748_complex_filter(self): i1 = Identifier.objects.create(name='i1') i2 = Identifier.objects.create(name='i2') Identifier.objects.create(name='i3') p1 = Program.objects.create(identifier=i1) c1 = Channel.objects.create(identifier=i1) p2 = Program.objects.create(identifier=i2) # Finally, a more complex case, one time in a way where each # NOT is pushed to lowest level in the boolean tree, and # another query where this isn't done. qs1 = Identifier.objects.filter( ~Q(~Q(program__id=p2.id, channel__id=c1.id) & Q(program__id=p1.id)) ).order_by('pk') qs2 = Identifier.objects.filter( Q(Q(program__id=p2.id, channel__id=c1.id) | ~Q(program__id=p1.id)) ).order_by('pk') self.assertQuerysetEqual(qs1, qs2, lambda x: x) self.assertEqual(str(qs1.query).count('JOIN'), str(qs2.query).count('JOIN')) self.assertEqual(0, str(qs1.query).count('INNER JOIN')) self.assertEqual(str(qs1.query).count('INNER JOIN'), str(qs2.query).count('INNER JOIN')) class ReverseJoinTrimmingTest(TestCase): def test_reverse_trimming(self): # We don't accidentally trim reverse joins - we can't know if there is # anything on the other side of the join, so trimming reverse joins # can't be done, ever. t = Tag.objects.create() qs = Tag.objects.filter(annotation__tag=t.pk) self.assertIn('INNER JOIN', str(qs.query)) self.assertEqual(list(qs), []) class JoinReuseTest(TestCase): """ The queries reuse joins sensibly (for example, direct joins are always reused). 
""" def test_fk_reuse(self): qs = Annotation.objects.filter(tag__name='foo').filter(tag__name='bar') self.assertEqual(str(qs.query).count('JOIN'), 1) def test_fk_reuse_select_related(self): qs = Annotation.objects.filter(tag__name='foo').select_related('tag') self.assertEqual(str(qs.query).count('JOIN'), 1) def test_fk_reuse_annotation(self): qs = Annotation.objects.filter(tag__name='foo').annotate(cnt=Count('tag__name')) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_fk_reuse_disjunction(self): qs = Annotation.objects.filter(Q(tag__name='foo') | Q(tag__name='bar')) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_fk_reuse_order_by(self): qs = Annotation.objects.filter(tag__name='foo').order_by('tag__name') self.assertEqual(str(qs.query).count('JOIN'), 1) def test_revo2o_reuse(self): qs = Detail.objects.filter(member__name='foo').filter(member__name='foo') self.assertEqual(str(qs.query).count('JOIN'), 1) def test_revfk_noreuse(self): qs = Author.objects.filter(report__name='r4').filter(report__name='r1') self.assertEqual(str(qs.query).count('JOIN'), 2) def test_inverted_q_across_relations(self): """ When a trimmable join is specified in the query (here school__), the ORM detects it and removes unnecessary joins. The set of reusable joins are updated after trimming the query so that other lookups don't consider that the outer query's filters are in effect for the subquery (#26551). """ springfield_elementary = School.objects.create() hogward = School.objects.create() Student.objects.create(school=springfield_elementary) hp = Student.objects.create(school=hogward) Classroom.objects.create(school=hogward, name='Potion') Classroom.objects.create(school=springfield_elementary, name='Main') qs = Student.objects.filter( ~(Q(school__classroom__name='Main') & Q(school__classroom__has_blackboard=None)) ) self.assertSequenceEqual(qs, [hp]) class DisjunctionPromotionTests(TestCase): def test_disjunction_promotion_select_related(self): fk1 = FK1.objects.create(f1='f1', f2='f2') basea = BaseA.objects.create(a=fk1) qs = BaseA.objects.filter(Q(a=fk1) | Q(b=2)) self.assertEqual(str(qs.query).count(' JOIN '), 0) qs = qs.select_related('a', 'b') self.assertEqual(str(qs.query).count(' INNER JOIN '), 0) self.assertEqual(str(qs.query).count(' LEFT OUTER JOIN '), 2) with self.assertNumQueries(1): self.assertSequenceEqual(qs, [basea]) self.assertEqual(qs[0].a, fk1) self.assertIs(qs[0].b, None) def test_disjunction_promotion1(self): # Pre-existing join, add two ORed filters to the same join, # all joins can be INNER JOINS. qs = BaseA.objects.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) qs = qs.filter(Q(b__f1='foo') | Q(b__f2='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 2) # Reverse the order of AND and OR filters. qs = BaseA.objects.filter(Q(b__f1='foo') | Q(b__f2='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) qs = qs.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 2) def test_disjunction_promotion2(self): qs = BaseA.objects.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) # Now we have two different joins in an ORed condition, these # must be OUTER joins. The pre-existing join should remain INNER. qs = qs.filter(Q(b__f1='foo') | Q(c__f2='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) # Reverse case. 
        qs = BaseA.objects.filter(Q(b__f1='foo') | Q(c__f2='foo'))
        self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
        qs = qs.filter(a__f1='foo')
        self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
        self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)

    def test_disjunction_promotion3(self):
        qs = BaseA.objects.filter(a__f2='bar')
        self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
        # The ANDed a__f2 filter allows us to keep using the INNER JOIN even
        # inside the ORed case. If the join to a__ returns nothing, the ANDed
        # filter for a__f2 can't be true.
        qs = qs.filter(Q(a__f1='foo') | Q(b__f2='foo'))
        self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
        self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)

    def test_disjunction_promotion3_demote(self):
        # This one needs demotion logic: the first filter causes a to be
        # outer joined, the second filter makes it inner join again.
        qs = BaseA.objects.filter(
            Q(a__f1='foo') | Q(b__f2='foo')).filter(a__f2='bar')
        self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
        self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)

    def test_disjunction_promotion4_demote(self):
        qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
        self.assertEqual(str(qs.query).count('JOIN'), 0)
        # Demotion is needed for the "a" join. It is marked as an outer join
        # by the above filter (even if it is trimmed away).
        qs = qs.filter(a__f1='foo')
        self.assertEqual(str(qs.query).count('INNER JOIN'), 1)

    def test_disjunction_promotion4(self):
        qs = BaseA.objects.filter(a__f1='foo')
        self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
        qs = qs.filter(Q(a=1) | Q(a=2))
        self.assertEqual(str(qs.query).count('INNER JOIN'), 1)

    def test_disjunction_promotion5_demote(self):
        qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
        # Note that the above filters on a force the join to be an inner join
        # even if it is trimmed.
        self.assertEqual(str(qs.query).count('JOIN'), 0)
        qs = qs.filter(Q(a__f1='foo') | Q(b__f1='foo'))
        # So, now the a__f1 join doesn't need promotion.
        self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
        # But b__f1 does.
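        # b__f1 appears only inside the OR, so its join has to stay LEFT OUTER.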
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) qs = BaseA.objects.filter(Q(a__f1='foo') | Q(b__f1='foo')) # Now the join to a is created as LOUTER self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) qs = qs.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) def test_disjunction_promotion6(self): qs = BaseA.objects.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('JOIN'), 0) qs = BaseA.objects.filter(Q(a__f1='foo') & Q(b__f1='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 2) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0) qs = BaseA.objects.filter(Q(a__f1='foo') & Q(b__f1='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(qs.query).count('INNER JOIN'), 2) qs = qs.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('INNER JOIN'), 2) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0) def test_disjunction_promotion7(self): qs = BaseA.objects.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('JOIN'), 0) qs = BaseA.objects.filter(Q(a__f1='foo') | (Q(b__f1='foo') & Q(a__f1='bar'))) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) qs = BaseA.objects.filter( (Q(a__f1='foo') | Q(b__f1='foo')) & (Q(a__f1='bar') | Q(c__f1='foo')) ) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3) self.assertEqual(str(qs.query).count('INNER JOIN'), 0) qs = BaseA.objects.filter( Q(a__f1='foo') | Q(a__f1='bar') & (Q(b__f1='bar') | Q(c__f1='foo')) ) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) def test_disjunction_promotion_fexpression(self): qs = BaseA.objects.filter(Q(a__f1=F('b__f1')) | Q(b__f1='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) qs = BaseA.objects.filter(Q(a__f1=F('c__f1')) | Q(b__f1='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3) qs = BaseA.objects.filter(Q(a__f1=F('b__f1')) | Q(a__f2=F('b__f2')) | Q(c__f1='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3) qs = BaseA.objects.filter(Q(a__f1=F('c__f1')) | (Q(pk=1) & Q(pk=2))) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) self.assertEqual(str(qs.query).count('INNER JOIN'), 0) class ManyToManyExcludeTest(TestCase): def test_exclude_many_to_many(self): i_extra = Identifier.objects.create(name='extra') i_program = Identifier.objects.create(name='program') program = Program.objects.create(identifier=i_program) i_channel = Identifier.objects.create(name='channel') channel = Channel.objects.create(identifier=i_channel) channel.programs.add(program) # channel contains 'program1', so all Identifiers except that one # should be returned self.assertSequenceEqual( Identifier.objects.exclude(program__channel=channel).order_by('name'), [i_channel, i_extra], ) self.assertSequenceEqual( Identifier.objects.exclude(program__channel=None).order_by('name'), [i_program], ) def test_ticket_12823(self): pg3 = Page.objects.create(text='pg3') pg2 = Page.objects.create(text='pg2') pg1 = Page.objects.create(text='pg1') pa1 = Paragraph.objects.create(text='pa1') pa1.page.set([pg1, pg2]) pa2 = Paragraph.objects.create(text='pa2') pa2.page.set([pg2, pg3]) pa3 = Paragraph.objects.create(text='pa3') ch1 = Chapter.objects.create(title='ch1', paragraph=pa1) ch2 = Chapter.objects.create(title='ch2', paragraph=pa2) 
ch3 = Chapter.objects.create(title='ch3', paragraph=pa3) b1 = Book.objects.create(title='b1', chapter=ch1) b2 = Book.objects.create(title='b2', chapter=ch2) b3 = Book.objects.create(title='b3', chapter=ch3) q = Book.objects.exclude(chapter__paragraph__page__text='pg1') self.assertNotIn('IS NOT NULL', str(q.query)) self.assertEqual(len(q), 2) self.assertNotIn(b1, q) self.assertIn(b2, q) self.assertIn(b3, q) class RelabelCloneTest(TestCase): def test_ticket_19964(self): my1 = MyObject.objects.create(data='foo') my1.parent = my1 my1.save() my2 = MyObject.objects.create(data='bar', parent=my1) parents = MyObject.objects.filter(parent=F('id')) children = MyObject.objects.filter(parent__in=parents).exclude(parent=F('id')) self.assertEqual(list(parents), [my1]) # Evaluating the children query (which has parents as part of it) does # not change results for the parents query. self.assertEqual(list(children), [my2]) self.assertEqual(list(parents), [my1]) class Ticket20101Tests(TestCase): def test_ticket_20101(self): """ Tests QuerySet ORed combining in exclude subquery case. """ t = Tag.objects.create(name='foo') a1 = Annotation.objects.create(tag=t, name='a1') a2 = Annotation.objects.create(tag=t, name='a2') a3 = Annotation.objects.create(tag=t, name='a3') n = Note.objects.create(note='foo', misc='bar') qs1 = Note.objects.exclude(annotation__in=[a1, a2]) qs2 = Note.objects.filter(annotation__in=[a3]) self.assertIn(n, qs1) self.assertNotIn(n, qs2) self.assertIn(n, (qs1 | qs2)) class EmptyStringPromotionTests(SimpleTestCase): def test_empty_string_promotion(self): qs = RelatedObject.objects.filter(single__name='') if connection.features.interprets_empty_strings_as_nulls: self.assertIn('LEFT OUTER JOIN', str(qs.query)) else: self.assertNotIn('LEFT OUTER JOIN', str(qs.query)) class ValuesSubqueryTests(TestCase): def test_values_in_subquery(self): # If a values() queryset is used, then the given values # will be used instead of forcing use of the relation's field. o1 = Order.objects.create(id=-2) o2 = Order.objects.create(id=-1) oi1 = OrderItem.objects.create(order=o1, status=0) oi1.status = oi1.pk oi1.save() OrderItem.objects.create(order=o2, status=0) # The query below should match o1 as it has related order_item # with id == status. 
self.assertSequenceEqual(Order.objects.filter(items__in=OrderItem.objects.values_list('status')), [o1]) class DoubleInSubqueryTests(TestCase): def test_double_subquery_in(self): lfa1 = LeafA.objects.create(data='foo') lfa2 = LeafA.objects.create(data='bar') lfb1 = LeafB.objects.create(data='lfb1') lfb2 = LeafB.objects.create(data='lfb2') Join.objects.create(a=lfa1, b=lfb1) Join.objects.create(a=lfa2, b=lfb2) leaf_as = LeafA.objects.filter(data='foo').values_list('pk', flat=True) joins = Join.objects.filter(a__in=leaf_as).values_list('b__id', flat=True) qs = LeafB.objects.filter(pk__in=joins) self.assertSequenceEqual(qs, [lfb1]) class Ticket18785Tests(SimpleTestCase): def test_ticket_18785(self): # Test join trimming from ticket18785 qs = Item.objects.exclude( note__isnull=False ).filter( name='something', creator__extra__isnull=True ).order_by() self.assertEqual(1, str(qs.query).count('INNER JOIN')) self.assertEqual(0, str(qs.query).count('OUTER JOIN')) class Ticket20788Tests(TestCase): def test_ticket_20788(self): Paragraph.objects.create() paragraph = Paragraph.objects.create() page = paragraph.page.create() chapter = Chapter.objects.create(paragraph=paragraph) Book.objects.create(chapter=chapter) paragraph2 = Paragraph.objects.create() Page.objects.create() chapter2 = Chapter.objects.create(paragraph=paragraph2) book2 = Book.objects.create(chapter=chapter2) sentences_not_in_pub = Book.objects.exclude(chapter__paragraph__page=page) self.assertSequenceEqual(sentences_not_in_pub, [book2]) class Ticket12807Tests(TestCase): def test_ticket_12807(self): p1 = Paragraph.objects.create() p2 = Paragraph.objects.create() # The ORed condition below should have no effect on the query - the # ~Q(pk__in=[]) will always be True. qs = Paragraph.objects.filter((Q(pk=p2.pk) | ~Q(pk__in=[])) & Q(pk=p1.pk)) self.assertSequenceEqual(qs, [p1]) class RelatedLookupTypeTests(TestCase): error = 'Cannot query "%s": Must be "%s" instance.' @classmethod def setUpTestData(cls): cls.oa = ObjectA.objects.create(name="oa") cls.poa = ProxyObjectA.objects.get(name="oa") cls.coa = ChildObjectA.objects.create(name="coa") cls.wrong_type = Order.objects.create(id=cls.oa.pk) cls.ob = ObjectB.objects.create(name="ob", objecta=cls.oa, num=1) cls.pob1 = ProxyObjectB.objects.create(name="pob", objecta=cls.oa, num=2) cls.pob = ProxyObjectB.objects.all() cls.c = ObjectC.objects.create(childobjecta=cls.coa) def test_wrong_type_lookup(self): """ A ValueError is raised when the incorrect object type is passed to a query lookup. """ # Passing incorrect object type with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)): ObjectB.objects.get(objecta=self.wrong_type) with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)): ObjectB.objects.filter(objecta__in=[self.wrong_type]) with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)): ObjectB.objects.filter(objecta=self.wrong_type) with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)): ObjectA.objects.filter(objectb__in=[self.wrong_type, self.ob]) # Passing an object of the class on which query is done. 
with self.assertRaisesMessage(ValueError, self.error % (self.ob, ObjectA._meta.object_name)): ObjectB.objects.filter(objecta__in=[self.poa, self.ob]) with self.assertRaisesMessage(ValueError, self.error % (self.ob, ChildObjectA._meta.object_name)): ObjectC.objects.exclude(childobjecta__in=[self.coa, self.ob]) def test_wrong_backward_lookup(self): """ A ValueError is raised when the incorrect object type is passed to a query lookup for backward relations. """ with self.assertRaisesMessage(ValueError, self.error % (self.oa, ObjectB._meta.object_name)): ObjectA.objects.filter(objectb__in=[self.oa, self.ob]) with self.assertRaisesMessage(ValueError, self.error % (self.oa, ObjectB._meta.object_name)): ObjectA.objects.exclude(objectb=self.oa) with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)): ObjectA.objects.get(objectb=self.wrong_type) def test_correct_lookup(self): """ When passing proxy model objects, child objects, or parent objects, lookups work fine. """ out_a = [self.oa] out_b = [self.ob, self.pob1] out_c = [self.c] # proxy model objects self.assertSequenceEqual(ObjectB.objects.filter(objecta=self.poa).order_by('name'), out_b) self.assertSequenceEqual(ObjectA.objects.filter(objectb__in=self.pob).order_by('pk'), out_a * 2) # child objects self.assertSequenceEqual(ObjectB.objects.filter(objecta__in=[self.coa]), []) self.assertSequenceEqual(ObjectB.objects.filter(objecta__in=[self.poa, self.coa]).order_by('name'), out_b) self.assertSequenceEqual( ObjectB.objects.filter(objecta__in=iter([self.poa, self.coa])).order_by('name'), out_b ) # parent objects self.assertSequenceEqual(ObjectC.objects.exclude(childobjecta=self.oa), out_c) # QuerySet related object type checking shouldn't issue queries # (the querysets aren't evaluated here, hence zero queries) (#23266). with self.assertNumQueries(0): ObjectB.objects.filter(objecta__in=ObjectA.objects.all()) def test_values_queryset_lookup(self): """ #23396 - Ensure ValueQuerySets are not checked for compatibility with the lookup field """ # Make sure the num and objecta field values match. ob = ObjectB.objects.get(name='ob') ob.num = ob.objecta.pk ob.save() pob = ObjectB.objects.get(name='pob') pob.num = pob.objecta.pk pob.save() self.assertSequenceEqual(ObjectB.objects.filter( objecta__in=ObjectB.objects.all().values_list('num') ).order_by('pk'), [ob, pob]) class Ticket14056Tests(TestCase): def test_ticket_14056(self): s1 = SharedConnection.objects.create(data='s1') s2 = SharedConnection.objects.create(data='s2') s3 = SharedConnection.objects.create(data='s3') PointerA.objects.create(connection=s2) expected_ordering = ( [s1, s3, s2] if connection.features.nulls_order_largest else [s2, s1, s3] ) self.assertSequenceEqual(SharedConnection.objects.order_by('-pointera__connection', 'pk'), expected_ordering) class Ticket20955Tests(TestCase): def test_ticket_20955(self): jack = Staff.objects.create(name='jackstaff') jackstaff = StaffUser.objects.create(staff=jack) jill = Staff.objects.create(name='jillstaff') jillstaff = StaffUser.objects.create(staff=jill) task = Task.objects.create(creator=jackstaff, owner=jillstaff, title="task") task_get = Task.objects.get(pk=task.pk) # Load data so that assertNumQueries doesn't complain about the get # version's queries. 
task_get.creator.staffuser.staff task_get.owner.staffuser.staff qs = Task.objects.select_related( 'creator__staffuser__staff', 'owner__staffuser__staff') self.assertEqual(str(qs.query).count(' JOIN '), 6) task_select_related = qs.get(pk=task.pk) with self.assertNumQueries(0): self.assertEqual(task_select_related.creator.staffuser.staff, task_get.creator.staffuser.staff) self.assertEqual(task_select_related.owner.staffuser.staff, task_get.owner.staffuser.staff) class Ticket21203Tests(TestCase): def test_ticket_21203(self): p = Ticket21203Parent.objects.create(parent_bool=True) c = Ticket21203Child.objects.create(parent=p) qs = Ticket21203Child.objects.select_related('parent').defer('parent__created') self.assertSequenceEqual(qs, [c]) self.assertIs(qs[0].parent.parent_bool, True) class ValuesJoinPromotionTests(TestCase): def test_values_no_promotion_for_existing(self): qs = Node.objects.filter(parent__parent__isnull=False) self.assertIn(' INNER JOIN ', str(qs.query)) qs = qs.values('parent__parent__id') self.assertIn(' INNER JOIN ', str(qs.query)) # Make sure there is a left outer join without the filter. qs = Node.objects.values('parent__parent__id') self.assertIn(' LEFT OUTER JOIN ', str(qs.query)) def test_non_nullable_fk_not_promoted(self): qs = ObjectB.objects.values('objecta__name') self.assertIn(' INNER JOIN ', str(qs.query)) def test_ticket_21376(self): a = ObjectA.objects.create() ObjectC.objects.create(objecta=a) qs = ObjectC.objects.filter( Q(objecta=a) | Q(objectb__objecta=a), ) qs = qs.filter( Q(objectb=1) | Q(objecta=a), ) self.assertEqual(qs.count(), 1) tblname = connection.ops.quote_name(ObjectB._meta.db_table) self.assertIn(' LEFT OUTER JOIN %s' % tblname, str(qs.query)) class ForeignKeyToBaseExcludeTests(TestCase): def test_ticket_21787(self): sc1 = SpecialCategory.objects.create(special_name='sc1', name='sc1') sc2 = SpecialCategory.objects.create(special_name='sc2', name='sc2') sc3 = SpecialCategory.objects.create(special_name='sc3', name='sc3') c1 = CategoryItem.objects.create(category=sc1) CategoryItem.objects.create(category=sc2) self.assertSequenceEqual(SpecialCategory.objects.exclude(categoryitem__id=c1.pk).order_by('name'), [sc2, sc3]) self.assertSequenceEqual(SpecialCategory.objects.filter(categoryitem__id=c1.pk), [sc1]) class ReverseM2MCustomPkTests(TestCase): def test_ticket_21879(self): cpt1 = CustomPkTag.objects.create(id='cpt1', tag='cpt1') cp1 = CustomPk.objects.create(name='cp1', extra='extra') cp1.custompktag_set.add(cpt1) self.assertSequenceEqual(CustomPk.objects.filter(custompktag=cpt1), [cp1]) self.assertSequenceEqual(CustomPkTag.objects.filter(custom_pk=cp1), [cpt1]) class Ticket22429Tests(TestCase): def test_ticket_22429(self): sc1 = School.objects.create() st1 = Student.objects.create(school=sc1) sc2 = School.objects.create() st2 = Student.objects.create(school=sc2) cr = Classroom.objects.create(school=sc1) cr.students.add(st1) queryset = Student.objects.filter(~Q(classroom__school=F('school'))) self.assertSequenceEqual(queryset, [st2]) class Ticket23605Tests(TestCase): def test_ticket_23605(self): # Test filtering on a complicated q-object from ticket's report. # The query structure is such that we have multiple nested subqueries. # The original problem was that the inner queries weren't relabeled # correctly. # See also #24090. 
a1 = Ticket23605A.objects.create() a2 = Ticket23605A.objects.create() c1 = Ticket23605C.objects.create(field_c0=10000.0) Ticket23605B.objects.create( field_b0=10000.0, field_b1=True, modelc_fk=c1, modela_fk=a1) complex_q = Q(pk__in=Ticket23605A.objects.filter( Q( # True for a1 as field_b0 = 10000, field_c0=10000 # False for a2 as no ticket23605b found ticket23605b__field_b0__gte=1000000 / F("ticket23605b__modelc_fk__field_c0") ) & # True for a1 (field_b1=True) Q(ticket23605b__field_b1=True) & ~Q(ticket23605b__pk__in=Ticket23605B.objects.filter( ~( # Same filters as above commented filters, but # double-negated (one for Q() above, one for # parentheses). So, again a1 match, a2 not. Q(field_b1=True) & Q(field_b0__gte=1000000 / F("modelc_fk__field_c0")) ) ))).filter(ticket23605b__field_b1=True)) qs1 = Ticket23605A.objects.filter(complex_q) self.assertSequenceEqual(qs1, [a1]) qs2 = Ticket23605A.objects.exclude(complex_q) self.assertSequenceEqual(qs2, [a2]) class TestTicket24279(TestCase): def test_ticket_24278(self): School.objects.create() qs = School.objects.filter(Q(pk__in=()) | Q()) self.assertQuerysetEqual(qs, []) class TestInvalidValuesRelation(SimpleTestCase): def test_invalid_values(self): msg = "Field 'id' expected a number but got 'abc'." with self.assertRaisesMessage(ValueError, msg): Annotation.objects.filter(tag='abc') with self.assertRaisesMessage(ValueError, msg): Annotation.objects.filter(tag__in=[123, 'abc']) class TestTicket24605(TestCase): def test_ticket_24605(self): """ Subquery table names should be quoted. """ i1 = Individual.objects.create(alive=True) RelatedIndividual.objects.create(related=i1) i2 = Individual.objects.create(alive=False) RelatedIndividual.objects.create(related=i2) i3 = Individual.objects.create(alive=True) i4 = Individual.objects.create(alive=False) self.assertSequenceEqual(Individual.objects.filter(Q(alive=False), Q(related_individual__isnull=True)), [i4]) self.assertSequenceEqual( Individual.objects.exclude(Q(alive=False), Q(related_individual__isnull=True)).order_by('pk'), [i1, i2, i3] ) class Ticket23622Tests(TestCase): @skipUnlessDBFeature('can_distinct_on_fields') def test_ticket_23622(self): """ Make sure __pk__in and __in work the same for related fields when using a distinct on subquery. 
""" a1 = Ticket23605A.objects.create() a2 = Ticket23605A.objects.create() c1 = Ticket23605C.objects.create(field_c0=0.0) Ticket23605B.objects.create( modela_fk=a1, field_b0=123, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a1, field_b0=23, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a1, field_b0=234, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a1, field_b0=12, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a2, field_b0=567, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a2, field_b0=76, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a2, field_b0=7, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a2, field_b0=56, field_b1=True, modelc_fk=c1, ) qx = ( Q(ticket23605b__pk__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) & Q(ticket23605b__field_b0__gte=300) ) qy = ( Q(ticket23605b__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) & Q(ticket23605b__field_b0__gte=300) ) self.assertEqual( set(Ticket23605A.objects.filter(qx).values_list('pk', flat=True)), set(Ticket23605A.objects.filter(qy).values_list('pk', flat=True)) ) self.assertSequenceEqual(Ticket23605A.objects.filter(qx), [a2])
3da8a38196c0d54ec308fa47ab589d87acd3b2e2d21c53a25a4b5ee3d8407403
import os from datetime import date from django.contrib.sitemaps import Sitemap from django.contrib.sites.models import Site from django.core.exceptions import ImproperlyConfigured from django.test import modify_settings, override_settings from django.utils import translation from django.utils.formats import localize from .base import SitemapTestsBase from .models import TestModel class HTTPSitemapTests(SitemapTestsBase): use_sitemap_err_msg = ( 'To use sitemaps, either enable the sites framework or pass a ' 'Site/RequestSite object in your view.' ) def test_simple_sitemap_index(self): "A simple sitemap index can be rendered" response = self.client.get('/simple/index.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>%s/simple/sitemap-simple.xml</loc></sitemap> </sitemapindex> """ % self.base_url self.assertXMLEqual(response.content.decode(), expected_content) def test_sitemap_not_callable(self): """A sitemap may not be callable.""" response = self.client.get('/simple-not-callable/index.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>%s/simple/sitemap-simple.xml</loc></sitemap> </sitemapindex> """ % self.base_url self.assertXMLEqual(response.content.decode(), expected_content) def test_paged_sitemap(self): """A sitemap may have multiple pages.""" response = self.client.get('/simple-paged/index.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>{0}/simple/sitemap-simple.xml</loc></sitemap><sitemap><loc>{0}/simple/sitemap-simple.xml?p=2</loc></sitemap> </sitemapindex> """.format(self.base_url) self.assertXMLEqual(response.content.decode(), expected_content) @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(os.path.dirname(__file__), 'templates')], }]) def test_simple_sitemap_custom_index(self): "A simple sitemap index can be rendered with a custom template" response = self.client.get('/simple/custom-index.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <!-- This is a customised template --> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>%s/simple/sitemap-simple.xml</loc></sitemap> </sitemapindex> """ % self.base_url self.assertXMLEqual(response.content.decode(), expected_content) def test_simple_sitemap_section(self): "A simple sitemap section can be rendered" response = self.client.get('/simple/sitemap-simple.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> <url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url> </urlset> """ % (self.base_url, date.today()) self.assertXMLEqual(response.content.decode(), expected_content) def test_no_section(self): response = self.client.get('/simple/sitemap-simple2.xml') self.assertEqual(str(response.context['exception']), "No sitemap available for section: 'simple2'") self.assertEqual(response.status_code, 404) def test_empty_page(self): response = self.client.get('/simple/sitemap-simple.xml?p=0') self.assertEqual(str(response.context['exception']), 'Page 0 empty') self.assertEqual(response.status_code, 404) def test_page_not_int(self): response = 
self.client.get('/simple/sitemap-simple.xml?p=test')
        self.assertEqual(str(response.context['exception']), "No page 'test'")
        self.assertEqual(response.status_code, 404)

    def test_simple_sitemap(self):
        "A simple sitemap can be rendered"
        response = self.client.get('/simple/sitemap.xml')
        expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml">
<url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url>
</urlset>
""" % (self.base_url, date.today())
        self.assertXMLEqual(response.content.decode(), expected_content)

    @override_settings(TEMPLATES=[{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(os.path.dirname(__file__), 'templates')],
    }])
    def test_simple_custom_sitemap(self):
        "A simple sitemap can be rendered with a custom template"
        response = self.client.get('/simple/custom-sitemap.xml')
        expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<!-- This is a customised template -->
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url>
</urlset>
""" % (self.base_url, date.today())
        self.assertXMLEqual(response.content.decode(), expected_content)

    def test_sitemap_last_modified(self):
        "Last-Modified header is set correctly"
        response = self.client.get('/lastmod/sitemap.xml')
        self.assertEqual(response.headers['Last-Modified'], 'Wed, 13 Mar 2013 10:00:00 GMT')

    def test_sitemap_last_modified_date(self):
        """
        The Last-Modified header should support dates (without time).
        """
        response = self.client.get('/lastmod/date-sitemap.xml')
        self.assertEqual(response.headers['Last-Modified'], 'Wed, 13 Mar 2013 00:00:00 GMT')

    def test_sitemap_last_modified_tz(self):
        """
        The Last-Modified header should be converted from timezone aware dates
        to GMT.
        """
        response = self.client.get('/lastmod/tz-sitemap.xml')
        self.assertEqual(response.headers['Last-Modified'], 'Wed, 13 Mar 2013 15:00:00 GMT')

    def test_sitemap_last_modified_missing(self):
        "Last-Modified header is missing when sitemap has no lastmod"
        response = self.client.get('/generic/sitemap.xml')
        self.assertFalse(response.has_header('Last-Modified'))

    def test_sitemap_last_modified_mixed(self):
        "Last-Modified header is omitted when lastmod not on all items"
        response = self.client.get('/lastmod-mixed/sitemap.xml')
        self.assertFalse(response.has_header('Last-Modified'))

    def test_sitemaps_lastmod_mixed_ascending_last_modified_missing(self):
        """
        The Last-Modified header is omitted when lastmod isn't found in all
        sitemaps. Test sitemaps are sorted by lastmod in ascending order.
        """
        response = self.client.get('/lastmod-sitemaps/mixed-ascending.xml')
        self.assertFalse(response.has_header('Last-Modified'))

    def test_sitemaps_lastmod_mixed_descending_last_modified_missing(self):
        """
        The Last-Modified header is omitted when lastmod isn't found in all
        sitemaps. Test sitemaps are sorted by lastmod in descending order.
        """
        response = self.client.get('/lastmod-sitemaps/mixed-descending.xml')
        self.assertFalse(response.has_header('Last-Modified'))

    def test_sitemaps_lastmod_ascending(self):
        """
        The Last-Modified header is set to the most recent sitemap lastmod.
        Test sitemaps are sorted by lastmod in ascending order.
""" response = self.client.get('/lastmod-sitemaps/ascending.xml') self.assertEqual(response.headers['Last-Modified'], 'Sat, 20 Apr 2013 05:00:00 GMT') def test_sitemaps_lastmod_descending(self): """ The Last-Modified header is set to the most recent sitemap lastmod. Test sitemaps are sorted by lastmod in descending order. """ response = self.client.get('/lastmod-sitemaps/descending.xml') self.assertEqual(response.headers['Last-Modified'], 'Sat, 20 Apr 2013 05:00:00 GMT') @override_settings(USE_I18N=True, USE_L10N=True) def test_localized_priority(self): """The priority value should not be localized.""" with translation.override('fr'): self.assertEqual('0,3', localize(0.3)) # Priorities aren't rendered in localized format. response = self.client.get('/simple/sitemap.xml') self.assertContains(response, '<priority>0.5</priority>') self.assertContains(response, '<lastmod>%s</lastmod>' % date.today()) @modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}) def test_requestsite_sitemap(self): # Hitting the flatpages sitemap without the sites framework installed # doesn't raise an exception. response = self.client.get('/simple/sitemap.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> <url><loc>http://testserver/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url> </urlset> """ % date.today() self.assertXMLEqual(response.content.decode(), expected_content) def test_sitemap_get_urls_no_site_1(self): """ Check we get ImproperlyConfigured if we don't pass a site object to Sitemap.get_urls and no Site objects exist """ Site.objects.all().delete() with self.assertRaisesMessage(ImproperlyConfigured, self.use_sitemap_err_msg): Sitemap().get_urls() @modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}) def test_sitemap_get_urls_no_site_2(self): """ Check we get ImproperlyConfigured when we don't pass a site object to Sitemap.get_urls if Site objects exists, but the sites framework is not actually installed. """ with self.assertRaisesMessage(ImproperlyConfigured, self.use_sitemap_err_msg): Sitemap().get_urls() def test_sitemap_item(self): """ Check to make sure that the raw item is included with each Sitemap.get_url() url result. """ test_sitemap = Sitemap() test_sitemap.items = TestModel.objects.order_by('pk').all def is_testmodel(url): return isinstance(url['item'], TestModel) item_in_url_info = all(map(is_testmodel, test_sitemap.get_urls())) self.assertTrue(item_in_url_info) def test_cached_sitemap_index(self): """ A cached sitemap index can be rendered (#2713). 
""" response = self.client.get('/cached/index.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>%s/cached/sitemap-simple.xml</loc></sitemap> </sitemapindex> """ % self.base_url self.assertXMLEqual(response.content.decode(), expected_content) def test_x_robots_sitemap(self): response = self.client.get('/simple/index.xml') self.assertEqual(response.headers['X-Robots-Tag'], 'noindex, noodp, noarchive') response = self.client.get('/simple/sitemap.xml') self.assertEqual(response.headers['X-Robots-Tag'], 'noindex, noodp, noarchive') def test_empty_sitemap(self): response = self.client.get('/empty/sitemap.xml') self.assertEqual(response.status_code, 200) @override_settings(LANGUAGES=(('en', 'English'), ('pt', 'Portuguese'))) def test_simple_i18n_sitemap_index(self): """ A simple i18n sitemap index can be rendered, without logging variable lookup errors. """ with self.assertNoLogs('django.template', 'DEBUG'): response = self.client.get('/simple/i18n.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> <url><loc>{0}/en/i18n/testmodel/{1}/</loc><changefreq>never</changefreq><priority>0.5</priority></url><url><loc>{0}/pt/i18n/testmodel/{1}/</loc><changefreq>never</changefreq><priority>0.5</priority></url> </urlset> """.format(self.base_url, self.i18n_model.pk) self.assertXMLEqual(response.content.decode(), expected_content) @override_settings(LANGUAGES=(('en', 'English'), ('pt', 'Portuguese'))) def test_alternate_i18n_sitemap_index(self): """ A i18n sitemap with alternate/hreflang links can be rendered. """ response = self.client.get('/alternates/i18n.xml') url, pk = self.base_url, self.i18n_model.pk expected_urls = f""" <url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority> <xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/> </url> <url><loc>{url}/pt/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority> <xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/> </url> """.replace('\n', '') expected_content = f"""<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> {expected_urls} </urlset> """ self.assertXMLEqual(response.content.decode(), expected_content) @override_settings(LANGUAGES=(('en', 'English'), ('pt', 'Portuguese'), ('es', 'Spanish'))) def test_alternate_i18n_sitemap_limited(self): """ A i18n sitemap index with limited languages can be rendered. 
""" response = self.client.get('/limited/i18n.xml') url, pk = self.base_url, self.i18n_model.pk expected_urls = f""" <url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority> <xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="es" href="{url}/es/i18n/testmodel/{pk}/"/> </url> <url><loc>{url}/es/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority> <xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="es" href="{url}/es/i18n/testmodel/{pk}/"/> </url> """.replace('\n', '') expected_content = f"""<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> {expected_urls} </urlset> """ self.assertXMLEqual(response.content.decode(), expected_content) @override_settings(LANGUAGES=(('en', 'English'), ('pt', 'Portuguese'))) def test_alternate_i18n_sitemap_xdefault(self): """ A i18n sitemap index with x-default can be rendered. """ response = self.client.get('/x-default/i18n.xml') url, pk = self.base_url, self.i18n_model.pk expected_urls = f""" <url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority> <xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/> </url> <url><loc>{url}/pt/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority> <xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/> </url> """.replace('\n', '') expected_content = f"""<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> {expected_urls} </urlset> """ self.assertXMLEqual(response.content.decode(), expected_content) def test_sitemap_without_entries(self): response = self.client.get('/sitemap-without-entries/sitemap.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> </urlset>""" self.assertXMLEqual(response.content.decode(), expected_content)
f9f0043d797f2519041ed16b565383626b9c5ebf01269ccf71e61f50ff992589
import datetime import itertools import unittest from copy import copy from unittest import mock from django.core.exceptions import FieldError from django.core.management.color import no_style from django.db import ( DatabaseError, DataError, IntegrityError, OperationalError, connection, ) from django.db.models import ( CASCADE, PROTECT, AutoField, BigAutoField, BigIntegerField, BinaryField, BooleanField, CharField, CheckConstraint, DateField, DateTimeField, DecimalField, F, FloatField, ForeignKey, ForeignObject, Index, IntegerField, JSONField, ManyToManyField, Model, OneToOneField, OrderBy, PositiveIntegerField, Q, SlugField, SmallAutoField, SmallIntegerField, TextField, TimeField, UniqueConstraint, UUIDField, Value, ) from django.db.models.fields.json import KeyTextTransform from django.db.models.functions import Abs, Cast, Collate, Lower, Random, Upper from django.db.models.indexes import IndexExpression from django.db.transaction import TransactionManagementError, atomic from django.test import ( TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.utils import ( CaptureQueriesContext, isolate_apps, register_lookup, ) from django.utils import timezone from .fields import ( CustomManyToManyField, InheritedManyToManyField, MediumBlobField, ) from .models import ( Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex, AuthorWithDefaultHeight, AuthorWithEvenLongerName, AuthorWithIndexedName, AuthorWithIndexedNameAndBirthday, AuthorWithUniqueName, AuthorWithUniqueNameAndBirthday, Book, BookForeignObj, BookWeak, BookWithLongName, BookWithO2O, BookWithoutAuthor, BookWithSlug, IntegerPK, Node, Note, NoteRename, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest, new_apps, ) class SchemaTests(TransactionTestCase): """ Tests for the schema-alteration code. Be aware that these tests are more liable than most to false results, as sometimes the code to check if a test has worked is almost as complex as the code it is testing. """ available_apps = [] models = [ Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex, AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book, BookWeak, BookWithLongName, BookWithO2O, BookWithSlug, IntegerPK, Node, Note, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest, ] # Utility functions def setUp(self): # local_models should contain test dependent model classes that will be # automatically removed from the app cache on test tear down. self.local_models = [] # isolated_local_models contains models that are in test methods # decorated with @isolate_apps. 
self.isolated_local_models = [] def tearDown(self): # Delete any tables made for our models self.delete_tables() new_apps.clear_cache() for model in new_apps.get_models(): model._meta._expire_cache() if 'schema' in new_apps.all_models: for model in self.local_models: for many_to_many in model._meta.many_to_many: through = many_to_many.remote_field.through if through and through._meta.auto_created: del new_apps.all_models['schema'][through._meta.model_name] del new_apps.all_models['schema'][model._meta.model_name] if self.isolated_local_models: with connection.schema_editor() as editor: for model in self.isolated_local_models: editor.delete_model(model) def delete_tables(self): "Deletes all model tables for our models for a clean test environment" converter = connection.introspection.identifier_converter with connection.schema_editor() as editor: connection.disable_constraint_checking() table_names = connection.introspection.table_names() if connection.features.ignores_table_name_case: table_names = [table_name.lower() for table_name in table_names] for model in itertools.chain(SchemaTests.models, self.local_models): tbl = converter(model._meta.db_table) if connection.features.ignores_table_name_case: tbl = tbl.lower() if tbl in table_names: editor.delete_model(model) table_names.remove(tbl) connection.enable_constraint_checking() def column_classes(self, model): with connection.cursor() as cursor: columns = { d[0]: (connection.introspection.get_field_type(d[1], d), d) for d in connection.introspection.get_table_description( cursor, model._meta.db_table, ) } # SQLite has a different format for field_type for name, (type, desc) in columns.items(): if isinstance(type, tuple): columns[name] = (type[0], desc) # SQLite also doesn't error properly if not columns: raise DatabaseError("Table does not exist (empty pragma)") return columns def get_primary_key(self, table): with connection.cursor() as cursor: return connection.introspection.get_primary_key_column(cursor, table) def get_indexes(self, table): """ Get the indexes on the table using a new cursor. """ with connection.cursor() as cursor: return [ c['columns'][0] for c in connection.introspection.get_constraints(cursor, table).values() if c['index'] and len(c['columns']) == 1 ] def get_uniques(self, table): with connection.cursor() as cursor: return [ c['columns'][0] for c in connection.introspection.get_constraints(cursor, table).values() if c['unique'] and len(c['columns']) == 1 ] def get_constraints(self, table): """ Get the constraints on a table using a new cursor. 
""" with connection.cursor() as cursor: return connection.introspection.get_constraints(cursor, table) def get_constraints_for_column(self, model, column_name): constraints = self.get_constraints(model._meta.db_table) constraints_for_column = [] for name, details in constraints.items(): if details['columns'] == [column_name]: constraints_for_column.append(name) return sorted(constraints_for_column) def check_added_field_default(self, schema_editor, model, field, field_name, expected_default, cast_function=None): with connection.cursor() as cursor: schema_editor.add_field(model, field) cursor.execute("SELECT {} FROM {};".format(field_name, model._meta.db_table)) database_default = cursor.fetchall()[0][0] if cast_function and type(database_default) != type(expected_default): database_default = cast_function(database_default) self.assertEqual(database_default, expected_default) def get_constraints_count(self, table, column, fk_to): """ Return a dict with keys 'fks', 'uniques, and 'indexes' indicating the number of foreign keys, unique constraints, and indexes on `table`.`column`. The `fk_to` argument is a 2-tuple specifying the expected foreign key relationship's (table, column). """ with connection.cursor() as cursor: constraints = connection.introspection.get_constraints(cursor, table) counts = {'fks': 0, 'uniques': 0, 'indexes': 0} for c in constraints.values(): if c['columns'] == [column]: if c['foreign_key'] == fk_to: counts['fks'] += 1 if c['unique']: counts['uniques'] += 1 elif c['index']: counts['indexes'] += 1 return counts def get_column_collation(self, table, column): with connection.cursor() as cursor: return next( f.collation for f in connection.introspection.get_table_description(cursor, table) if f.name == column ) def assertIndexOrder(self, table, index, order): constraints = self.get_constraints(table) self.assertIn(index, constraints) index_orders = constraints[index]['orders'] self.assertTrue(all(val == expected for val, expected in zip(index_orders, order))) def assertForeignKeyExists(self, model, column, expected_fk_table, field='id'): """ Fail if the FK constraint on `model.Meta.db_table`.`column` to `expected_fk_table`.id doesn't exist. """ constraints = self.get_constraints(model._meta.db_table) constraint_fk = None for details in constraints.values(): if details['columns'] == [column] and details['foreign_key']: constraint_fk = details['foreign_key'] break self.assertEqual(constraint_fk, (expected_fk_table, field)) def assertForeignKeyNotExists(self, model, column, expected_fk_table): with self.assertRaises(AssertionError): self.assertForeignKeyExists(model, column, expected_fk_table) # Tests def test_creation_deletion(self): """ Tries creating a model's table, and then deleting it. """ with connection.schema_editor() as editor: # Create the table editor.create_model(Author) # The table is there list(Author.objects.all()) # Clean up that table editor.delete_model(Author) # No deferred SQL should be left over. 
self.assertEqual(editor.deferred_sql, []) # The table is gone with self.assertRaises(DatabaseError): list(Author.objects.all()) @skipUnlessDBFeature('supports_foreign_keys') def test_fk(self): "Creating tables out of FK order, then repointing, works" # Create the table with connection.schema_editor() as editor: editor.create_model(Book) editor.create_model(Author) editor.create_model(Tag) # Initial tables are there list(Author.objects.all()) list(Book.objects.all()) # Make sure the FK constraint is present with self.assertRaises(IntegrityError): Book.objects.create( author_id=1, title="Much Ado About Foreign Keys", pub_date=datetime.datetime.now(), ) # Repoint the FK constraint old_field = Book._meta.get_field("author") new_field = ForeignKey(Tag, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) self.assertForeignKeyExists(Book, 'author_id', 'schema_tag') @skipUnlessDBFeature('can_create_inline_fk') def test_inline_fk(self): # Create some tables. with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) editor.create_model(Note) self.assertForeignKeyNotExists(Note, 'book_id', 'schema_book') # Add a foreign key from one to the other. with connection.schema_editor() as editor: new_field = ForeignKey(Book, CASCADE) new_field.set_attributes_from_name('book') editor.add_field(Note, new_field) self.assertForeignKeyExists(Note, 'book_id', 'schema_book') # Creating a FK field with a constraint uses a single statement without # a deferred ALTER TABLE. self.assertFalse([ sql for sql in (str(statement) for statement in editor.deferred_sql) if sql.startswith('ALTER TABLE') and 'ADD CONSTRAINT' in sql ]) @skipUnlessDBFeature('can_create_inline_fk') def test_add_inline_fk_update_data(self): with connection.schema_editor() as editor: editor.create_model(Node) # Add an inline foreign key and update data in the same transaction. new_field = ForeignKey(Node, CASCADE, related_name='new_fk', null=True) new_field.set_attributes_from_name('new_parent_fk') parent = Node.objects.create() with connection.schema_editor() as editor: editor.add_field(Node, new_field) editor.execute('UPDATE schema_node SET new_parent_fk_id = %s;', [parent.pk]) assertIndex = ( self.assertIn if connection.features.indexes_foreign_keys else self.assertNotIn ) assertIndex('new_parent_fk_id', self.get_indexes(Node._meta.db_table)) @skipUnlessDBFeature( 'can_create_inline_fk', 'allows_multiple_constraints_on_same_fields', ) @isolate_apps('schema') def test_add_inline_fk_index_update_data(self): class Node(Model): class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Node) # Add an inline foreign key, update data, and an index in the same # transaction. 
new_field = ForeignKey(Node, CASCADE, related_name='new_fk', null=True) new_field.set_attributes_from_name('new_parent_fk') parent = Node.objects.create() with connection.schema_editor() as editor: editor.add_field(Node, new_field) Node._meta.add_field(new_field) editor.execute('UPDATE schema_node SET new_parent_fk_id = %s;', [parent.pk]) editor.add_index(Node, Index(fields=['new_parent_fk'], name='new_parent_inline_fk_idx')) self.assertIn('new_parent_fk_id', self.get_indexes(Node._meta.db_table)) @skipUnlessDBFeature('supports_foreign_keys') def test_char_field_with_db_index_to_fk(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorCharFieldWithIndex) # Change CharField to FK old_field = AuthorCharFieldWithIndex._meta.get_field('char_field') new_field = ForeignKey(Author, CASCADE, blank=True) new_field.set_attributes_from_name('char_field') with connection.schema_editor() as editor: editor.alter_field(AuthorCharFieldWithIndex, old_field, new_field, strict=True) self.assertForeignKeyExists(AuthorCharFieldWithIndex, 'char_field_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') @skipUnlessDBFeature('supports_index_on_text_field') def test_text_field_with_db_index_to_fk(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorTextFieldWithIndex) # Change TextField to FK old_field = AuthorTextFieldWithIndex._meta.get_field('text_field') new_field = ForeignKey(Author, CASCADE, blank=True) new_field.set_attributes_from_name('text_field') with connection.schema_editor() as editor: editor.alter_field(AuthorTextFieldWithIndex, old_field, new_field, strict=True) self.assertForeignKeyExists(AuthorTextFieldWithIndex, 'text_field_id', 'schema_author') @isolate_apps('schema') def test_char_field_pk_to_auto_field(self): class Foo(Model): id = CharField(max_length=255, primary_key=True) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('id') new_field = AutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Foo with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) @skipUnlessDBFeature('supports_foreign_keys') def test_fk_to_proxy(self): "Creating a FK to a proxy model creates database constraints." 
class AuthorProxy(Author): class Meta: app_label = 'schema' apps = new_apps proxy = True class AuthorRef(Model): author = ForeignKey(AuthorProxy, on_delete=CASCADE) class Meta: app_label = 'schema' apps = new_apps self.local_models = [AuthorProxy, AuthorRef] # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorRef) self.assertForeignKeyExists(AuthorRef, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_fk_db_constraint(self): "The db_constraint parameter is respected" # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(Author) editor.create_model(BookWeak) # Initial tables are there list(Author.objects.all()) list(Tag.objects.all()) list(BookWeak.objects.all()) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') # Make a db_constraint=False FK new_field = ForeignKey(Tag, CASCADE, db_constraint=False) new_field.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.add_field(Author, new_field) self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag') # Alter to one with a constraint new_field2 = ForeignKey(Tag, CASCADE) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) self.assertForeignKeyExists(Author, 'tag_id', 'schema_tag') # Alter to one without a constraint again new_field2 = ForeignKey(Tag, CASCADE) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field2, new_field, strict=True) self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag') @isolate_apps('schema') def test_no_db_constraint_added_during_primary_key_change(self): """ When a primary key that's pointed to by a ForeignKey with db_constraint=False is altered, a foreign key constraint isn't added. """ class Author(Model): class Meta: app_label = 'schema' class BookWeak(Model): author = ForeignKey(Author, CASCADE, db_constraint=False) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWeak) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') old_field = Author._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.model = Author new_field.set_attributes_from_name('id') # @isolate_apps() and inner models are needed to have the model # relations populated, otherwise this doesn't act as a regression test. 
self.assertEqual(len(new_field.model._meta.related_objects), 1) with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') def _test_m2m_db_constraint(self, M2MFieldClass): class LocalAuthorWithM2M(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorWithM2M] # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(LocalAuthorWithM2M) # Initial tables are there list(LocalAuthorWithM2M.objects.all()) list(Tag.objects.all()) # Make a db_constraint=False FK new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False) new_field.contribute_to_class(LocalAuthorWithM2M, "tags") # Add the field with connection.schema_editor() as editor: editor.add_field(LocalAuthorWithM2M, new_field) self.assertForeignKeyNotExists(new_field.remote_field.through, 'tag_id', 'schema_tag') @skipUnlessDBFeature('supports_foreign_keys') def test_m2m_db_constraint(self): self._test_m2m_db_constraint(ManyToManyField) @skipUnlessDBFeature('supports_foreign_keys') def test_m2m_db_constraint_custom(self): self._test_m2m_db_constraint(CustomManyToManyField) @skipUnlessDBFeature('supports_foreign_keys') def test_m2m_db_constraint_inherited(self): self._test_m2m_db_constraint(InheritedManyToManyField) def test_add_field(self): """ Tests adding fields to models """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add the new field new_field = IntegerField(null=True) new_field.set_attributes_from_name("age") with CaptureQueriesContext(connection) as ctx, connection.schema_editor() as editor: editor.add_field(Author, new_field) drop_default_sql = editor.sql_alter_column_no_default % { 'column': editor.quote_name(new_field.name), } self.assertFalse(any(drop_default_sql in query['sql'] for query in ctx.captured_queries)) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['age'][0], connection.features.introspected_field_types['IntegerField']) self.assertTrue(columns['age'][1][6]) def test_add_field_remove_field(self): """ Adding a field and removing it removes all deferred sql referring to it. """ with connection.schema_editor() as editor: # Create a table with a unique constraint on the slug field. editor.create_model(Tag) # Remove the slug column. 
editor.remove_field(Tag, Tag._meta.get_field('slug')) self.assertEqual(editor.deferred_sql, []) def test_add_field_temp_default(self): """ Tests adding fields to models with a temporary default """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = CharField(max_length=30, default="Godwin") new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['surname'][0], connection.features.introspected_field_types['CharField']) self.assertEqual(columns['surname'][1][6], connection.features.interprets_empty_strings_as_nulls) def test_add_field_temp_default_boolean(self): """ Tests adding fields to models with a temporary default where the default is False. (#21783) """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = BooleanField(default=False) new_field.set_attributes_from_name("awesome") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) # BooleanField are stored as TINYINT(1) on MySQL. field_type = columns['awesome'][0] self.assertEqual(field_type, connection.features.introspected_field_types['BooleanField']) def test_add_field_default_transform(self): """ Tests adding fields to models with a default that is not directly valid in the database (#22581) """ class TestTransformField(IntegerField): # Weird field that saves the count of items in its value def get_default(self): return self.default def get_prep_value(self, value): if value is None: return 0 return len(value) # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add the field with a default it needs to cast (to string in this case) new_field = TestTransformField(default={1: 2}) new_field.set_attributes_from_name("thing") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is there columns = self.column_classes(Author) field_type, field_info = columns['thing'] self.assertEqual(field_type, connection.features.introspected_field_types['IntegerField']) # Make sure the values were transformed correctly self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2) def test_add_field_binary(self): """ Tests binary fields get a sane default (#22851) """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add the new field new_field = BinaryField(blank=True) new_field.set_attributes_from_name("bits") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) # MySQL annoyingly uses the same backend, so it'll come back as one of # these two types. 
self.assertIn(columns['bits'][0], ("BinaryField", "TextField")) @unittest.skipUnless(connection.vendor == 'mysql', "MySQL specific") def test_add_binaryfield_mediumblob(self): """ Test adding a custom-sized binary field on MySQL (#24846). """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add the new field with default new_field = MediumBlobField(blank=True, default=b'123') new_field.set_attributes_from_name('bits') with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) # Introspection treats BLOBs as TextFields self.assertEqual(columns['bits'][0], "TextField") def test_alter(self): """ Tests simple altering of fields """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls)) # Alter the name field to a TextField old_field = Author._meta.get_field("name") new_field = TextField(null=True) new_field.set_attributes_from_name("name") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") self.assertTrue(columns['name'][1][6]) # Change nullability again new_field2 = TextField(null=False) new_field2.set_attributes_from_name("name") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls)) def test_alter_auto_field_to_integer_field(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change AutoField to IntegerField old_field = Author._meta.get_field('id') new_field = IntegerField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Now that ID is an IntegerField, the database raises an error if it # isn't provided. 
if not connection.features.supports_unspecified_pk: with self.assertRaises(DatabaseError): Author.objects.create() def test_alter_auto_field_to_char_field(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change AutoField to CharField old_field = Author._meta.get_field('id') new_field = CharField(primary_key=True, max_length=50) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) @isolate_apps('schema') def test_alter_auto_field_quoted_db_column(self): class Foo(Model): id = AutoField(primary_key=True, db_column='"quoted_id"') class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.model = Foo new_field.db_column = '"quoted_id"' new_field.set_attributes_from_name('id') with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) Foo.objects.create() def test_alter_not_unique_field_to_primary_key(self): # Create the table. with connection.schema_editor() as editor: editor.create_model(Author) # Change UUIDField to primary key. old_field = Author._meta.get_field('uuid') new_field = UUIDField(primary_key=True) new_field.set_attributes_from_name('uuid') new_field.model = Author with connection.schema_editor() as editor: editor.remove_field(Author, Author._meta.get_field('id')) editor.alter_field(Author, old_field, new_field, strict=True) @isolate_apps('schema') def test_alter_primary_key_quoted_db_table(self): class Foo(Model): class Meta: app_label = 'schema' db_table = '"foo"' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.model = Foo new_field.set_attributes_from_name('id') with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) Foo.objects.create() def test_alter_text_field(self): # Regression for "BLOB/TEXT column 'info' can't have a default value") # on MySQL. # Create the table with connection.schema_editor() as editor: editor.create_model(Note) old_field = Note._meta.get_field("info") new_field = TextField(blank=True) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) @skipUnlessDBFeature('can_defer_constraint_checks', 'can_rollback_ddl') def test_alter_fk_checks_deferred_constraints(self): """ #25492 - Altering a foreign key's structure and data in the same transaction. """ with connection.schema_editor() as editor: editor.create_model(Node) old_field = Node._meta.get_field('parent') new_field = ForeignKey(Node, CASCADE) new_field.set_attributes_from_name('parent') parent = Node.objects.create() with connection.schema_editor() as editor: # Update the parent FK to create a deferred constraint check. Node.objects.update(parent=parent) editor.alter_field(Node, old_field, new_field, strict=True) def test_alter_text_field_to_date_field(self): """ #25002 - Test conversion of text field to date field. 
""" with connection.schema_editor() as editor: editor.create_model(Note) Note.objects.create(info='1988-05-05') old_field = Note._meta.get_field('info') new_field = DateField(blank=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) # Make sure the field isn't nullable columns = self.column_classes(Note) self.assertFalse(columns['info'][1][6]) def test_alter_text_field_to_datetime_field(self): """ #25002 - Test conversion of text field to datetime field. """ with connection.schema_editor() as editor: editor.create_model(Note) Note.objects.create(info='1988-05-05 3:16:17.4567') old_field = Note._meta.get_field('info') new_field = DateTimeField(blank=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) # Make sure the field isn't nullable columns = self.column_classes(Note) self.assertFalse(columns['info'][1][6]) def test_alter_text_field_to_time_field(self): """ #25002 - Test conversion of text field to time field. """ with connection.schema_editor() as editor: editor.create_model(Note) Note.objects.create(info='3:16:17.4567') old_field = Note._meta.get_field('info') new_field = TimeField(blank=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) # Make sure the field isn't nullable columns = self.column_classes(Note) self.assertFalse(columns['info'][1][6]) @skipIfDBFeature('interprets_empty_strings_as_nulls') def test_alter_textual_field_keep_null_status(self): """ Changing a field type shouldn't affect the not null status. """ with connection.schema_editor() as editor: editor.create_model(Note) with self.assertRaises(IntegrityError): Note.objects.create(info=None) old_field = Note._meta.get_field("info") new_field = CharField(max_length=50) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) with self.assertRaises(IntegrityError): Note.objects.create(info=None) def test_alter_numeric_field_keep_null_status(self): """ Changing a field type shouldn't affect the not null status. """ with connection.schema_editor() as editor: editor.create_model(UniqueTest) with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=None, slug='aaa') old_field = UniqueTest._meta.get_field("year") new_field = BigIntegerField() new_field.set_attributes_from_name("year") with connection.schema_editor() as editor: editor.alter_field(UniqueTest, old_field, new_field, strict=True) with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=None, slug='bbb') def test_alter_null_to_not_null(self): """ #23609 - Tests handling of default values when altering from NULL to NOT NULL. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertTrue(columns['height'][1][6]) # Create some test data Author.objects.create(name='Not null author', height=12) Author.objects.create(name='Null author') # Verify null value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertIsNone(Author.objects.get(name='Null author').height) # Alter the height field to NOT NULL with default old_field = Author._meta.get_field("height") new_field = PositiveIntegerField(default=42) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertFalse(columns['height'][1][6]) # Verify default value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertEqual(Author.objects.get(name='Null author').height, 42) def test_alter_charfield_to_null(self): """ #24307 - Should skip an alter statement on databases with interprets_empty_strings_as_null when changing a CharField to null. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change the CharField to null old_field = Author._meta.get_field('name') new_field = copy(old_field) new_field.null = True with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_char_field_decrease_length(self): # Create the table. with connection.schema_editor() as editor: editor.create_model(Author) Author.objects.create(name='x' * 255) # Change max_length of CharField. 
old_field = Author._meta.get_field('name') new_field = CharField(max_length=254) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: msg = 'value too long for type character varying(254)' with self.assertRaisesMessage(DataError, msg): editor.alter_field(Author, old_field, new_field, strict=True) @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_field_with_custom_db_type(self): from django.contrib.postgres.fields import ArrayField class Foo(Model): field = ArrayField(CharField(max_length=255)) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('field') new_field = ArrayField(CharField(max_length=16)) new_field.set_attributes_from_name('field') new_field.model = Foo with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) @isolate_apps('schema') @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_array_field_decrease_base_field_length(self): from django.contrib.postgres.fields import ArrayField class ArrayModel(Model): field = ArrayField(CharField(max_length=16)) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(ArrayModel) self.isolated_local_models = [ArrayModel] ArrayModel.objects.create(field=['x' * 16]) old_field = ArrayModel._meta.get_field('field') new_field = ArrayField(CharField(max_length=15)) new_field.set_attributes_from_name('field') new_field.model = ArrayModel with connection.schema_editor() as editor: msg = 'value too long for type character varying(15)' with self.assertRaisesMessage(DataError, msg): editor.alter_field(ArrayModel, old_field, new_field, strict=True) @isolate_apps('schema') @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_array_field_decrease_nested_base_field_length(self): from django.contrib.postgres.fields import ArrayField class ArrayModel(Model): field = ArrayField(ArrayField(CharField(max_length=16))) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(ArrayModel) self.isolated_local_models = [ArrayModel] ArrayModel.objects.create(field=[['x' * 16]]) old_field = ArrayModel._meta.get_field('field') new_field = ArrayField(ArrayField(CharField(max_length=15))) new_field.set_attributes_from_name('field') new_field.model = ArrayModel with connection.schema_editor() as editor: msg = 'value too long for type character varying(15)' with self.assertRaisesMessage(DataError, msg): editor.alter_field(ArrayModel, old_field, new_field, strict=True) def test_alter_textfield_to_null(self): """ #24307 - Should skip an alter statement on databases with interprets_empty_strings_as_null when changing a TextField to null. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Note) # Change the TextField to null old_field = Note._meta.get_field('info') new_field = copy(old_field) new_field.null = True with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) @skipUnlessDBFeature('supports_combined_alters') def test_alter_null_to_not_null_keeping_default(self): """ #23738 - Can change a nullable field with default to non-nullable with the same default. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(AuthorWithDefaultHeight) # Ensure the field is right to begin with columns = self.column_classes(AuthorWithDefaultHeight) self.assertTrue(columns['height'][1][6]) # Alter the height field to NOT NULL keeping the previous default old_field = AuthorWithDefaultHeight._meta.get_field("height") new_field = PositiveIntegerField(default=42) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(AuthorWithDefaultHeight) self.assertFalse(columns['height'][1][6]) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_fk(self): """ Tests altering of FKs """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the field is right to begin with columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) self.assertForeignKeyExists(Book, 'author_id', 'schema_author') # Alter the FK old_field = Book._meta.get_field("author") new_field = ForeignKey(Author, CASCADE, editable=False) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) self.assertForeignKeyExists(Book, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_to_fk(self): """ #24447 - Tests adding a FK constraint for an existing column """ class LocalBook(Model): author = IntegerField() title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBook] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(LocalBook) # Ensure no FK constraint exists constraints = self.get_constraints(LocalBook._meta.db_table) for details in constraints.values(): if details['foreign_key']: self.fail('Found an unexpected FK constraint to %s' % details['columns']) old_field = LocalBook._meta.get_field("author") new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(LocalBook, old_field, new_field, strict=True) self.assertForeignKeyExists(LocalBook, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_o2o_to_fk(self): """ #24163 - Tests altering of OneToOneField to ForeignKey """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) # Ensure the field is right to begin with columns = self.column_classes(BookWithO2O) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is unique author = Author.objects.create(name="Joe") BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) with self.assertRaises(IntegrityError): BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) BookWithO2O.objects.all().delete() self.assertForeignKeyExists(BookWithO2O, 'author_id', 
'schema_author') # Alter the OneToOneField to ForeignKey old_field = BookWithO2O._meta.get_field("author") new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(BookWithO2O, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is not unique anymore Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) self.assertForeignKeyExists(Book, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_fk_to_o2o(self): """ #24163 - Tests altering of ForeignKey to OneToOneField """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the field is right to begin with columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is not unique author = Author.objects.create(name="Joe") Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) Book.objects.all().delete() self.assertForeignKeyExists(Book, 'author_id', 'schema_author') # Alter the ForeignKey to OneToOneField old_field = Book._meta.get_field("author") new_field = OneToOneField(Author, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(BookWithO2O) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is unique now BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) with self.assertRaises(IntegrityError): BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author') def test_alter_field_fk_to_o2o(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) expected_fks = 1 if connection.features.supports_foreign_keys else 0 expected_indexes = 1 if connection.features.indexes_foreign_keys else 0 # Check the index is right to begin with. counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual( counts, {'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes}, ) old_field = Book._meta.get_field('author') new_field = OneToOneField(Author, CASCADE) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The index on ForeignKey is replaced with a unique constraint for OneToOneField. 
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) def test_alter_field_fk_keeps_index(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) expected_fks = 1 if connection.features.supports_foreign_keys else 0 expected_indexes = 1 if connection.features.indexes_foreign_keys else 0 # Check the index is right to begin with. counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual( counts, {'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes}, ) old_field = Book._meta.get_field('author') # on_delete changed from CASCADE. new_field = ForeignKey(Author, PROTECT) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The index remains. self.assertEqual( counts, {'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes}, ) def test_alter_field_o2o_to_fk(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) expected_fks = 1 if connection.features.supports_foreign_keys else 0 # Check the unique constraint is right to begin with. counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) old_field = BookWithO2O._meta.get_field('author') new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(BookWithO2O, old_field, new_field, strict=True) counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The unique constraint on OneToOneField is replaced with an index for ForeignKey. self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1}) def test_alter_field_o2o_keeps_unique(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) expected_fks = 1 if connection.features.supports_foreign_keys else 0 # Check the unique constraint is right to begin with. counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) old_field = BookWithO2O._meta.get_field('author') # on_delete changed from CASCADE. new_field = OneToOneField(Author, PROTECT) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(BookWithO2O, old_field, new_field, strict=True) counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The unique constraint remains. 
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) @skipUnlessDBFeature('ignores_table_name_case') def test_alter_db_table_case(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Alter the case of the table old_table_name = Author._meta.db_table with connection.schema_editor() as editor: editor.alter_db_table(Author, old_table_name, old_table_name.upper()) def test_alter_implicit_id_to_explicit(self): """ Should be able to convert an implicit "id" field to an explicit "id" primary key field. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field("id") new_field = AutoField(primary_key=True) new_field.set_attributes_from_name("id") new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # This will fail if DROP DEFAULT is inadvertently executed on this # field which drops the id sequence, at least on PostgreSQL. Author.objects.create(name='Foo') Author.objects.create(name='Bar') def test_alter_autofield_pk_to_bigautofield_pk_sequence_owner(self): """ Converting an implicit PK to BigAutoField(primary_key=True) should keep a sequence owner on PostgreSQL. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) Author.objects.create(name='Foo', pk=1) with connection.cursor() as cursor: sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author]) if sequence_reset_sqls: cursor.execute(sequence_reset_sqls[0]) # Fail on PostgreSQL if sequence is missing an owner. self.assertIsNotNone(Author.objects.create(name='Bar')) def test_alter_autofield_pk_to_smallautofield_pk_sequence_owner(self): """ Converting an implicit PK to SmallAutoField(primary_key=True) should keep a sequence owner on PostgreSQL. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field('id') new_field = SmallAutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) Author.objects.create(name='Foo', pk=1) with connection.cursor() as cursor: sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author]) if sequence_reset_sqls: cursor.execute(sequence_reset_sqls[0]) # Fail on PostgreSQL if sequence is missing an owner. self.assertIsNotNone(Author.objects.create(name='Bar')) def test_alter_int_pk_to_autofield_pk(self): """ Should be able to rename an IntegerField(primary_key=True) to AutoField(primary_key=True). """ with connection.schema_editor() as editor: editor.create_model(IntegerPK) old_field = IntegerPK._meta.get_field('i') new_field = AutoField(primary_key=True) new_field.model = IntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(IntegerPK, old_field, new_field, strict=True) # A model representing the updated model. class IntegerPKToAutoField(Model): i = AutoField(primary_key=True) j = IntegerField(unique=True) class Meta: app_label = 'schema' apps = new_apps db_table = IntegerPK._meta.db_table # An id (i) is generated by the database. 
obj = IntegerPKToAutoField.objects.create(j=1) self.assertIsNotNone(obj.i) def test_alter_int_pk_to_bigautofield_pk(self): """ Should be able to rename an IntegerField(primary_key=True) to BigAutoField(primary_key=True). """ with connection.schema_editor() as editor: editor.create_model(IntegerPK) old_field = IntegerPK._meta.get_field('i') new_field = BigAutoField(primary_key=True) new_field.model = IntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(IntegerPK, old_field, new_field, strict=True) # A model representing the updated model. class IntegerPKToBigAutoField(Model): i = BigAutoField(primary_key=True) j = IntegerField(unique=True) class Meta: app_label = 'schema' apps = new_apps db_table = IntegerPK._meta.db_table # An id (i) is generated by the database. obj = IntegerPKToBigAutoField.objects.create(j=1) self.assertIsNotNone(obj.i) @isolate_apps('schema') def test_alter_smallint_pk_to_smallautofield_pk(self): """ Should be able to rename an SmallIntegerField(primary_key=True) to SmallAutoField(primary_key=True). """ class SmallIntegerPK(Model): i = SmallIntegerField(primary_key=True) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(SmallIntegerPK) self.isolated_local_models = [SmallIntegerPK] old_field = SmallIntegerPK._meta.get_field('i') new_field = SmallAutoField(primary_key=True) new_field.model = SmallIntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(SmallIntegerPK, old_field, new_field, strict=True) def test_alter_int_pk_to_int_unique(self): """ Should be able to rename an IntegerField(primary_key=True) to IntegerField(unique=True). """ with connection.schema_editor() as editor: editor.create_model(IntegerPK) # Delete the old PK old_field = IntegerPK._meta.get_field('i') new_field = IntegerField(unique=True) new_field.model = IntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(IntegerPK, old_field, new_field, strict=True) # The primary key constraint is gone. Result depends on database: # 'id' for SQLite, None for others (must not be 'i'). self.assertIn(self.get_primary_key(IntegerPK._meta.db_table), ('id', None)) # Set up a model class as it currently stands. The original IntegerPK # class is now out of date and some backends make use of the whole # model class when modifying a field (such as sqlite3 when remaking a # table) so an outdated model class leads to incorrect results. class Transitional(Model): i = IntegerField(unique=True) j = IntegerField(unique=True) class Meta: app_label = 'schema' apps = new_apps db_table = 'INTEGERPK' # model requires a new PK old_field = Transitional._meta.get_field('j') new_field = IntegerField(primary_key=True) new_field.model = Transitional new_field.set_attributes_from_name('j') with connection.schema_editor() as editor: editor.alter_field(Transitional, old_field, new_field, strict=True) # Create a model class representing the updated model. class IntegerUnique(Model): i = IntegerField(unique=True) j = IntegerField(primary_key=True) class Meta: app_label = 'schema' apps = new_apps db_table = 'INTEGERPK' # Ensure unique constraint works. 
        IntegerUnique.objects.create(i=1, j=1)
        with self.assertRaises(IntegrityError):
            IntegerUnique.objects.create(i=1, j=2)

    def test_rename(self):
        """
        Tests simple renaming of fields
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure the field is right to begin with
        columns = self.column_classes(Author)
        self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField'])
        self.assertNotIn("display_name", columns)
        # Alter the name field's name
        old_field = Author._meta.get_field("name")
        new_field = CharField(max_length=254)
        new_field.set_attributes_from_name("display_name")
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        self.assertEqual(columns['display_name'][0], connection.features.introspected_field_types['CharField'])
        self.assertNotIn("name", columns)

    @isolate_apps('schema')
    def test_rename_referenced_field(self):
        class Author(Model):
            name = CharField(max_length=255, unique=True)

            class Meta:
                app_label = 'schema'

        class Book(Model):
            author = ForeignKey(Author, CASCADE, to_field='name')

            class Meta:
                app_label = 'schema'

        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Book)
        new_field = CharField(max_length=255, unique=True)
        new_field.set_attributes_from_name('renamed')
        with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
            editor.alter_field(Author, Author._meta.get_field('name'), new_field)
        # Ensure the foreign key reference was updated.
        self.assertForeignKeyExists(Book, 'author_id', 'schema_author', 'renamed')

    @skipIfDBFeature('interprets_empty_strings_as_nulls')
    def test_rename_keep_null_status(self):
        """
        Renaming a field shouldn't affect the not null status.
""" with connection.schema_editor() as editor: editor.create_model(Note) with self.assertRaises(IntegrityError): Note.objects.create(info=None) old_field = Note._meta.get_field("info") new_field = TextField() new_field.set_attributes_from_name("detail_info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) columns = self.column_classes(Note) self.assertEqual(columns['detail_info'][0], "TextField") self.assertNotIn("info", columns) with self.assertRaises(IntegrityError): NoteRename.objects.create(detail_info=None) def _test_m2m_create(self, M2MFieldClass): """ Tests M2M fields on models during creation """ class LocalBookWithM2M(Model): author = ForeignKey(Author, CASCADE) title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() tags = M2MFieldClass("TagM2MTest", related_name="books") class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBookWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(TagM2MTest) editor.create_model(LocalBookWithM2M) # Ensure there is now an m2m table there columns = self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through) self.assertEqual(columns['tagm2mtest_id'][0], connection.features.introspected_field_types['IntegerField']) def test_m2m_create(self): self._test_m2m_create(ManyToManyField) def test_m2m_create_custom(self): self._test_m2m_create(CustomManyToManyField) def test_m2m_create_inherited(self): self._test_m2m_create(InheritedManyToManyField) def _test_m2m_create_through(self, M2MFieldClass): """ Tests M2M fields on models during creation with through models """ class LocalTagThrough(Model): book = ForeignKey("schema.LocalBookWithM2MThrough", CASCADE) tag = ForeignKey("schema.TagM2MTest", CASCADE) class Meta: app_label = 'schema' apps = new_apps class LocalBookWithM2MThrough(Model): tags = M2MFieldClass("TagM2MTest", related_name="books", through=LocalTagThrough) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalTagThrough, LocalBookWithM2MThrough] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalTagThrough) editor.create_model(TagM2MTest) editor.create_model(LocalBookWithM2MThrough) # Ensure there is now an m2m table there columns = self.column_classes(LocalTagThrough) self.assertEqual(columns['book_id'][0], connection.features.introspected_field_types['IntegerField']) self.assertEqual(columns['tag_id'][0], connection.features.introspected_field_types['IntegerField']) def test_m2m_create_through(self): self._test_m2m_create_through(ManyToManyField) def test_m2m_create_through_custom(self): self._test_m2m_create_through(CustomManyToManyField) def test_m2m_create_through_inherited(self): self._test_m2m_create_through(InheritedManyToManyField) def _test_m2m(self, M2MFieldClass): """ Tests adding/removing M2M fields on models """ class LocalAuthorWithM2M(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalAuthorWithM2M) editor.create_model(TagM2MTest) # Create an M2M field new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors") new_field.contribute_to_class(LocalAuthorWithM2M, "tags") # Ensure there's no m2m table there with self.assertRaises(DatabaseError): self.column_classes(new_field.remote_field.through) # Add the field with 
connection.schema_editor() as editor: editor.add_field(LocalAuthorWithM2M, new_field) # Ensure there is now an m2m table there columns = self.column_classes(new_field.remote_field.through) self.assertEqual(columns['tagm2mtest_id'][0], connection.features.introspected_field_types['IntegerField']) # "Alter" the field. This should not rename the DB table to itself. with connection.schema_editor() as editor: editor.alter_field(LocalAuthorWithM2M, new_field, new_field, strict=True) # Remove the M2M table again with connection.schema_editor() as editor: editor.remove_field(LocalAuthorWithM2M, new_field) # Ensure there's no m2m table there with self.assertRaises(DatabaseError): self.column_classes(new_field.remote_field.through) # Make sure the model state is coherent with the table one now that # we've removed the tags field. opts = LocalAuthorWithM2M._meta opts.local_many_to_many.remove(new_field) del new_apps.all_models['schema'][new_field.remote_field.through._meta.model_name] opts._expire_cache() def test_m2m(self): self._test_m2m(ManyToManyField) def test_m2m_custom(self): self._test_m2m(CustomManyToManyField) def test_m2m_inherited(self): self._test_m2m(InheritedManyToManyField) def _test_m2m_through_alter(self, M2MFieldClass): """ Tests altering M2Ms with explicit through models (should no-op) """ class LocalAuthorTag(Model): author = ForeignKey("schema.LocalAuthorWithM2MThrough", CASCADE) tag = ForeignKey("schema.TagM2MTest", CASCADE) class Meta: app_label = 'schema' apps = new_apps class LocalAuthorWithM2MThrough(Model): name = CharField(max_length=255) tags = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalAuthorTag) editor.create_model(LocalAuthorWithM2MThrough) editor.create_model(TagM2MTest) # Ensure the m2m table is there self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3) # "Alter" the field's blankness. This should not actually do anything. 
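        # LocalAuthorTag exposes three columns (the implicit id plus the
        # author and tag foreign keys), so comparing the column count before
        # and after the no-op alter is enough to show the intermediate table
        # was left untouched.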
old_field = LocalAuthorWithM2MThrough._meta.get_field("tags") new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag) new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags") with connection.schema_editor() as editor: editor.alter_field(LocalAuthorWithM2MThrough, old_field, new_field, strict=True) # Ensure the m2m table is still there self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3) def test_m2m_through_alter(self): self._test_m2m_through_alter(ManyToManyField) def test_m2m_through_alter_custom(self): self._test_m2m_through_alter(CustomManyToManyField) def test_m2m_through_alter_inherited(self): self._test_m2m_through_alter(InheritedManyToManyField) def _test_m2m_repoint(self, M2MFieldClass): """ Tests repointing M2M fields """ class LocalBookWithM2M(Model): author = ForeignKey(Author, CASCADE) title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() tags = M2MFieldClass("TagM2MTest", related_name="books") class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBookWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(LocalBookWithM2M) editor.create_model(TagM2MTest) editor.create_model(UniqueTest) # Ensure the M2M exists and points to TagM2MTest if connection.features.supports_foreign_keys: self.assertForeignKeyExists( LocalBookWithM2M._meta.get_field("tags").remote_field.through, 'tagm2mtest_id', 'schema_tagm2mtest', ) # Repoint the M2M old_field = LocalBookWithM2M._meta.get_field("tags") new_field = M2MFieldClass(UniqueTest) new_field.contribute_to_class(LocalBookWithM2M, "uniques") with connection.schema_editor() as editor: editor.alter_field(LocalBookWithM2M, old_field, new_field, strict=True) # Ensure old M2M is gone with self.assertRaises(DatabaseError): self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through) # This model looks like the new model and is used for teardown. opts = LocalBookWithM2M._meta opts.local_many_to_many.remove(old_field) # Ensure the new M2M exists and points to UniqueTest if connection.features.supports_foreign_keys: self.assertForeignKeyExists(new_field.remote_field.through, 'uniquetest_id', 'schema_uniquetest') def test_m2m_repoint(self): self._test_m2m_repoint(ManyToManyField) def test_m2m_repoint_custom(self): self._test_m2m_repoint(CustomManyToManyField) def test_m2m_repoint_inherited(self): self._test_m2m_repoint(InheritedManyToManyField) @isolate_apps('schema') def test_m2m_rename_field_in_target_model(self): class LocalTagM2MTest(Model): title = CharField(max_length=255) class Meta: app_label = 'schema' class LocalM2M(Model): tags = ManyToManyField(LocalTagM2MTest) class Meta: app_label = 'schema' # Create the tables. with connection.schema_editor() as editor: editor.create_model(LocalM2M) editor.create_model(LocalTagM2MTest) self.isolated_local_models = [LocalM2M, LocalTagM2MTest] # Ensure the m2m table is there. self.assertEqual(len(self.column_classes(LocalM2M)), 1) # Alter a field in LocalTagM2MTest. old_field = LocalTagM2MTest._meta.get_field('title') new_field = CharField(max_length=254) new_field.contribute_to_class(LocalTagM2MTest, 'title1') # @isolate_apps() and inner models are needed to have the model # relations populated, otherwise this doesn't act as a regression test. 
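        # Exactly one related object is expected: the reverse side of
        # LocalM2M.tags. Asserting this confirms the relation graph is
        # populated, which is what makes the alter below act as a regression
        # test (see the comment above).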
self.assertEqual(len(new_field.model._meta.related_objects), 1) with connection.schema_editor() as editor: editor.alter_field(LocalTagM2MTest, old_field, new_field, strict=True) # Ensure the m2m table is still there. self.assertEqual(len(self.column_classes(LocalM2M)), 1) @skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints') def test_check_constraints(self): """ Tests creating/deleting CHECK constraints """ # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the constraint exists constraints = self.get_constraints(Author._meta.db_table) if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()): self.fail("No check constraint for height found") # Alter the column to remove it old_field = Author._meta.get_field("height") new_field = IntegerField(null=True, blank=True) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) constraints = self.get_constraints(Author._meta.db_table) for details in constraints.values(): if details['columns'] == ["height"] and details['check']: self.fail("Check constraint for height found") # Alter the column to re-add it new_field2 = Author._meta.get_field("height") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) constraints = self.get_constraints(Author._meta.db_table) if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()): self.fail("No check constraint for height found") @skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints') def test_remove_field_check_does_not_remove_meta_constraints(self): with connection.schema_editor() as editor: editor.create_model(Author) # Add the custom check constraint constraint = CheckConstraint(check=Q(height__gte=0), name='author_height_gte_0_check') custom_constraint_name = constraint.name Author._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) # Ensure the constraints exist constraints = self.get_constraints(Author._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Alter the column to remove field check old_field = Author._meta.get_field('height') new_field = IntegerField(null=True, blank=True) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) constraints = self.get_constraints(Author._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 0) # Alter the column to re-add field check new_field2 = Author._meta.get_field('height') with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) constraints = self.get_constraints(Author._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['height'] and details['check'] and name 
!= custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Drop the check constraint with connection.schema_editor() as editor: Author._meta.constraints = [] editor.remove_constraint(Author, constraint) def test_unique(self): """ Tests removing and adding unique constraints to a single column. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure the field is unique to begin with Tag.objects.create(title="foo", slug="foo") with self.assertRaises(IntegrityError): Tag.objects.create(title="bar", slug="foo") Tag.objects.all().delete() # Alter the slug field to be non-unique old_field = Tag._meta.get_field("slug") new_field = SlugField(unique=False) new_field.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_field(Tag, old_field, new_field, strict=True) # Ensure the field is no longer unique Tag.objects.create(title="foo", slug="foo") Tag.objects.create(title="bar", slug="foo") Tag.objects.all().delete() # Alter the slug field to be unique new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_field(Tag, new_field, new_field2, strict=True) # Ensure the field is unique again Tag.objects.create(title="foo", slug="foo") with self.assertRaises(IntegrityError): Tag.objects.create(title="bar", slug="foo") Tag.objects.all().delete() # Rename the field new_field3 = SlugField(unique=True) new_field3.set_attributes_from_name("slug2") with connection.schema_editor() as editor: editor.alter_field(Tag, new_field2, new_field3, strict=True) # Ensure the field is still unique TagUniqueRename.objects.create(title="foo", slug2="foo") with self.assertRaises(IntegrityError): TagUniqueRename.objects.create(title="bar", slug2="foo") Tag.objects.all().delete() def test_unique_name_quoting(self): old_table_name = TagUniqueRename._meta.db_table try: with connection.schema_editor() as editor: editor.create_model(TagUniqueRename) editor.alter_db_table(TagUniqueRename, old_table_name, 'unique-table') TagUniqueRename._meta.db_table = 'unique-table' # This fails if the unique index name isn't quoted. editor.alter_unique_together(TagUniqueRename, [], (('title', 'slug2'),)) finally: TagUniqueRename._meta.db_table = old_table_name @isolate_apps('schema') @skipUnlessDBFeature('supports_foreign_keys') def test_unique_no_unnecessary_fk_drops(self): """ If AlterField isn't selective about dropping foreign key constraints when modifying a field with a unique constraint, the AlterField incorrectly drops and recreates the Book.author foreign key even though it doesn't restrict the field being changed (#29193). """ class Author(Model): name = CharField(max_length=254, unique=True) class Meta: app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) new_field = CharField(max_length=255, unique=True) new_field.model = Author new_field.set_attributes_from_name('name') with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm: with connection.schema_editor() as editor: editor.alter_field(Author, Author._meta.get_field('name'), new_field) # One SQL statement is executed to alter the field. self.assertEqual(len(cm.records), 1) @isolate_apps('schema') def test_unique_and_reverse_m2m(self): """ AlterField can modify a unique field when there's a reverse M2M relation on the model. 
""" class Tag(Model): title = CharField(max_length=255) slug = SlugField(unique=True) class Meta: app_label = 'schema' class Book(Model): tags = ManyToManyField(Tag, related_name='books') class Meta: app_label = 'schema' self.isolated_local_models = [Book._meta.get_field('tags').remote_field.through] with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(Book) new_field = SlugField(max_length=75, unique=True) new_field.model = Tag new_field.set_attributes_from_name('slug') with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm: with connection.schema_editor() as editor: editor.alter_field(Tag, Tag._meta.get_field('slug'), new_field) # One SQL statement is executed to alter the field. self.assertEqual(len(cm.records), 1) # Ensure that the field is still unique. Tag.objects.create(title='foo', slug='foo') with self.assertRaises(IntegrityError): Tag.objects.create(title='bar', slug='foo') @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_field_unique_does_not_remove_meta_constraints(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithUniqueName) # Add the custom unique constraint constraint = UniqueConstraint(fields=['name'], name='author_name_uniq') custom_constraint_name = constraint.name AuthorWithUniqueName._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(AuthorWithUniqueName, constraint) # Ensure the constraints exist constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Alter the column to remove field uniqueness old_field = AuthorWithUniqueName._meta.get_field('name') new_field = CharField(max_length=255) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(AuthorWithUniqueName, old_field, new_field, strict=True) constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 0) # Alter the column to re-add field uniqueness new_field2 = AuthorWithUniqueName._meta.get_field('name') with connection.schema_editor() as editor: editor.alter_field(AuthorWithUniqueName, new_field, new_field2, strict=True) constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Drop the unique constraint with connection.schema_editor() as editor: AuthorWithUniqueName._meta.constraints = [] editor.remove_constraint(AuthorWithUniqueName, constraint) def test_unique_together(self): """ Tests removing and adding unique_together constraints on a model. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(UniqueTest) # Ensure the fields are unique to begin with UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.create(year=2011, slug="foo") UniqueTest.objects.create(year=2011, slug="bar") with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.all().delete() # Alter the model to its non-unique-together companion with connection.schema_editor() as editor: editor.alter_unique_together(UniqueTest, UniqueTest._meta.unique_together, []) # Ensure the fields are no longer unique UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.all().delete() # Alter it back new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_unique_together(UniqueTest, [], UniqueTest._meta.unique_together) # Ensure the fields are unique again UniqueTest.objects.create(year=2012, slug="foo") with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.all().delete() def test_unique_together_with_fk(self): """ Tests removing and adding unique_together constraints that include a foreign key. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the fields are unique to begin with self.assertEqual(Book._meta.unique_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_unique_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_unique_together(Book, [['author', 'title']], []) def test_unique_together_with_fk_with_existing_index(self): """ Tests removing and adding unique_together constraints that include a foreign key, where the foreign key is added after the model is created. 
""" # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithoutAuthor) new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name('author') editor.add_field(BookWithoutAuthor, new_field) # Ensure the fields aren't unique to begin with self.assertEqual(Book._meta.unique_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_unique_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_unique_together(Book, [['author', 'title']], []) @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_unique_together_does_not_remove_meta_constraints(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithUniqueNameAndBirthday) # Add the custom unique constraint constraint = UniqueConstraint(fields=['name', 'birthday'], name='author_name_birthday_uniq') custom_constraint_name = constraint.name AuthorWithUniqueNameAndBirthday._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(AuthorWithUniqueNameAndBirthday, constraint) # Ensure the constraints exist constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Remove unique together unique_together = AuthorWithUniqueNameAndBirthday._meta.unique_together with connection.schema_editor() as editor: editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, unique_together, []) constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 0) # Re-add unique together with connection.schema_editor() as editor: editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, [], unique_together) constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Drop the unique constraint with connection.schema_editor() as editor: AuthorWithUniqueNameAndBirthday._meta.constraints = [] editor.remove_constraint(AuthorWithUniqueNameAndBirthday, constraint) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint(Upper('name').desc(), name='func_upper_uq') # Add constraint. 
with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table constraints = self.get_constraints(table) if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, constraint.name, ['DESC']) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) # SQL contains a database function. self.assertIs(sql.references_column(table, 'name'), True) self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql)) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_composite_func_unique_constraint(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithSlug) constraint = UniqueConstraint( Upper('title'), Lower('slug'), name='func_upper_lower_unq', ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(BookWithSlug, constraint) sql = constraint.create_sql(BookWithSlug, editor) table = BookWithSlug._meta.db_table constraints = self.get_constraints(table) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) # SQL contains database functions. self.assertIs(sql.references_column(table, 'title'), True) self.assertIs(sql.references_column(table, 'slug'), True) sql = str(sql) self.assertIn('UPPER(%s)' % editor.quote_name('title'), sql) self.assertIn('LOWER(%s)' % editor.quote_name('slug'), sql) self.assertLess(sql.index('UPPER'), sql.index('LOWER')) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(BookWithSlug, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_unique_constraint_field_and_expression(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint( F('height').desc(), 'uuid', Lower('name').asc(), name='func_f_lower_field_unq', ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, constraint.name, ['DESC', 'ASC', 'ASC']) constraints = self.get_constraints(table) self.assertIs(constraints[constraint.name]['unique'], True) self.assertEqual(len(constraints[constraint.name]['columns']), 3) self.assertEqual(constraints[constraint.name]['columns'][1], 'uuid') # SQL contains database functions and columns. self.assertIs(sql.references_column(table, 'height'), True) self.assertIs(sql.references_column(table, 'name'), True) self.assertIs(sql.references_column(table, 'uuid'), True) self.assertIn('LOWER(%s)' % editor.quote_name('name'), str(sql)) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes', 'supports_partial_indexes') def test_func_unique_constraint_partial(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint( Upper('name'), name='func_upper_cond_weight_uq', condition=Q(weight__isnull=False), ) # Add constraint. 
with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table constraints = self.get_constraints(table) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) self.assertIs(sql.references_column(table, 'name'), True) self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql)) self.assertIn( 'WHERE %s IS NOT NULL' % editor.quote_name('weight'), str(sql), ) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes', 'supports_covering_indexes') def test_func_unique_constraint_covering(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint( Upper('name'), name='func_upper_covering_uq', include=['weight', 'height'], ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table constraints = self.get_constraints(table) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) self.assertEqual( constraints[constraint.name]['columns'], [None, 'weight', 'height'], ) self.assertIs(sql.references_column(table, 'name'), True) self.assertIs(sql.references_column(table, 'weight'), True) self.assertIs(sql.references_column(table, 'height'), True) self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql)) self.assertIn( 'INCLUDE (%s, %s)' % ( editor.quote_name('weight'), editor.quote_name('height'), ), str(sql), ) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint_lookups(self): with connection.schema_editor() as editor: editor.create_model(Author) with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs): constraint = UniqueConstraint( F('name__lower'), F('weight__abs'), name='func_lower_abs_lookup_uq', ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table constraints = self.get_constraints(table) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) # SQL contains columns. self.assertIs(sql.references_column(table, 'name'), True) self.assertIs(sql.references_column(table, 'weight'), True) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint_collate(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest( 'This backend does not support case-insensitive collations.' ) with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithSlug) constraint = UniqueConstraint( Collate(F('title'), collation=collation).desc(), Collate('slug', collation=collation), name='func_collate_uq', ) # Add constraint. 
        with connection.schema_editor() as editor:
            editor.add_constraint(BookWithSlug, constraint)
            sql = constraint.create_sql(BookWithSlug, editor)
        table = BookWithSlug._meta.db_table
        constraints = self.get_constraints(table)
        self.assertIn(constraint.name, constraints)
        self.assertIs(constraints[constraint.name]['unique'], True)
        if connection.features.supports_index_column_ordering:
            self.assertIndexOrder(table, constraint.name, ['DESC', 'ASC'])
        # SQL contains columns and a collation.
        self.assertIs(sql.references_column(table, 'title'), True)
        self.assertIs(sql.references_column(table, 'slug'), True)
        self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql))
        # Remove constraint.
        with connection.schema_editor() as editor:
            editor.remove_constraint(BookWithSlug, constraint)
        self.assertNotIn(constraint.name, self.get_constraints(table))

    @skipIfDBFeature('supports_expression_indexes')
    def test_func_unique_constraint_unsupported(self):
        # UniqueConstraint is ignored on databases that don't support indexes on
        # expressions.
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        constraint = UniqueConstraint(F('name'), name='func_name_uq')
        with connection.schema_editor() as editor, self.assertNumQueries(0):
            self.assertIsNone(editor.add_constraint(Author, constraint))
            self.assertIsNone(editor.remove_constraint(Author, constraint))

    @skipUnlessDBFeature('supports_expression_indexes')
    def test_func_unique_constraint_nonexistent_field(self):
        constraint = UniqueConstraint(Lower('nonexistent'), name='func_nonexistent_uq')
        msg = (
            "Cannot resolve keyword 'nonexistent' into field. Choices are: "
            "height, id, name, uuid, weight"
        )
        with self.assertRaisesMessage(FieldError, msg):
            with connection.schema_editor() as editor:
                editor.add_constraint(Author, constraint)

    @skipUnlessDBFeature('supports_expression_indexes')
    def test_func_unique_constraint_nondeterministic(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        constraint = UniqueConstraint(Random(), name='func_random_uq')
        with connection.schema_editor() as editor:
            with self.assertRaises(DatabaseError):
                editor.add_constraint(Author, constraint)

    def test_index_together(self):
        """
        Tests removing and adding index_together constraints on a model.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Tag)
        # Ensure there's no index on the slug/title columns first
        self.assertIs(
            any(
                c["index"]
                for c in self.get_constraints("schema_tag").values()
                if c['columns'] == ["slug", "title"]
            ),
            False,
        )
        # Alter the model to add an index
        with connection.schema_editor() as editor:
            editor.alter_index_together(Tag, [], [("slug", "title")])
        # Ensure there is now an index
        self.assertIs(
            any(
                c["index"]
                for c in self.get_constraints("schema_tag").values()
                if c['columns'] == ["slug", "title"]
            ),
            True,
        )
        # Alter it back
        new_field2 = SlugField(unique=True)
        new_field2.set_attributes_from_name("slug")
        with connection.schema_editor() as editor:
            editor.alter_index_together(Tag, [("slug", "title")], [])
        # Ensure there's no index
        self.assertIs(
            any(
                c["index"]
                for c in self.get_constraints("schema_tag").values()
                if c['columns'] == ["slug", "title"]
            ),
            False,
        )

    def test_index_together_with_fk(self):
        """
        Tests removing and adding index_together constraints that include
        a foreign key.
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the fields are unique to begin with self.assertEqual(Book._meta.index_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_index_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_index_together(Book, [['author', 'title']], []) def test_create_index_together(self): """ Tests creating models with index_together already defined """ # Create the table with connection.schema_editor() as editor: editor.create_model(TagIndexed) # Ensure there is an index self.assertIs( any( c["index"] for c in self.get_constraints("schema_tagindexed").values() if c['columns'] == ["slug", "title"] ), True, ) @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_index_together_does_not_remove_meta_indexes(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithIndexedNameAndBirthday) # Add the custom index index = Index(fields=['name', 'birthday'], name='author_name_birthday_idx') custom_index_name = index.name AuthorWithIndexedNameAndBirthday._meta.indexes = [index] with connection.schema_editor() as editor: editor.add_index(AuthorWithIndexedNameAndBirthday, index) # Ensure the indexes exist constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 1) # Remove index together index_together = AuthorWithIndexedNameAndBirthday._meta.index_together with connection.schema_editor() as editor: editor.alter_index_together(AuthorWithIndexedNameAndBirthday, index_together, []) constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 0) # Re-add index together with connection.schema_editor() as editor: editor.alter_index_together(AuthorWithIndexedNameAndBirthday, [], index_together) constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 1) # Drop the index with connection.schema_editor() as editor: AuthorWithIndexedNameAndBirthday._meta.indexes = [] editor.remove_index(AuthorWithIndexedNameAndBirthday, index) @isolate_apps('schema') def test_db_table(self): """ Tests renaming of the table """ class Author(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' # Create the table and one referring it. 
with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the table is there to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) # Alter the table with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: editor.alter_db_table(Author, "schema_author", "schema_otherauthor") # Ensure the table is there afterwards Author._meta.db_table = "schema_otherauthor" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) # Ensure the foreign key reference was updated self.assertForeignKeyExists(Book, "author_id", "schema_otherauthor") # Alter the table again with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: editor.alter_db_table(Author, "schema_otherauthor", "schema_author") # Ensure the table is still there Author._meta.db_table = "schema_author" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) def test_add_remove_index(self): """ Tests index addition and removal """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the table is there and has no index self.assertNotIn('title', self.get_indexes(Author._meta.db_table)) # Add the index index = Index(fields=['name'], name='author_title_idx') with connection.schema_editor() as editor: editor.add_index(Author, index) self.assertIn('name', self.get_indexes(Author._meta.db_table)) # Drop the index with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn('name', self.get_indexes(Author._meta.db_table)) def test_remove_db_index_doesnt_remove_custom_indexes(self): """ Changing db_index to False doesn't remove indexes from Meta.indexes. """ with connection.schema_editor() as editor: editor.create_model(AuthorWithIndexedName) # Ensure the table has its index self.assertIn('name', self.get_indexes(AuthorWithIndexedName._meta.db_table)) # Add the custom index index = Index(fields=['-name'], name='author_name_idx') author_index_name = index.name with connection.schema_editor() as editor: db_index_name = editor._create_index_name( table_name=AuthorWithIndexedName._meta.db_table, column_names=('name',), ) try: AuthorWithIndexedName._meta.indexes = [index] with connection.schema_editor() as editor: editor.add_index(AuthorWithIndexedName, index) old_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table) self.assertIn(author_index_name, old_constraints) self.assertIn(db_index_name, old_constraints) # Change name field to db_index=False old_field = AuthorWithIndexedName._meta.get_field('name') new_field = CharField(max_length=255) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(AuthorWithIndexedName, old_field, new_field, strict=True) new_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table) self.assertNotIn(db_index_name, new_constraints) # The index from Meta.indexes is still in the database. 
self.assertIn(author_index_name, new_constraints) # Drop the index with connection.schema_editor() as editor: editor.remove_index(AuthorWithIndexedName, index) finally: AuthorWithIndexedName._meta.indexes = [] def test_order_index(self): """ Indexes defined with ordering (ASC/DESC) defined on column """ with connection.schema_editor() as editor: editor.create_model(Author) # The table doesn't have an index self.assertNotIn('title', self.get_indexes(Author._meta.db_table)) index_name = 'author_name_idx' # Add the index index = Index(fields=['name', '-weight'], name=index_name) with connection.schema_editor() as editor: editor.add_index(Author, index) if connection.features.supports_index_column_ordering: self.assertIndexOrder(Author._meta.db_table, index_name, ['ASC', 'DESC']) # Drop the index with connection.schema_editor() as editor: editor.remove_index(Author, index) def test_indexes(self): """ Tests creation/altering of indexes """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the table is there and has the right index self.assertIn( "title", self.get_indexes(Book._meta.db_table), ) # Alter to remove the index old_field = Book._meta.get_field("title") new_field = CharField(max_length=100, db_index=False) new_field.set_attributes_from_name("title") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the table is there and has no index self.assertNotIn( "title", self.get_indexes(Book._meta.db_table), ) # Alter to re-add the index new_field2 = Book._meta.get_field("title") with connection.schema_editor() as editor: editor.alter_field(Book, new_field, new_field2, strict=True) # Ensure the table is there and has the index again self.assertIn( "title", self.get_indexes(Book._meta.db_table), ) # Add a unique column, verify that creates an implicit index new_field3 = BookWithSlug._meta.get_field("slug") with connection.schema_editor() as editor: editor.add_field(Book, new_field3) self.assertIn( "slug", self.get_uniques(Book._meta.db_table), ) # Remove the unique, check the index goes with it new_field4 = CharField(max_length=20, unique=False) new_field4.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True) self.assertNotIn( "slug", self.get_uniques(Book._meta.db_table), ) def test_text_field_with_db_index(self): with connection.schema_editor() as editor: editor.create_model(AuthorTextFieldWithIndex) # The text_field index is present if the database supports it. assertion = self.assertIn if connection.features.supports_index_on_text_field else self.assertNotIn assertion('text_field', self.get_indexes(AuthorTextFieldWithIndex._meta.db_table)) def _index_expressions_wrappers(self): index_expression = IndexExpression() index_expression.set_wrapper_classes(connection) return ', '.join([ wrapper_cls.__qualname__ for wrapper_cls in index_expression.wrapper_classes ]) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_multiple_wrapper_references(self): index = Index(OrderBy(F('name').desc(), descending=True), name='name') msg = ( "Multiple references to %s can't be used in an indexed expression." 
% self._index_expressions_wrappers() ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, msg): editor.add_index(Author, index) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_invalid_topmost_expressions(self): index = Index(Upper(F('name').desc()), name='name') msg = ( '%s must be topmost expressions in an indexed expression.' % self._index_expressions_wrappers() ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, msg): editor.add_index(Author, index) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(Lower('name').desc(), name='func_lower_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, index.name, ['DESC']) # SQL contains a database function. self.assertIs(sql.references_column(table, 'name'), True) self.assertIn('LOWER(%s)' % editor.quote_name('name'), str(sql)) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_f(self): with connection.schema_editor() as editor: editor.create_model(Tag) index = Index('slug', F('title').desc(), name='func_f_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Tag, index) sql = index.create_sql(Tag, editor) table = Tag._meta.db_table self.assertIn(index.name, self.get_constraints(table)) if connection.features.supports_index_column_ordering: self.assertIndexOrder(Tag._meta.db_table, index.name, ['ASC', 'DESC']) # SQL contains columns. self.assertIs(sql.references_column(table, 'slug'), True) self.assertIs(sql.references_column(table, 'title'), True) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Tag, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_lookups(self): with connection.schema_editor() as editor: editor.create_model(Author) with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs): index = Index( F('name__lower'), F('weight__abs'), name='func_lower_abs_lookup_idx', ) # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) # SQL contains columns. self.assertIs(sql.references_column(table, 'name'), True) self.assertIs(sql.references_column(table, 'weight'), True) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_composite_func_index(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(Lower('name'), Upper('name'), name='func_lower_upper_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) # SQL contains database functions. 
self.assertIs(sql.references_column(table, 'name'), True) sql = str(sql) self.assertIn('LOWER(%s)' % editor.quote_name('name'), sql) self.assertIn('UPPER(%s)' % editor.quote_name('name'), sql) self.assertLess(sql.index('LOWER'), sql.index('UPPER')) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_composite_func_index_field_and_expression(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) index = Index( F('author').desc(), Lower('title').asc(), 'pub_date', name='func_f_lower_field_idx', ) # Add index. with connection.schema_editor() as editor: editor.add_index(Book, index) sql = index.create_sql(Book, editor) table = Book._meta.db_table constraints = self.get_constraints(table) if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, index.name, ['DESC', 'ASC', 'ASC']) self.assertEqual(len(constraints[index.name]['columns']), 3) self.assertEqual(constraints[index.name]['columns'][2], 'pub_date') # SQL contains database functions and columns. self.assertIs(sql.references_column(table, 'author_id'), True) self.assertIs(sql.references_column(table, 'title'), True) self.assertIs(sql.references_column(table, 'pub_date'), True) self.assertIn('LOWER(%s)' % editor.quote_name('title'), str(sql)) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Book, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') @isolate_apps('schema') def test_func_index_f_decimalfield(self): class Node(Model): value = DecimalField(max_digits=5, decimal_places=2) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Node) index = Index(F('value'), name='func_f_decimalfield_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Node, index) sql = index.create_sql(Node, editor) table = Node._meta.db_table self.assertIn(index.name, self.get_constraints(table)) self.assertIs(sql.references_column(table, 'value'), True) # SQL doesn't contain casting. self.assertNotIn('CAST', str(sql)) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Node, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_cast(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(Cast('weight', FloatField()), name='func_cast_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) self.assertIs(sql.references_column(table, 'weight'), True) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_collate(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest( 'This backend does not support case-insensitive collations.' 
) with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithSlug) index = Index( Collate(F('title'), collation=collation).desc(), Collate('slug', collation=collation), name='func_collate_idx', ) # Add index. with connection.schema_editor() as editor: editor.add_index(BookWithSlug, index) sql = index.create_sql(BookWithSlug, editor) table = Book._meta.db_table self.assertIn(index.name, self.get_constraints(table)) if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, index.name, ['DESC', 'ASC']) # SQL contains columns and a collation. self.assertIs(sql.references_column(table, 'title'), True) self.assertIs(sql.references_column(table, 'slug'), True) self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql)) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Book, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') @skipIfDBFeature('collate_as_index_expression') def test_func_index_collate_f_ordered(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest( 'This backend does not support case-insensitive collations.' ) with connection.schema_editor() as editor: editor.create_model(Author) index = Index( Collate(F('name').desc(), collation=collation), name='func_collate_f_desc_idx', ) # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, index.name, ['DESC']) # SQL contains columns and a collation. self.assertIs(sql.references_column(table, 'name'), True) self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql)) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_calc(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(F('height') / (F('weight') + Value(5)), name='func_calc_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) # SQL contains columns and expressions. self.assertIs(sql.references_column(table, 'height'), True) self.assertIs(sql.references_column(table, 'weight'), True) sql = str(sql) self.assertIs( sql.index(editor.quote_name('height')) < sql.index('/') < sql.index(editor.quote_name('weight')) < sql.index('+') < sql.index('5'), True, ) # Remove index. 
with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes', 'supports_json_field') @isolate_apps('schema') def test_func_index_json_key_transform(self): class JSONModel(Model): field = JSONField() class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(JSONModel) self.isolated_local_models = [JSONModel] index = Index('field__some_key', name='func_json_key_idx') with connection.schema_editor() as editor: editor.add_index(JSONModel, index) sql = index.create_sql(JSONModel, editor) table = JSONModel._meta.db_table self.assertIn(index.name, self.get_constraints(table)) self.assertIs(sql.references_column(table, 'field'), True) with connection.schema_editor() as editor: editor.remove_index(JSONModel, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes', 'supports_json_field') @isolate_apps('schema') def test_func_index_json_key_transform_cast(self): class JSONModel(Model): field = JSONField() class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(JSONModel) self.isolated_local_models = [JSONModel] index = Index( Cast(KeyTextTransform('some_key', 'field'), IntegerField()), name='func_json_key_cast_idx', ) with connection.schema_editor() as editor: editor.add_index(JSONModel, index) sql = index.create_sql(JSONModel, editor) table = JSONModel._meta.db_table self.assertIn(index.name, self.get_constraints(table)) self.assertIs(sql.references_column(table, 'field'), True) with connection.schema_editor() as editor: editor.remove_index(JSONModel, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipIfDBFeature('supports_expression_indexes') def test_func_index_unsupported(self): # Index is ignored on databases that don't support indexes on # expressions. with connection.schema_editor() as editor: editor.create_model(Author) index = Index(F('name'), name='random_idx') with connection.schema_editor() as editor, self.assertNumQueries(0): self.assertIsNone(editor.add_index(Author, index)) self.assertIsNone(editor.remove_index(Author, index)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_nonexistent_field(self): index = Index(Lower('nonexistent'), name='func_nonexistent_idx') msg = ( "Cannot resolve keyword 'nonexistent' into field. 
Choices are: " "height, id, name, uuid, weight" ) with self.assertRaisesMessage(FieldError, msg): with connection.schema_editor() as editor: editor.add_index(Author, index) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_nondeterministic(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(Random(), name='func_random_idx') with connection.schema_editor() as editor: with self.assertRaises(DatabaseError): editor.add_index(Author, index) def test_primary_key(self): """ Tests altering of the primary key """ # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure the table is there and has the right PK self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'id') # Alter to change the PK id_field = Tag._meta.get_field("id") old_field = Tag._meta.get_field("slug") new_field = SlugField(primary_key=True) new_field.set_attributes_from_name("slug") new_field.model = Tag with connection.schema_editor() as editor: editor.remove_field(Tag, id_field) editor.alter_field(Tag, old_field, new_field) # Ensure the PK changed self.assertNotIn( 'id', self.get_indexes(Tag._meta.db_table), ) self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'slug') def test_context_manager_exit(self): """ Ensures transaction is correctly closed when an error occurs inside a SchemaEditor context. """ class SomeError(Exception): pass try: with connection.schema_editor(): raise SomeError except SomeError: self.assertFalse(connection.in_atomic_block) @skipIfDBFeature('can_rollback_ddl') def test_unsupported_transactional_ddl_disallowed(self): message = ( "Executing DDL statements while in a transaction on databases " "that can't perform a rollback is prohibited." ) with atomic(), connection.schema_editor() as editor: with self.assertRaisesMessage(TransactionManagementError, message): editor.execute(editor.sql_create_table % {'table': 'foo', 'definition': ''}) @skipUnlessDBFeature('supports_foreign_keys', 'indexes_foreign_keys') def test_foreign_key_index_long_names_regression(self): """ Regression test for #21497. Only affects databases that supports foreign keys. """ # Create the table with connection.schema_editor() as editor: editor.create_model(AuthorWithEvenLongerName) editor.create_model(BookWithLongName) # Find the properly shortened column name column_name = connection.ops.quote_name("author_foreign_key_with_really_long_field_name_id") column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase # Ensure the table is there and has an index on the column self.assertIn( column_name, self.get_indexes(BookWithLongName._meta.db_table), ) @skipUnlessDBFeature('supports_foreign_keys') def test_add_foreign_key_long_names(self): """ Regression test for #23009. Only affects databases that supports foreign keys. 
""" # Create the initial tables with connection.schema_editor() as editor: editor.create_model(AuthorWithEvenLongerName) editor.create_model(BookWithLongName) # Add a second FK, this would fail due to long ref name before the fix new_field = ForeignKey(AuthorWithEvenLongerName, CASCADE, related_name="something") new_field.set_attributes_from_name("author_other_really_long_named_i_mean_so_long_fk") with connection.schema_editor() as editor: editor.add_field(BookWithLongName, new_field) @isolate_apps('schema') @skipUnlessDBFeature('supports_foreign_keys') def test_add_foreign_key_quoted_db_table(self): class Author(Model): class Meta: db_table = '"table_author_double_quoted"' app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) if connection.vendor == 'mysql': self.assertForeignKeyExists(Book, 'author_id', '"table_author_double_quoted"') else: self.assertForeignKeyExists(Book, 'author_id', 'table_author_double_quoted') def test_add_foreign_object(self): with connection.schema_editor() as editor: editor.create_model(BookForeignObj) new_field = ForeignObject(Author, on_delete=CASCADE, from_fields=['author_id'], to_fields=['id']) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.add_field(BookForeignObj, new_field) def test_creation_deletion_reserved_names(self): """ Tries creating a model's table, and then deleting it when it has a SQL reserved name. """ # Create the table with connection.schema_editor() as editor: try: editor.create_model(Thing) except OperationalError as e: self.fail("Errors when applying initial migration for a model " "with a table named after an SQL reserved word: %s" % e) # The table is there list(Thing.objects.all()) # Clean up that table with connection.schema_editor() as editor: editor.delete_model(Thing) # The table is gone with self.assertRaises(DatabaseError): list(Thing.objects.all()) def test_remove_constraints_capital_letters(self): """ #23065 - Constraint names must be quoted if they contain capital letters. 
""" def get_field(*args, field_class=IntegerField, **kwargs): kwargs['db_column'] = "CamelCase" field = field_class(*args, **kwargs) field.set_attributes_from_name("CamelCase") return field model = Author field = get_field() table = model._meta.db_table column = field.column identifier_converter = connection.introspection.identifier_converter with connection.schema_editor() as editor: editor.create_model(model) editor.add_field(model, field) constraint_name = 'CamelCaseIndex' expected_constraint_name = identifier_converter(constraint_name) editor.execute( editor.sql_create_index % { "table": editor.quote_name(table), "name": editor.quote_name(constraint_name), "using": "", "columns": editor.quote_name(column), "extra": "", "condition": "", "include": "", } ) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(db_index=True), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) constraint_name = 'CamelCaseUniqConstraint' expected_constraint_name = identifier_converter(constraint_name) editor.execute(editor._create_unique_sql(model, [field.column], constraint_name)) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(unique=True), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) if editor.sql_create_fk: constraint_name = 'CamelCaseFKConstraint' expected_constraint_name = identifier_converter(constraint_name) editor.execute( editor.sql_create_fk % { "table": editor.quote_name(table), "name": editor.quote_name(constraint_name), "column": editor.quote_name(column), "to_table": editor.quote_name(table), "to_column": editor.quote_name(model._meta.auto_field.column), "deferrable": connection.ops.deferrable_sql(), } ) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(Author, CASCADE, field_class=ForeignKey), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) def test_add_field_use_effective_default(self): """ #23987 - effective_default() should be used as the field default when adding a new field. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no surname field columns = self.column_classes(Author) self.assertNotIn("surname", columns) # Create a row Author.objects.create(name='Anonymous1') # Add new CharField to ensure default will be used from effective_default new_field = CharField(max_length=15, blank=True) new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure field was added with the right default with connection.cursor() as cursor: cursor.execute("SELECT surname FROM schema_author;") item = cursor.fetchall()[0] self.assertEqual(item[0], None if connection.features.interprets_empty_strings_as_nulls else '') def test_add_field_default_dropped(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no surname field columns = self.column_classes(Author) self.assertNotIn("surname", columns) # Create a row Author.objects.create(name='Anonymous1') # Add new CharField with a default new_field = CharField(max_length=15, blank=True, default='surname default') new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure field was added with the right default with connection.cursor() as cursor: cursor.execute("SELECT surname FROM schema_author;") item = cursor.fetchall()[0] self.assertEqual(item[0], 'surname default') # And that the default is no longer set in the database. field = next( f for f in connection.introspection.get_table_description(cursor, "schema_author") if f.name == "surname" ) if connection.features.can_introspect_default: self.assertIsNone(field.default) def test_add_field_default_nullable(self): with connection.schema_editor() as editor: editor.create_model(Author) # Add new nullable CharField with a default. new_field = CharField(max_length=15, blank=True, null=True, default='surname') new_field.set_attributes_from_name('surname') with connection.schema_editor() as editor: editor.add_field(Author, new_field) Author.objects.create(name='Anonymous1') with connection.cursor() as cursor: cursor.execute('SELECT surname FROM schema_author;') item = cursor.fetchall()[0] self.assertIsNone(item[0]) field = next( f for f in connection.introspection.get_table_description( cursor, 'schema_author', ) if f.name == 'surname' ) # Field is still nullable. self.assertTrue(field.null_ok) # The database default is no longer set. if connection.features.can_introspect_default: self.assertIn(field.default, ['NULL', None]) def test_alter_field_default_dropped(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Create a row Author.objects.create(name='Anonymous1') self.assertIsNone(Author.objects.get().height) old_field = Author._meta.get_field('height') # The default from the new field is used in updating existing rows. new_field = IntegerField(blank=True, default=42) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual(Author.objects.get().height, 42) # The database default should be removed. 
with connection.cursor() as cursor: field = next( f for f in connection.introspection.get_table_description(cursor, "schema_author") if f.name == "height" ) if connection.features.can_introspect_default: self.assertIsNone(field.default) def test_alter_field_default_doesnt_perform_queries(self): """ No queries are performed if a field default changes and the field's not changing from null to non-null. """ with connection.schema_editor() as editor: editor.create_model(AuthorWithDefaultHeight) old_field = AuthorWithDefaultHeight._meta.get_field('height') new_default = old_field.default * 2 new_field = PositiveIntegerField(null=True, blank=True, default=new_default) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor, self.assertNumQueries(0): editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_fk_attributes_noop(self): """ No queries are performed when changing field attributes that don't affect the schema. """ with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) old_field = Book._meta.get_field('author') new_field = ForeignKey( Author, blank=True, editable=False, error_messages={'invalid': 'error message'}, help_text='help text', limit_choices_to={'limit': 'choice'}, on_delete=PROTECT, related_name='related_name', related_query_name='related_query_name', validators=[lambda x: x], verbose_name='verbose name', ) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor, self.assertNumQueries(0): editor.alter_field(Book, old_field, new_field, strict=True) with connection.schema_editor() as editor, self.assertNumQueries(0): editor.alter_field(Book, new_field, old_field, strict=True) def test_add_textfield_unhashable_default(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Create a row Author.objects.create(name='Anonymous1') # Create a field that has an unhashable default new_field = TextField(default={}) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.add_field(Author, new_field) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_add_indexed_charfield(self): field = CharField(max_length=255, db_index=True) field.set_attributes_from_name('nom_de_plume') with connection.schema_editor() as editor: editor.create_model(Author) editor.add_field(Author, field) # Should create two indexes; one for like operator. self.assertEqual( self.get_constraints_for_column(Author, 'nom_de_plume'), ['schema_author_nom_de_plume_7570a851', 'schema_author_nom_de_plume_7570a851_like'], ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_add_unique_charfield(self): field = CharField(max_length=255, unique=True) field.set_attributes_from_name('nom_de_plume') with connection.schema_editor() as editor: editor.create_model(Author) editor.add_field(Author, field) # Should create two indexes; one for like operator. self.assertEqual( self.get_constraints_for_column(Author, 'nom_de_plume'), ['schema_author_nom_de_plume_7570a851_like', 'schema_author_nom_de_plume_key'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_index_to_charfield(self): # Create the table and verify no initial indexes. 
with connection.schema_editor() as editor: editor.create_model(Author) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) # Alter to add db_index=True and create 2 indexes. old_field = Author._meta.get_field('name') new_field = CharField(max_length=255, db_index=True) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Author, 'name'), ['schema_author_name_1fbc5617', 'schema_author_name_1fbc5617_like'] ) # Remove db_index=True to drop both indexes. with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_unique_to_charfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Author) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) # Alter to add unique=True and create 2 indexes. old_field = Author._meta.get_field('name') new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Author, 'name'), ['schema_author_name_1fbc5617_like', 'schema_author_name_1fbc5617_uniq'] ) # Remove unique=True to drop both indexes. with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_index_to_textfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Note) self.assertEqual(self.get_constraints_for_column(Note, 'info'), []) # Alter to add db_index=True and create 2 indexes. old_field = Note._meta.get_field('info') new_field = TextField(db_index=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Note, 'info'), ['schema_note_info_4b0ea695', 'schema_note_info_4b0ea695_like'] ) # Remove db_index=True to drop both indexes. with connection.schema_editor() as editor: editor.alter_field(Note, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Note, 'info'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_unique_to_charfield_with_db_index(self): # Create the table and verify initial indexes. 
with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to add unique=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, db_index=True, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to remove unique=True (should drop unique index) new_field2 = CharField(max_length=100, db_index=True) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_remove_unique_and_db_index_from_charfield(self): # Create the table and verify initial indexes. with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to add unique=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, db_index=True, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to remove both unique=True and db_index=True (should drop all indexes) new_field2 = CharField(max_length=100) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual(self.get_constraints_for_column(BookWithoutAuthor, 'title'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_swap_unique_and_db_index_with_charfield(self): # Create the table and verify initial indexes. 
with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to set unique=True and remove db_index=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to set db_index=True and remove unique=True (should restore index) new_field2 = CharField(max_length=100, db_index=True) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_db_index_to_charfield_with_unique(self): # Create the table and verify initial indexes. with connection.schema_editor() as editor: editor.create_model(Tag) self.assertEqual( self.get_constraints_for_column(Tag, 'slug'), ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key'] ) # Alter to add db_index=True old_field = Tag._meta.get_field('slug') new_field = SlugField(db_index=True, unique=True) new_field.set_attributes_from_name('slug') with connection.schema_editor() as editor: editor.alter_field(Tag, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Tag, 'slug'), ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key'] ) # Alter to remove db_index=True new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name('slug') with connection.schema_editor() as editor: editor.alter_field(Tag, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(Tag, 'slug'), ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key'] ) def test_alter_field_add_index_to_integerfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Author) self.assertEqual(self.get_constraints_for_column(Author, 'weight'), []) # Alter to add db_index=True and create index. old_field = Author._meta.get_field('weight') new_field = IntegerField(null=True, db_index=True) new_field.set_attributes_from_name('weight') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'weight'), ['schema_author_weight_587740f9']) # Remove db_index=True to drop index. with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'weight'), []) def test_alter_pk_with_self_referential_field(self): """ Changing the primary key field name of a model with a self-referential foreign key (#26384). 
""" with connection.schema_editor() as editor: editor.create_model(Node) old_field = Node._meta.get_field('node_id') new_field = AutoField(primary_key=True) new_field.set_attributes_from_name('id') with connection.schema_editor() as editor: editor.alter_field(Node, old_field, new_field, strict=True) self.assertForeignKeyExists(Node, 'parent_id', Node._meta.db_table) @mock.patch('django.db.backends.base.schema.datetime') @mock.patch('django.db.backends.base.schema.timezone') def test_add_datefield_and_datetimefield_use_effective_default(self, mocked_datetime, mocked_tz): """ effective_default() should be used for DateField, DateTimeField, and TimeField if auto_now or auto_now_add is set (#25005). """ now = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1) now_tz = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1, tzinfo=timezone.utc) mocked_datetime.now = mock.MagicMock(return_value=now) mocked_tz.now = mock.MagicMock(return_value=now_tz) # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Check auto_now/auto_now_add attributes are not defined columns = self.column_classes(Author) self.assertNotIn("dob_auto_now", columns) self.assertNotIn("dob_auto_now_add", columns) self.assertNotIn("dtob_auto_now", columns) self.assertNotIn("dtob_auto_now_add", columns) self.assertNotIn("tob_auto_now", columns) self.assertNotIn("tob_auto_now_add", columns) # Create a row Author.objects.create(name='Anonymous1') # Ensure fields were added with the correct defaults dob_auto_now = DateField(auto_now=True) dob_auto_now.set_attributes_from_name('dob_auto_now') self.check_added_field_default( editor, Author, dob_auto_now, 'dob_auto_now', now.date(), cast_function=lambda x: x.date(), ) dob_auto_now_add = DateField(auto_now_add=True) dob_auto_now_add.set_attributes_from_name('dob_auto_now_add') self.check_added_field_default( editor, Author, dob_auto_now_add, 'dob_auto_now_add', now.date(), cast_function=lambda x: x.date(), ) dtob_auto_now = DateTimeField(auto_now=True) dtob_auto_now.set_attributes_from_name('dtob_auto_now') self.check_added_field_default( editor, Author, dtob_auto_now, 'dtob_auto_now', now, ) dt_tm_of_birth_auto_now_add = DateTimeField(auto_now_add=True) dt_tm_of_birth_auto_now_add.set_attributes_from_name('dtob_auto_now_add') self.check_added_field_default( editor, Author, dt_tm_of_birth_auto_now_add, 'dtob_auto_now_add', now, ) tob_auto_now = TimeField(auto_now=True) tob_auto_now.set_attributes_from_name('tob_auto_now') self.check_added_field_default( editor, Author, tob_auto_now, 'tob_auto_now', now.time(), cast_function=lambda x: x.time(), ) tob_auto_now_add = TimeField(auto_now_add=True) tob_auto_now_add.set_attributes_from_name('tob_auto_now_add') self.check_added_field_default( editor, Author, tob_auto_now_add, 'tob_auto_now_add', now.time(), cast_function=lambda x: x.time(), ) def test_namespaced_db_table_create_index_name(self): """ Table names are stripped of their namespace/schema before being used to generate index names. 
""" with connection.schema_editor() as editor: max_name_length = connection.ops.max_name_length() or 200 namespace = 'n' * max_name_length table_name = 't' * max_name_length namespaced_table_name = '"%s"."%s"' % (namespace, table_name) self.assertEqual( editor._create_index_name(table_name, []), editor._create_index_name(namespaced_table_name, []), ) @unittest.skipUnless(connection.vendor == 'oracle', 'Oracle specific db_table syntax') def test_creation_with_db_table_double_quotes(self): oracle_user = connection.creation._test_database_user() class Student(Model): name = CharField(max_length=30) class Meta: app_label = 'schema' apps = new_apps db_table = '"%s"."DJANGO_STUDENT_TABLE"' % oracle_user class Document(Model): name = CharField(max_length=30) students = ManyToManyField(Student) class Meta: app_label = 'schema' apps = new_apps db_table = '"%s"."DJANGO_DOCUMENT_TABLE"' % oracle_user self.local_models = [Student, Document] with connection.schema_editor() as editor: editor.create_model(Student) editor.create_model(Document) doc = Document.objects.create(name='Test Name') student = Student.objects.create(name='Some man') doc.students.add(student) @isolate_apps('schema') @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific db_table syntax.') def test_namespaced_db_table_foreign_key_reference(self): with connection.cursor() as cursor: cursor.execute('CREATE SCHEMA django_schema_tests') def delete_schema(): with connection.cursor() as cursor: cursor.execute('DROP SCHEMA django_schema_tests CASCADE') self.addCleanup(delete_schema) class Author(Model): class Meta: app_label = 'schema' class Book(Model): class Meta: app_label = 'schema' db_table = '"django_schema_tests"."schema_book"' author = ForeignKey(Author, CASCADE) author.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) editor.add_field(Book, author) def test_rename_table_renames_deferred_sql_references(self): atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: editor.create_model(Author) editor.create_model(Book) editor.alter_db_table(Author, 'schema_author', 'schema_renamed_author') editor.alter_db_table(Author, 'schema_book', 'schema_renamed_book') try: self.assertGreater(len(editor.deferred_sql), 0) for statement in editor.deferred_sql: self.assertIs(statement.references_table('schema_author'), False) self.assertIs(statement.references_table('schema_book'), False) finally: editor.alter_db_table(Author, 'schema_renamed_author', 'schema_author') editor.alter_db_table(Author, 'schema_renamed_book', 'schema_book') def test_rename_column_renames_deferred_sql_references(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) old_title = Book._meta.get_field('title') new_title = CharField(max_length=100, db_index=True) new_title.set_attributes_from_name('renamed_title') editor.alter_field(Book, old_title, new_title) old_author = Book._meta.get_field('author') new_author = ForeignKey(Author, CASCADE) new_author.set_attributes_from_name('renamed_author') editor.alter_field(Book, old_author, new_author) self.assertGreater(len(editor.deferred_sql), 0) for statement in editor.deferred_sql: self.assertIs(statement.references_column('book', 'title'), False) self.assertIs(statement.references_column('book', 'author_id'), False) @isolate_apps('schema') def 
test_referenced_field_without_constraint_rename_inside_atomic_block(self): """ Foreign keys without database level constraint don't prevent the field they reference from being renamed in an atomic block. """ class Foo(Model): field = CharField(max_length=255, unique=True) class Meta: app_label = 'schema' class Bar(Model): foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False) class Meta: app_label = 'schema' self.isolated_local_models = [Foo, Bar] with connection.schema_editor() as editor: editor.create_model(Foo) editor.create_model(Bar) new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('renamed') with connection.schema_editor(atomic=True) as editor: editor.alter_field(Foo, Foo._meta.get_field('field'), new_field) @isolate_apps('schema') def test_referenced_table_without_constraint_rename_inside_atomic_block(self): """ Foreign keys without database level constraint don't prevent the table they reference from being renamed in an atomic block. """ class Foo(Model): field = CharField(max_length=255, unique=True) class Meta: app_label = 'schema' class Bar(Model): foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False) class Meta: app_label = 'schema' self.isolated_local_models = [Foo, Bar] with connection.schema_editor() as editor: editor.create_model(Foo) editor.create_model(Bar) new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('renamed') with connection.schema_editor(atomic=True) as editor: editor.alter_db_table(Foo, Foo._meta.db_table, 'renamed_table') Foo._meta.db_table = 'renamed_table' @isolate_apps('schema') @skipUnlessDBFeature('supports_collation_on_charfield') def test_db_collation_charfield(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest('Language collations are not supported.') class Foo(Model): field = CharField(max_length=255, db_collation=collation) class Meta: app_label = 'schema' self.isolated_local_models = [Foo] with connection.schema_editor() as editor: editor.create_model(Foo) self.assertEqual( self.get_column_collation(Foo._meta.db_table, 'field'), collation, ) @isolate_apps('schema') @skipUnlessDBFeature('supports_collation_on_textfield') def test_db_collation_textfield(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest('Language collations are not supported.') class Foo(Model): field = TextField(db_collation=collation) class Meta: app_label = 'schema' self.isolated_local_models = [Foo] with connection.schema_editor() as editor: editor.create_model(Foo) self.assertEqual( self.get_column_collation(Foo._meta.db_table, 'field'), collation, ) @skipUnlessDBFeature('supports_collation_on_charfield') def test_add_field_db_collation(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest('Language collations are not supported.') with connection.schema_editor() as editor: editor.create_model(Author) new_field = CharField(max_length=255, db_collation=collation) new_field.set_attributes_from_name('alias') with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) self.assertEqual( columns['alias'][0], connection.features.introspected_field_types['CharField'], ) self.assertEqual(columns['alias'][1][8], collation) @skipUnlessDBFeature('supports_collation_on_charfield') def test_alter_field_db_collation(self): collation = 
        connection.features.test_collations.get('non_default')
        if not collation:
            self.skipTest('Language collations are not supported.')
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        old_field = Author._meta.get_field('name')
        new_field = CharField(max_length=255, db_collation=collation)
        new_field.set_attributes_from_name('name')
        new_field.model = Author
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        self.assertEqual(
            self.get_column_collation(Author._meta.db_table, 'name'),
            collation,
        )
        with connection.schema_editor() as editor:
            editor.alter_field(Author, new_field, old_field, strict=True)
        self.assertIsNone(self.get_column_collation(Author._meta.db_table, 'name'))

    @skipUnlessDBFeature('supports_collation_on_charfield')
    def test_alter_field_type_and_db_collation(self):
        collation = connection.features.test_collations.get('non_default')
        if not collation:
            self.skipTest('Language collations are not supported.')
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        old_field = Note._meta.get_field('info')
        new_field = CharField(max_length=255, db_collation=collation)
        new_field.set_attributes_from_name('info')
        new_field.model = Note
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)
        columns = self.column_classes(Note)
        self.assertEqual(
            columns['info'][0],
            connection.features.introspected_field_types['CharField'],
        )
        self.assertEqual(columns['info'][1][8], collation)
        with connection.schema_editor() as editor:
            editor.alter_field(Note, new_field, old_field, strict=True)
        columns = self.column_classes(Note)
        self.assertEqual(columns['info'][0], 'TextField')
        self.assertIsNone(columns['info'][1][8])

    @skipUnlessDBFeature(
        'supports_collation_on_charfield',
        'supports_non_deterministic_collations',
    )
    def test_ci_cs_db_collation(self):
        cs_collation = connection.features.test_collations.get('cs')
        ci_collation = connection.features.test_collations.get('ci')
        try:
            if connection.vendor == 'mysql':
                cs_collation = 'latin1_general_cs'
            elif connection.vendor == 'postgresql':
                cs_collation = 'en-x-icu'
                with connection.cursor() as cursor:
                    cursor.execute(
                        "CREATE COLLATION IF NOT EXISTS case_insensitive "
                        "(provider = icu, locale = 'und-u-ks-level2', "
                        "deterministic = false)"
                    )
                ci_collation = 'case_insensitive'
            # Create the table.
            with connection.schema_editor() as editor:
                editor.create_model(Author)
            # Case-insensitive collation.
            old_field = Author._meta.get_field('name')
            new_field_ci = CharField(max_length=255, db_collation=ci_collation)
            new_field_ci.set_attributes_from_name('name')
            new_field_ci.model = Author
            with connection.schema_editor() as editor:
                editor.alter_field(Author, old_field, new_field_ci, strict=True)
            Author.objects.create(name='ANDREW')
            self.assertIs(Author.objects.filter(name='Andrew').exists(), True)
            # Case-sensitive collation.
            new_field_cs = CharField(max_length=255, db_collation=cs_collation)
            new_field_cs.set_attributes_from_name('name')
            new_field_cs.model = Author
            with connection.schema_editor() as editor:
                editor.alter_field(Author, new_field_ci, new_field_cs, strict=True)
            self.assertIs(Author.objects.filter(name='Andrew').exists(), False)
        finally:
            if connection.vendor == 'postgresql':
                with connection.cursor() as cursor:
                    cursor.execute('DROP COLLATION IF EXISTS case_insensitive')
29aefe3bbbcd90b9a20b6c105768e4703f1a0387b6d1bbe1f3a1c2d986e9a268
import datetime
import pickle
import unittest
import uuid
from collections import namedtuple
from copy import deepcopy
from decimal import Decimal
from unittest import mock

from django.core.exceptions import FieldError
from django.db import DatabaseError, NotSupportedError, connection
from django.db.models import (
    AutoField, Avg, BinaryField, BooleanField, Case, CharField, Count,
    DateField, DateTimeField, DecimalField, DurationField, Exists, Expression,
    ExpressionList, ExpressionWrapper, F, FloatField, Func, IntegerField, Max,
    Min, Model, OrderBy, OuterRef, Q, StdDev, Subquery, Sum, TimeField,
    UUIDField, Value, Variance, When,
)
from django.db.models.expressions import (
    Col, Combinable, CombinedExpression, RawSQL, Ref,
)
from django.db.models.functions import (
    Coalesce, Concat, Left, Length, Lower, Substr, Upper,
)
from django.db.models.sql import constants
from django.db.models.sql.datastructures import Join
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import (
    Approximate, CaptureQueriesContext, isolate_apps, register_lookup,
)
from django.utils.functional import SimpleLazyObject

from .models import (
    UUID, UUIDPK, Company, Employee, Experiment, Manager, Number,
    RemoteEmployee, Result, SimulationRun, Time,
)


class BasicExpressionsTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        cls.example_inc = Company.objects.create(
            name="Example Inc.", num_employees=2300, num_chairs=5,
            ceo=Employee.objects.create(firstname="Joe", lastname="Smith", salary=10)
        )
        cls.foobar_ltd = Company.objects.create(
            name="Foobar Ltd.", num_employees=3, num_chairs=4, based_in_eu=True,
            ceo=Employee.objects.create(firstname="Frank", lastname="Meyer", salary=20)
        )
        cls.max = Employee.objects.create(firstname='Max', lastname='Mustermann', salary=30)
        cls.gmbh = Company.objects.create(name='Test GmbH', num_employees=32, num_chairs=1, ceo=cls.max)

    def setUp(self):
        self.company_query = Company.objects.values(
            "name", "num_employees", "num_chairs"
        ).order_by(
            "name", "num_employees", "num_chairs"
        )

    def test_annotate_values_aggregate(self):
        companies = Company.objects.annotate(
            salaries=F('ceo__salary'),
        ).values('num_employees', 'salaries').aggregate(
            result=Sum(
                F('salaries') + F('num_employees'),
                output_field=IntegerField()
            ),
        )
        self.assertEqual(companies['result'], 2395)

    def test_annotate_values_filter(self):
        companies = Company.objects.annotate(
            foo=RawSQL('%s', ['value']),
        ).filter(foo='value').order_by('name')
        self.assertSequenceEqual(
            companies,
            [self.example_inc, self.foobar_ltd, self.gmbh],
        )

    def test_annotate_values_count(self):
        companies = Company.objects.annotate(foo=RawSQL('%s', ['value']))
        self.assertEqual(companies.count(), 3)

    @skipUnlessDBFeature('supports_boolean_expr_in_select_clause')
    def test_filtering_on_annotate_that_uses_q(self):
        self.assertEqual(
            Company.objects.annotate(
                num_employees_check=ExpressionWrapper(Q(num_employees__gt=3), output_field=BooleanField())
            ).filter(num_employees_check=True).count(),
            2,
        )

    def test_filtering_on_q_that_is_boolean(self):
        self.assertEqual(
            Company.objects.filter(
                ExpressionWrapper(Q(num_employees__gt=3), output_field=BooleanField())
            ).count(),
            2,
        )

    def test_filtering_on_rawsql_that_is_boolean(self):
        self.assertEqual(
            Company.objects.filter(
                RawSQL('num_employees > %s', (3,), output_field=BooleanField()),
            ).count(),
            2,
        )

    def test_filter_inter_attribute(self):
        # We can filter on attribute relationships on same model obj, e.g.
        # find companies where the number of employees is greater
        # than the number of chairs.
self.assertSequenceEqual( self.company_query.filter(num_employees__gt=F("num_chairs")), [ { "num_chairs": 5, "name": "Example Inc.", "num_employees": 2300, }, { "num_chairs": 1, "name": "Test GmbH", "num_employees": 32 }, ], ) def test_update(self): # We can set one field to have the value of another field # Make sure we have enough chairs self.company_query.update(num_chairs=F("num_employees")) self.assertSequenceEqual( self.company_query, [ { "num_chairs": 2300, "name": "Example Inc.", "num_employees": 2300 }, { "num_chairs": 3, "name": "Foobar Ltd.", "num_employees": 3 }, { "num_chairs": 32, "name": "Test GmbH", "num_employees": 32 } ], ) def test_arithmetic(self): # We can perform arithmetic operations in expressions # Make sure we have 2 spare chairs self.company_query.update(num_chairs=F("num_employees") + 2) self.assertSequenceEqual( self.company_query, [ { 'num_chairs': 2302, 'name': 'Example Inc.', 'num_employees': 2300 }, { 'num_chairs': 5, 'name': 'Foobar Ltd.', 'num_employees': 3 }, { 'num_chairs': 34, 'name': 'Test GmbH', 'num_employees': 32 } ], ) def test_order_of_operations(self): # Law of order of operations is followed self.company_query.update(num_chairs=F('num_employees') + 2 * F('num_employees')) self.assertSequenceEqual( self.company_query, [ { 'num_chairs': 6900, 'name': 'Example Inc.', 'num_employees': 2300 }, { 'num_chairs': 9, 'name': 'Foobar Ltd.', 'num_employees': 3 }, { 'num_chairs': 96, 'name': 'Test GmbH', 'num_employees': 32 } ], ) def test_parenthesis_priority(self): # Law of order of operations can be overridden by parentheses self.company_query.update(num_chairs=(F('num_employees') + 2) * F('num_employees')) self.assertSequenceEqual( self.company_query, [ { 'num_chairs': 5294600, 'name': 'Example Inc.', 'num_employees': 2300 }, { 'num_chairs': 15, 'name': 'Foobar Ltd.', 'num_employees': 3 }, { 'num_chairs': 1088, 'name': 'Test GmbH', 'num_employees': 32 } ], ) def test_update_with_fk(self): # ForeignKey can become updated with the value of another ForeignKey. 
self.assertEqual(Company.objects.update(point_of_contact=F('ceo')), 3) self.assertQuerysetEqual( Company.objects.all(), ['Joe Smith', 'Frank Meyer', 'Max Mustermann'], lambda c: str(c.point_of_contact), ordered=False ) def test_update_with_none(self): Number.objects.create(integer=1, float=1.0) Number.objects.create(integer=2) Number.objects.filter(float__isnull=False).update(float=Value(None)) self.assertQuerysetEqual( Number.objects.all(), [None, None], lambda n: n.float, ordered=False ) def test_filter_with_join(self): # F Expressions can also span joins Company.objects.update(point_of_contact=F('ceo')) c = Company.objects.first() c.point_of_contact = Employee.objects.create(firstname="Guido", lastname="van Rossum") c.save() self.assertQuerysetEqual( Company.objects.filter(ceo__firstname=F('point_of_contact__firstname')), ['Foobar Ltd.', 'Test GmbH'], lambda c: c.name, ordered=False ) Company.objects.exclude( ceo__firstname=F("point_of_contact__firstname") ).update(name="foo") self.assertEqual( Company.objects.exclude( ceo__firstname=F('point_of_contact__firstname') ).get().name, "foo", ) msg = "Joined field references are not permitted in this query" with self.assertRaisesMessage(FieldError, msg): Company.objects.exclude( ceo__firstname=F('point_of_contact__firstname') ).update(name=F('point_of_contact__lastname')) def test_object_update(self): # F expressions can be used to update attributes on single objects self.gmbh.num_employees = F('num_employees') + 4 self.gmbh.save() self.gmbh.refresh_from_db() self.assertEqual(self.gmbh.num_employees, 36) def test_new_object_save(self): # We should be able to use Funcs when inserting new data test_co = Company(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max) test_co.save() test_co.refresh_from_db() self.assertEqual(test_co.name, "upper") def test_new_object_create(self): test_co = Company.objects.create(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max) test_co.refresh_from_db() self.assertEqual(test_co.name, "upper") def test_object_create_with_aggregate(self): # Aggregates are not allowed when inserting new data msg = 'Aggregate functions are not allowed in this query (num_employees=Max(Value(1))).' with self.assertRaisesMessage(FieldError, msg): Company.objects.create( name='Company', num_employees=Max(Value(1)), num_chairs=1, ceo=Employee.objects.create(firstname="Just", lastname="Doit", salary=30), ) def test_object_update_fk(self): # F expressions cannot be used to update attributes which are foreign # keys, or attributes which involve joins. test_gmbh = Company.objects.get(pk=self.gmbh.pk) msg = 'F(ceo)": "Company.point_of_contact" must be a "Employee" instance.' 
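        # assertRaisesMessage() does substring matching, so msg deliberately
        # omits the leading 'Cannot assign "' part of the full error message.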
with self.assertRaisesMessage(ValueError, msg): test_gmbh.point_of_contact = F('ceo') test_gmbh.point_of_contact = self.gmbh.ceo test_gmbh.save() test_gmbh.name = F('ceo__lastname') msg = 'Joined field references are not permitted in this query' with self.assertRaisesMessage(FieldError, msg): test_gmbh.save() def test_update_inherited_field_value(self): msg = 'Joined field references are not permitted in this query' with self.assertRaisesMessage(FieldError, msg): RemoteEmployee.objects.update(adjusted_salary=F('salary') * 5) def test_object_update_unsaved_objects(self): # F expressions cannot be used to update attributes on objects which do # not yet exist in the database acme = Company(name='The Acme Widget Co.', num_employees=12, num_chairs=5, ceo=self.max) acme.num_employees = F("num_employees") + 16 msg = ( 'Failed to insert expression "Col(expressions_company, ' 'expressions.Company.num_employees) + Value(16)" on ' 'expressions.Company.num_employees. F() expressions can only be ' 'used to update, not to insert.' ) with self.assertRaisesMessage(ValueError, msg): acme.save() acme.num_employees = 12 acme.name = Lower(F('name')) msg = ( 'Failed to insert expression "Lower(Col(expressions_company, ' 'expressions.Company.name))" on expressions.Company.name. F() ' 'expressions can only be used to update, not to insert.' ) with self.assertRaisesMessage(ValueError, msg): acme.save() def test_ticket_11722_iexact_lookup(self): Employee.objects.create(firstname="John", lastname="Doe") test = Employee.objects.create(firstname="Test", lastname="test") queryset = Employee.objects.filter(firstname__iexact=F('lastname')) self.assertSequenceEqual(queryset, [test]) def test_ticket_16731_startswith_lookup(self): Employee.objects.create(firstname="John", lastname="Doe") e2 = Employee.objects.create(firstname="Jack", lastname="Jackson") e3 = Employee.objects.create(firstname="Jack", lastname="jackson") self.assertSequenceEqual( Employee.objects.filter(lastname__startswith=F('firstname')), [e2, e3] if connection.features.has_case_insensitive_like else [e2] ) qs = Employee.objects.filter(lastname__istartswith=F('firstname')).order_by('pk') self.assertSequenceEqual(qs, [e2, e3]) def test_ticket_18375_join_reuse(self): # Reverse multijoin F() references and the lookup target the same join. # Pre #18375 the F() join was generated first and the lookup couldn't # reuse that join. qs = Employee.objects.filter(company_ceo_set__num_chairs=F('company_ceo_set__num_employees')) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_kwarg_ordering(self): # The next query was dict-randomization dependent - if the "gte=1" # was seen first, then the F() will reuse the join generated by the # gte lookup, if F() was seen first, then it generated a join the # other lookups could not reuse. qs = Employee.objects.filter( company_ceo_set__num_chairs=F('company_ceo_set__num_employees'), company_ceo_set__num_chairs__gte=1, ) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_kwarg_ordering_2(self): # Another similar case for F() than above. Now we have the same join # in two filter kwargs, one in the lhs lookup, one in F. Here pre # #18375 the amount of joins generated was random if dict # randomization was enabled, that is the generated query dependent # on which clause was seen first. 
qs = Employee.objects.filter( company_ceo_set__num_employees=F('pk'), pk=F('company_ceo_set__num_employees') ) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_chained_filters(self): # F() expressions do not reuse joins from previous filter. qs = Employee.objects.filter( company_ceo_set__num_employees=F('pk') ).filter( company_ceo_set__num_employees=F('company_ceo_set__num_employees') ) self.assertEqual(str(qs.query).count('JOIN'), 2) def test_order_by_exists(self): mary = Employee.objects.create(firstname='Mary', lastname='Mustermann', salary=20) mustermanns_by_seniority = Employee.objects.filter(lastname='Mustermann').order_by( # Order by whether the employee is the CEO of a company Exists(Company.objects.filter(ceo=OuterRef('pk'))).desc() ) self.assertSequenceEqual(mustermanns_by_seniority, [self.max, mary]) def test_order_by_multiline_sql(self): raw_order_by = ( RawSQL(''' CASE WHEN num_employees > 1000 THEN num_chairs ELSE 0 END ''', []).desc(), RawSQL(''' CASE WHEN num_chairs > 1 THEN 1 ELSE 0 END ''', []).asc() ) for qs in ( Company.objects.all(), Company.objects.distinct(), ): with self.subTest(qs=qs): self.assertSequenceEqual( qs.order_by(*raw_order_by), [self.example_inc, self.gmbh, self.foobar_ltd], ) def test_outerref(self): inner = Company.objects.filter(point_of_contact=OuterRef('pk')) msg = ( 'This queryset contains a reference to an outer query and may only ' 'be used in a subquery.' ) with self.assertRaisesMessage(ValueError, msg): inner.exists() outer = Employee.objects.annotate(is_point_of_contact=Exists(inner)) self.assertIs(outer.exists(), True) def test_exist_single_field_output_field(self): queryset = Company.objects.values('pk') self.assertIsInstance(Exists(queryset).output_field, BooleanField) def test_subquery(self): Company.objects.filter(name='Example Inc.').update( point_of_contact=Employee.objects.get(firstname='Joe', lastname='Smith'), ceo=self.max, ) Employee.objects.create(firstname='Bob', lastname='Brown', salary=40) qs = Employee.objects.annotate( is_point_of_contact=Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))), is_not_point_of_contact=~Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))), is_ceo_of_small_company=Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))), is_ceo_small_2=~~Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))), largest_company=Subquery(Company.objects.order_by('-num_employees').filter( Q(ceo=OuterRef('pk')) | Q(point_of_contact=OuterRef('pk')) ).values('name')[:1], output_field=CharField()) ).values( 'firstname', 'is_point_of_contact', 'is_not_point_of_contact', 'is_ceo_of_small_company', 'is_ceo_small_2', 'largest_company', ).order_by('firstname') results = list(qs) # Could use Coalesce(subq, Value('')) instead except for the bug in # cx_Oracle mentioned in #23843. 
bob = results[0] if bob['largest_company'] == '' and connection.features.interprets_empty_strings_as_nulls: bob['largest_company'] = None self.assertEqual(results, [ { 'firstname': 'Bob', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': False, 'is_ceo_small_2': False, 'largest_company': None, }, { 'firstname': 'Frank', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': True, 'is_ceo_small_2': True, 'largest_company': 'Foobar Ltd.', }, { 'firstname': 'Joe', 'is_point_of_contact': True, 'is_not_point_of_contact': False, 'is_ceo_of_small_company': False, 'is_ceo_small_2': False, 'largest_company': 'Example Inc.', }, { 'firstname': 'Max', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': True, 'is_ceo_small_2': True, 'largest_company': 'Example Inc.' } ]) # A less elegant way to write the same query: this uses a LEFT OUTER # JOIN and an IS NULL, inside a WHERE NOT IN which is probably less # efficient than EXISTS. self.assertCountEqual( qs.filter(is_point_of_contact=True).values('pk'), Employee.objects.exclude(company_point_of_contact_set=None).values('pk') ) def test_subquery_eq(self): qs = Employee.objects.annotate( is_ceo=Exists(Company.objects.filter(ceo=OuterRef('pk'))), is_point_of_contact=Exists( Company.objects.filter(point_of_contact=OuterRef('pk')), ), small_company=Exists( queryset=Company.objects.filter(num_employees__lt=200), ), ).filter(is_ceo=True, is_point_of_contact=False, small_company=True) self.assertNotEqual( qs.query.annotations['is_ceo'], qs.query.annotations['is_point_of_contact'], ) self.assertNotEqual( qs.query.annotations['is_ceo'], qs.query.annotations['small_company'], ) def test_in_subquery(self): # This is a contrived test (and you really wouldn't write this query), # but it is a succinct way to test the __in=Subquery() construct. 
small_companies = Company.objects.filter(num_employees__lt=200).values('pk') subquery_test = Company.objects.filter(pk__in=Subquery(small_companies)) self.assertCountEqual(subquery_test, [self.foobar_ltd, self.gmbh]) subquery_test2 = Company.objects.filter(pk=Subquery(small_companies.filter(num_employees=3))) self.assertCountEqual(subquery_test2, [self.foobar_ltd]) def test_uuid_pk_subquery(self): u = UUIDPK.objects.create() UUID.objects.create(uuid_fk=u) qs = UUIDPK.objects.filter(id__in=Subquery(UUID.objects.values('uuid_fk__id'))) self.assertCountEqual(qs, [u]) def test_nested_subquery(self): inner = Company.objects.filter(point_of_contact=OuterRef('pk')) outer = Employee.objects.annotate(is_point_of_contact=Exists(inner)) contrived = Employee.objects.annotate( is_point_of_contact=Subquery( outer.filter(pk=OuterRef('pk')).values('is_point_of_contact'), output_field=BooleanField(), ), ) self.assertCountEqual(contrived.values_list(), outer.values_list()) def test_nested_subquery_join_outer_ref(self): inner = Employee.objects.filter(pk=OuterRef('ceo__pk')).values('pk') qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( ceo__in=inner, ceo__pk=OuterRef('pk'), ).values('pk'), ), ) self.assertSequenceEqual( qs.values_list('ceo_company', flat=True), [self.example_inc.pk, self.foobar_ltd.pk, self.gmbh.pk], ) def test_nested_subquery_outer_ref_2(self): first = Time.objects.create(time='09:00') second = Time.objects.create(time='17:00') third = Time.objects.create(time='21:00') SimulationRun.objects.bulk_create([ SimulationRun(start=first, end=second, midpoint='12:00'), SimulationRun(start=first, end=third, midpoint='15:00'), SimulationRun(start=second, end=first, midpoint='00:00'), ]) inner = Time.objects.filter(time=OuterRef(OuterRef('time')), pk=OuterRef('start')).values('time') middle = SimulationRun.objects.annotate(other=Subquery(inner)).values('other')[:1] outer = Time.objects.annotate(other=Subquery(middle, output_field=TimeField())) # This is a contrived example. It exercises the double OuterRef form. self.assertCountEqual(outer, [first, second, third]) def test_nested_subquery_outer_ref_with_autofield(self): first = Time.objects.create(time='09:00') second = Time.objects.create(time='17:00') SimulationRun.objects.create(start=first, end=second, midpoint='12:00') inner = SimulationRun.objects.filter(start=OuterRef(OuterRef('pk'))).values('start') middle = Time.objects.annotate(other=Subquery(inner)).values('other')[:1] outer = Time.objects.annotate(other=Subquery(middle, output_field=IntegerField())) # This exercises the double OuterRef form with AutoField as pk. 
self.assertCountEqual(outer, [first, second]) def test_annotations_within_subquery(self): Company.objects.filter(num_employees__lt=50).update(ceo=Employee.objects.get(firstname='Frank')) inner = Company.objects.filter( ceo=OuterRef('pk') ).values('ceo').annotate(total_employees=Sum('num_employees')).values('total_employees') outer = Employee.objects.annotate(total_employees=Subquery(inner)).filter(salary__lte=Subquery(inner)) self.assertSequenceEqual( outer.order_by('-total_employees').values('salary', 'total_employees'), [{'salary': 10, 'total_employees': 2300}, {'salary': 20, 'total_employees': 35}], ) def test_subquery_references_joined_table_twice(self): inner = Company.objects.filter( num_chairs__gte=OuterRef('ceo__salary'), num_employees__gte=OuterRef('point_of_contact__salary'), ) # Another contrived example (there is no need to have a subquery here) outer = Company.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertFalse(outer.exists()) def test_subquery_filter_by_aggregate(self): Number.objects.create(integer=1000, float=1.2) Employee.objects.create(salary=1000) qs = Number.objects.annotate( min_valuable_count=Subquery( Employee.objects.filter( salary=OuterRef('integer'), ).annotate(cnt=Count('salary')).filter(cnt__gt=0).values('cnt')[:1] ), ) self.assertEqual(qs.get().float, 1.2) def test_subquery_filter_by_lazy(self): self.max.manager = Manager.objects.create(name='Manager') self.max.save() max_manager = SimpleLazyObject( lambda: Manager.objects.get(pk=self.max.manager.pk) ) qs = Company.objects.annotate( ceo_manager=Subquery( Employee.objects.filter( lastname=OuterRef('ceo__lastname'), ).values('manager'), ), ).filter(ceo_manager=max_manager) self.assertEqual(qs.get(), self.gmbh) def test_aggregate_subquery_annotation(self): with self.assertNumQueries(1) as ctx: aggregate = Company.objects.annotate( ceo_salary=Subquery( Employee.objects.filter( id=OuterRef('ceo_id'), ).values('salary') ), ).aggregate( ceo_salary_gt_20=Count('pk', filter=Q(ceo_salary__gt=20)), ) self.assertEqual(aggregate, {'ceo_salary_gt_20': 1}) # Aggregation over a subquery annotation doesn't annotate the subquery # twice in the inner query. sql = ctx.captured_queries[0]['sql'] self.assertLessEqual(sql.count('SELECT'), 3) # GROUP BY isn't required to aggregate over a query that doesn't # contain nested aggregates. 
self.assertNotIn('GROUP BY', sql) @skipUnlessDBFeature('supports_over_clause') def test_aggregate_rawsql_annotation(self): with self.assertNumQueries(1) as ctx: aggregate = Company.objects.annotate( salary=RawSQL('SUM(num_chairs) OVER (ORDER BY num_employees)', []), ).aggregate( count=Count('pk'), ) self.assertEqual(aggregate, {'count': 3}) sql = ctx.captured_queries[0]['sql'] self.assertNotIn('GROUP BY', sql) def test_explicit_output_field(self): class FuncA(Func): output_field = CharField() class FuncB(Func): pass expr = FuncB(FuncA()) self.assertEqual(expr.output_field, FuncA.output_field) def test_outerref_mixed_case_table_name(self): inner = Result.objects.filter(result_time__gte=OuterRef('experiment__assigned')) outer = Result.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertFalse(outer.exists()) def test_outerref_with_operator(self): inner = Company.objects.filter(num_employees=OuterRef('ceo__salary') + 2) outer = Company.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertEqual(outer.get().name, 'Test GmbH') def test_nested_outerref_with_function(self): self.gmbh.point_of_contact = Employee.objects.get(lastname='Meyer') self.gmbh.save() inner = Employee.objects.filter( lastname__startswith=Left(OuterRef(OuterRef('lastname')), 1), ) qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( point_of_contact__in=inner, ceo__pk=OuterRef('pk'), ).values('name'), ), ).filter(ceo_company__isnull=False) self.assertEqual(qs.get().ceo_company, 'Test GmbH') def test_annotation_with_outerref(self): gmbh_salary = Company.objects.annotate( max_ceo_salary_raise=Subquery( Company.objects.annotate( salary_raise=OuterRef('num_employees') + F('num_employees'), ).order_by('-salary_raise').values('salary_raise')[:1], output_field=IntegerField(), ), ).get(pk=self.gmbh.pk) self.assertEqual(gmbh_salary.max_ceo_salary_raise, 2332) def test_annotation_with_nested_outerref(self): self.gmbh.point_of_contact = Employee.objects.get(lastname='Meyer') self.gmbh.save() inner = Employee.objects.annotate( outer_lastname=OuterRef(OuterRef('lastname')), ).filter(lastname__startswith=Left('outer_lastname', 1)) qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( point_of_contact__in=inner, ceo__pk=OuterRef('pk'), ).values('name'), ), ).filter(ceo_company__isnull=False) self.assertEqual(qs.get().ceo_company, 'Test GmbH') def test_pickle_expression(self): expr = Value(1) expr.convert_value # populate cached property self.assertEqual(pickle.loads(pickle.dumps(expr)), expr) def test_incorrect_field_in_F_expression(self): with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."): list(Employee.objects.filter(firstname=F('nope'))) def test_incorrect_joined_field_in_F_expression(self): with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."): list(Company.objects.filter(ceo__pk=F('point_of_contact__nope'))) def test_exists_in_filter(self): inner = Company.objects.filter(ceo=OuterRef('pk')).values('pk') qs1 = Employee.objects.filter(Exists(inner)) qs2 = Employee.objects.annotate(found=Exists(inner)).filter(found=True) self.assertCountEqual(qs1, qs2) self.assertFalse(Employee.objects.exclude(Exists(inner)).exists()) self.assertCountEqual(qs2, Employee.objects.exclude(~Exists(inner))) def test_subquery_in_filter(self): inner = Company.objects.filter(ceo=OuterRef('pk')).values('based_in_eu') self.assertSequenceEqual( Employee.objects.filter(Subquery(inner)), [self.foobar_ltd.ceo], ) def 
test_subquery_group_by_outerref_in_filter(self): inner = Company.objects.annotate( employee=OuterRef('pk'), ).values('employee').annotate( min_num_chairs=Min('num_chairs'), ).values('ceo') self.assertIs(Employee.objects.filter(pk__in=Subquery(inner)).exists(), True) def test_case_in_filter_if_boolean_output_field(self): is_ceo = Company.objects.filter(ceo=OuterRef('pk')) is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) qs = Employee.objects.filter( Case( When(Exists(is_ceo), then=True), When(Exists(is_poc), then=True), default=False, output_field=BooleanField(), ), ) self.assertCountEqual(qs, [self.example_inc.ceo, self.foobar_ltd.ceo, self.max]) def test_boolean_expression_combined(self): is_ceo = Company.objects.filter(ceo=OuterRef('pk')) is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) self.gmbh.point_of_contact = self.max self.gmbh.save() self.assertCountEqual( Employee.objects.filter(Exists(is_ceo) | Exists(is_poc)), [self.example_inc.ceo, self.foobar_ltd.ceo, self.max], ) self.assertCountEqual( Employee.objects.filter(Exists(is_ceo) & Exists(is_poc)), [self.max], ) self.assertCountEqual( Employee.objects.filter(Exists(is_ceo) & Q(salary__gte=30)), [self.max], ) self.assertCountEqual( Employee.objects.filter(Exists(is_poc) | Q(salary__lt=15)), [self.example_inc.ceo, self.max], ) self.assertCountEqual( Employee.objects.filter(Q(salary__gte=30) & Exists(is_ceo)), [self.max], ) self.assertCountEqual( Employee.objects.filter(Q(salary__lt=15) | Exists(is_poc)), [self.example_inc.ceo, self.max], ) def test_boolean_expression_combined_with_empty_Q(self): is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) self.gmbh.point_of_contact = self.max self.gmbh.save() tests = [ Exists(is_poc) & Q(), Q() & Exists(is_poc), Exists(is_poc) | Q(), Q() | Exists(is_poc), Q(Exists(is_poc)) & Q(), Q() & Q(Exists(is_poc)), Q(Exists(is_poc)) | Q(), Q() | Q(Exists(is_poc)), ] for conditions in tests: with self.subTest(conditions): self.assertCountEqual(Employee.objects.filter(conditions), [self.max]) def test_boolean_expression_in_Q(self): is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) self.gmbh.point_of_contact = self.max self.gmbh.save() self.assertCountEqual(Employee.objects.filter(Q(Exists(is_poc))), [self.max]) class IterableLookupInnerExpressionsTests(TestCase): @classmethod def setUpTestData(cls): ceo = Employee.objects.create(firstname='Just', lastname='Doit', salary=30) # MySQL requires that the values calculated for expressions don't pass # outside of the field's range, so it's inconvenient to use the values # in the more general tests. cls.c5020 = Company.objects.create(name='5020 Ltd', num_employees=50, num_chairs=20, ceo=ceo) cls.c5040 = Company.objects.create(name='5040 Ltd', num_employees=50, num_chairs=40, ceo=ceo) cls.c5050 = Company.objects.create(name='5050 Ltd', num_employees=50, num_chairs=50, ceo=ceo) cls.c5060 = Company.objects.create(name='5060 Ltd', num_employees=50, num_chairs=60, ceo=ceo) cls.c99300 = Company.objects.create(name='99300 Ltd', num_employees=99, num_chairs=300, ceo=ceo) def test_in_lookup_allows_F_expressions_and_expressions_for_integers(self): # __in lookups can use F() expressions for integers. 
queryset = Company.objects.filter(num_employees__in=([F('num_chairs') - 10])) self.assertSequenceEqual(queryset, [self.c5060]) self.assertCountEqual( Company.objects.filter(num_employees__in=([F('num_chairs') - 10, F('num_chairs') + 10])), [self.c5040, self.c5060], ) self.assertCountEqual( Company.objects.filter( num_employees__in=([F('num_chairs') - 10, F('num_chairs'), F('num_chairs') + 10]) ), [self.c5040, self.c5050, self.c5060], ) def test_expressions_in_lookups_join_choice(self): midpoint = datetime.time(13, 0) t1 = Time.objects.create(time=datetime.time(12, 0)) t2 = Time.objects.create(time=datetime.time(14, 0)) s1 = SimulationRun.objects.create(start=t1, end=t2, midpoint=midpoint) SimulationRun.objects.create(start=t1, end=None, midpoint=midpoint) SimulationRun.objects.create(start=None, end=t2, midpoint=midpoint) SimulationRun.objects.create(start=None, end=None, midpoint=midpoint) queryset = SimulationRun.objects.filter(midpoint__range=[F('start__time'), F('end__time')]) self.assertSequenceEqual(queryset, [s1]) for alias in queryset.query.alias_map.values(): if isinstance(alias, Join): self.assertEqual(alias.join_type, constants.INNER) queryset = SimulationRun.objects.exclude(midpoint__range=[F('start__time'), F('end__time')]) self.assertQuerysetEqual(queryset, [], ordered=False) for alias in queryset.query.alias_map.values(): if isinstance(alias, Join): self.assertEqual(alias.join_type, constants.LOUTER) def test_range_lookup_allows_F_expressions_and_expressions_for_integers(self): # Range lookups can use F() expressions for integers. Company.objects.filter(num_employees__exact=F("num_chairs")) self.assertCountEqual( Company.objects.filter(num_employees__range=(F('num_chairs'), 100)), [self.c5020, self.c5040, self.c5050], ) self.assertCountEqual( Company.objects.filter(num_employees__range=(F('num_chairs') - 10, F('num_chairs') + 10)), [self.c5040, self.c5050, self.c5060], ) self.assertCountEqual( Company.objects.filter(num_employees__range=(F('num_chairs') - 10, 100)), [self.c5020, self.c5040, self.c5050, self.c5060], ) self.assertCountEqual( Company.objects.filter(num_employees__range=(1, 100)), [self.c5020, self.c5040, self.c5050, self.c5060, self.c99300], ) def test_range_lookup_namedtuple(self): EmployeeRange = namedtuple('EmployeeRange', ['minimum', 'maximum']) qs = Company.objects.filter( num_employees__range=EmployeeRange(minimum=51, maximum=100), ) self.assertSequenceEqual(qs, [self.c99300]) @unittest.skipUnless(connection.vendor == 'sqlite', "This defensive test only works on databases that don't validate parameter types") def test_complex_expressions_do_not_introduce_sql_injection_via_untrusted_string_inclusion(self): """ This tests that SQL injection isn't possible using compilation of expressions in iterable filters, as their compilation happens before the main query compilation. It's limited to SQLite, as PostgreSQL, Oracle and other vendors have defense in depth against this by type checking. Testing against SQLite (the most permissive of the built-in databases) demonstrates that the problem doesn't exist while keeping the test simple. 
""" queryset = Company.objects.filter(name__in=[F('num_chairs') + '1)) OR ((1==1']) self.assertQuerysetEqual(queryset, [], ordered=False) def test_in_lookup_allows_F_expressions_and_expressions_for_datetimes(self): start = datetime.datetime(2016, 2, 3, 15, 0, 0) end = datetime.datetime(2016, 2, 5, 15, 0, 0) experiment_1 = Experiment.objects.create( name='Integrity testing', assigned=start.date(), start=start, end=end, completed=end.date(), estimated_time=end - start, ) experiment_2 = Experiment.objects.create( name='Taste testing', assigned=start.date(), start=start, end=end, completed=end.date(), estimated_time=end - start, ) r1 = Result.objects.create( experiment=experiment_1, result_time=datetime.datetime(2016, 2, 4, 15, 0, 0), ) Result.objects.create( experiment=experiment_1, result_time=datetime.datetime(2016, 3, 10, 2, 0, 0), ) Result.objects.create( experiment=experiment_2, result_time=datetime.datetime(2016, 1, 8, 5, 0, 0), ) within_experiment_time = [F('experiment__start'), F('experiment__end')] queryset = Result.objects.filter(result_time__range=within_experiment_time) self.assertSequenceEqual(queryset, [r1]) within_experiment_time = [F('experiment__start'), F('experiment__end')] queryset = Result.objects.filter(result_time__range=within_experiment_time) self.assertSequenceEqual(queryset, [r1]) class FTests(SimpleTestCase): def test_deepcopy(self): f = F("foo") g = deepcopy(f) self.assertEqual(f.name, g.name) def test_deconstruct(self): f = F('name') path, args, kwargs = f.deconstruct() self.assertEqual(path, 'django.db.models.expressions.F') self.assertEqual(args, (f.name,)) self.assertEqual(kwargs, {}) def test_equal(self): f = F('name') same_f = F('name') other_f = F('username') self.assertEqual(f, same_f) self.assertNotEqual(f, other_f) def test_hash(self): d = {F('name'): 'Bob'} self.assertIn(F('name'), d) self.assertEqual(d[F('name')], 'Bob') def test_not_equal_Value(self): f = F('name') value = Value('name') self.assertNotEqual(f, value) self.assertNotEqual(value, f) class ExpressionsTests(TestCase): def test_F_reuse(self): f = F('id') n = Number.objects.create(integer=-1) c = Company.objects.create( name="Example Inc.", num_employees=2300, num_chairs=5, ceo=Employee.objects.create(firstname="Joe", lastname="Smith") ) c_qs = Company.objects.filter(id=f) self.assertEqual(c_qs.get(), c) # Reuse the same F-object for another queryset n_qs = Number.objects.filter(id=f) self.assertEqual(n_qs.get(), n) # The original query still works correctly self.assertEqual(c_qs.get(), c) def test_patterns_escape(self): r""" Special characters (e.g. 
%, _ and \) stored in database are properly escaped when using a pattern lookup with an expression refs #16731 """ Employee.objects.bulk_create([ Employee(firstname="Johnny", lastname="%John"), Employee(firstname="Jean-Claude", lastname="Claud_"), Employee(firstname="Jean-Claude", lastname="Claude%"), Employee(firstname="Johnny", lastname="Joh\\n"), Employee(firstname="Johnny", lastname="_ohn"), ]) claude = Employee.objects.create(firstname='Jean-Claude', lastname='Claude') john = Employee.objects.create(firstname='Johnny', lastname='John') john_sign = Employee.objects.create(firstname='%Joh\\nny', lastname='%Joh\\n') self.assertCountEqual( Employee.objects.filter(firstname__contains=F('lastname')), [john_sign, john, claude], ) self.assertCountEqual( Employee.objects.filter(firstname__startswith=F('lastname')), [john_sign, john], ) self.assertSequenceEqual( Employee.objects.filter(firstname__endswith=F('lastname')), [claude], ) def test_insensitive_patterns_escape(self): r""" Special characters (e.g. %, _ and \) stored in database are properly escaped when using a case insensitive pattern lookup with an expression -- refs #16731 """ Employee.objects.bulk_create([ Employee(firstname="Johnny", lastname="%john"), Employee(firstname="Jean-Claude", lastname="claud_"), Employee(firstname="Jean-Claude", lastname="claude%"), Employee(firstname="Johnny", lastname="joh\\n"), Employee(firstname="Johnny", lastname="_ohn"), ]) claude = Employee.objects.create(firstname='Jean-Claude', lastname='claude') john = Employee.objects.create(firstname='Johnny', lastname='john') john_sign = Employee.objects.create(firstname='%Joh\\nny', lastname='%joh\\n') self.assertCountEqual( Employee.objects.filter(firstname__icontains=F('lastname')), [john_sign, john, claude], ) self.assertCountEqual( Employee.objects.filter(firstname__istartswith=F('lastname')), [john_sign, john], ) self.assertSequenceEqual( Employee.objects.filter(firstname__iendswith=F('lastname')), [claude], ) @isolate_apps('expressions') class SimpleExpressionTests(SimpleTestCase): def test_equal(self): self.assertEqual(Expression(), Expression()) self.assertEqual( Expression(IntegerField()), Expression(output_field=IntegerField()) ) self.assertEqual(Expression(IntegerField()), mock.ANY) self.assertNotEqual( Expression(IntegerField()), Expression(CharField()) ) class TestModel(Model): field = IntegerField() other_field = IntegerField() self.assertNotEqual( Expression(TestModel._meta.get_field('field')), Expression(TestModel._meta.get_field('other_field')), ) def test_hash(self): self.assertEqual(hash(Expression()), hash(Expression())) self.assertEqual( hash(Expression(IntegerField())), hash(Expression(output_field=IntegerField())) ) self.assertNotEqual( hash(Expression(IntegerField())), hash(Expression(CharField())), ) class TestModel(Model): field = IntegerField() other_field = IntegerField() self.assertNotEqual( hash(Expression(TestModel._meta.get_field('field'))), hash(Expression(TestModel._meta.get_field('other_field'))), ) class ExpressionsNumericTests(TestCase): @classmethod def setUpTestData(cls): Number(integer=-1).save() Number(integer=42).save() Number(integer=1337).save() Number.objects.update(float=F('integer')) def test_fill_with_value_from_same_object(self): """ We can fill a value in all objects with an other value of the same object. 
""" self.assertQuerysetEqual( Number.objects.all(), [(-1, -1), (42, 42), (1337, 1337)], lambda n: (n.integer, round(n.float)), ordered=False ) def test_increment_value(self): """ We can increment a value of all objects in a query set. """ self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2) self.assertQuerysetEqual( Number.objects.all(), [(-1, -1), (43, 42), (1338, 1337)], lambda n: (n.integer, round(n.float)), ordered=False ) def test_filter_not_equals_other_field(self): """ We can filter for objects, where a value is not equals the value of an other field. """ self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2) self.assertQuerysetEqual( Number.objects.exclude(float=F('integer')), [(43, 42), (1338, 1337)], lambda n: (n.integer, round(n.float)), ordered=False ) def test_complex_expressions(self): """ Complex expressions of different connection types are possible. """ n = Number.objects.create(integer=10, float=123.45) self.assertEqual(Number.objects.filter(pk=n.pk).update( float=F('integer') + F('float') * 2), 1) self.assertEqual(Number.objects.get(pk=n.pk).integer, 10) self.assertEqual(Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3)) class ExpressionOperatorTests(TestCase): @classmethod def setUpTestData(cls): cls.n = Number.objects.create(integer=42, float=15.5) cls.n1 = Number.objects.create(integer=-42, float=-15.5) def test_lefthand_addition(self): # LH Addition of floats and integers Number.objects.filter(pk=self.n.pk).update( integer=F('integer') + 15, float=F('float') + 42.7 ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3)) def test_lefthand_subtraction(self): # LH Subtraction of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') - 15, float=F('float') - 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3)) def test_lefthand_multiplication(self): # Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') * 15, float=F('float') * 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3)) def test_lefthand_division(self): # LH Division of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') / 2, float=F('float') / 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3)) def test_lefthand_modulo(self): # LH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') % 20) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2) def test_lefthand_bitwise_and(self): # LH Bitwise ands on integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer').bitand(56)) Number.objects.filter(pk=self.n1.pk).update(integer=F('integer').bitand(-56)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -64) def test_lefthand_bitwise_left_shift_operator(self): Number.objects.update(integer=F('integer').bitleftshift(2)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 168) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -168) def 
test_lefthand_bitwise_right_shift_operator(self): Number.objects.update(integer=F('integer').bitrightshift(2)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 10) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -11) def test_lefthand_bitwise_or(self): # LH Bitwise or on integers Number.objects.update(integer=F('integer').bitor(48)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -10) def test_lefthand_transformed_field_bitwise_or(self): Employee.objects.create(firstname='Max', lastname='Mustermann') with register_lookup(CharField, Length): qs = Employee.objects.annotate(bitor=F('lastname__length').bitor(48)) self.assertEqual(qs.get().bitor, 58) def test_lefthand_power(self): # LH Power arithmetic operation on floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') ** 2, float=F('float') ** 1.5) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 1764) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(61.02, places=2)) def test_lefthand_bitwise_xor(self): Number.objects.update(integer=F('integer').bitxor(48)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 26) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -26) def test_lefthand_bitwise_xor_null(self): employee = Employee.objects.create(firstname='John', lastname='Doe') Employee.objects.update(salary=F('salary').bitxor(48)) employee.refresh_from_db() self.assertIsNone(employee.salary) @unittest.skipUnless(connection.vendor == 'oracle', "Oracle doesn't support bitwise XOR.") def test_lefthand_bitwise_xor_not_supported(self): msg = 'Bitwise XOR is not supported in Oracle.' with self.assertRaisesMessage(NotSupportedError, msg): Number.objects.update(integer=F('integer').bitxor(48)) def test_right_hand_addition(self): # Right hand operators Number.objects.filter(pk=self.n.pk).update(integer=15 + F('integer'), float=42.7 + F('float')) # RH Addition of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3)) def test_right_hand_subtraction(self): Number.objects.filter(pk=self.n.pk).update(integer=15 - F('integer'), float=42.7 - F('float')) # RH Subtraction of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3)) def test_right_hand_multiplication(self): # RH Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=15 * F('integer'), float=42.7 * F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3)) def test_right_hand_division(self): # RH Division of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=640 / F('integer'), float=42.7 / F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3)) def test_right_hand_modulo(self): # RH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=69 % F('integer')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) def test_righthand_power(self): # RH Power arithmetic operation on floats and integers Number.objects.filter(pk=self.n.pk).update(integer=2 ** F('integer'), float=1.5 ** F('float')) 
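        # Sanity check for the expected values below: with integer=42 and
        # float=15.5, 2 ** 42 == 4398046511104 and 1.5 ** 15.5 is
        # approximately 536.308.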
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 4398046511104) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(536.308, places=3)) class FTimeDeltaTests(TestCase): @classmethod def setUpTestData(cls): cls.sday = sday = datetime.date(2010, 6, 25) cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000) midnight = datetime.time(0) delta0 = datetime.timedelta(0) delta1 = datetime.timedelta(microseconds=253000) delta2 = datetime.timedelta(seconds=44) delta3 = datetime.timedelta(hours=21, minutes=8) delta4 = datetime.timedelta(days=10) delta5 = datetime.timedelta(days=90) # Test data is set so that deltas and delays will be # strictly increasing. cls.deltas = [] cls.delays = [] cls.days_long = [] # e0: started same day as assigned, zero duration end = stime + delta0 cls.e0 = Experiment.objects.create( name='e0', assigned=sday, start=stime, end=end, completed=end.date(), estimated_time=delta0, ) cls.deltas.append(delta0) cls.delays.append(cls.e0.start - datetime.datetime.combine(cls.e0.assigned, midnight)) cls.days_long.append(cls.e0.completed - cls.e0.assigned) # e1: started one day after assigned, tiny duration, data # set so that end time has no fractional seconds, which # tests an edge case on sqlite. delay = datetime.timedelta(1) end = stime + delay + delta1 e1 = Experiment.objects.create( name='e1', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta1, ) cls.deltas.append(delta1) cls.delays.append(e1.start - datetime.datetime.combine(e1.assigned, midnight)) cls.days_long.append(e1.completed - e1.assigned) # e2: started three days after assigned, small duration end = stime + delta2 e2 = Experiment.objects.create( name='e2', assigned=sday - datetime.timedelta(3), start=stime, end=end, completed=end.date(), estimated_time=datetime.timedelta(hours=1), ) cls.deltas.append(delta2) cls.delays.append(e2.start - datetime.datetime.combine(e2.assigned, midnight)) cls.days_long.append(e2.completed - e2.assigned) # e3: started four days after assigned, medium duration delay = datetime.timedelta(4) end = stime + delay + delta3 e3 = Experiment.objects.create( name='e3', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta3, ) cls.deltas.append(delta3) cls.delays.append(e3.start - datetime.datetime.combine(e3.assigned, midnight)) cls.days_long.append(e3.completed - e3.assigned) # e4: started 10 days after assignment, long duration end = stime + delta4 e4 = Experiment.objects.create( name='e4', assigned=sday - datetime.timedelta(10), start=stime, end=end, completed=end.date(), estimated_time=delta4 - datetime.timedelta(1), ) cls.deltas.append(delta4) cls.delays.append(e4.start - datetime.datetime.combine(e4.assigned, midnight)) cls.days_long.append(e4.completed - e4.assigned) # e5: started a month after assignment, very long duration delay = datetime.timedelta(30) end = stime + delay + delta5 e5 = Experiment.objects.create( name='e5', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta5, ) cls.deltas.append(delta5) cls.delays.append(e5.start - datetime.datetime.combine(e5.assigned, midnight)) cls.days_long.append(e5.completed - e5.assigned) cls.expnames = [e.name for e in Experiment.objects.all()] def test_multiple_query_compilation(self): # Ticket #21643 queryset = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1)) q1 = str(queryset.query) q2 = str(queryset.query) self.assertEqual(q1, q2) def test_query_clone(self): # Ticket 
#21643 - Crash when compiling query more than once qs = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1)) qs2 = qs.all() list(qs) list(qs2) # Intentionally no assert def test_delta_add(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.filter(end__lt=F('start') + delta)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(end__lt=delta + F('start'))] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(end__lte=F('start') + delta)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_delta_subtract(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.filter(start__gt=F('end') - delta)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(start__gte=F('end') - delta)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_exclude(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.exclude(end__lt=F('start') + delta)] self.assertEqual(test_set, self.expnames[i:]) test_set = [e.name for e in Experiment.objects.exclude(end__lte=F('start') + delta)] self.assertEqual(test_set, self.expnames[i + 1:]) def test_date_comparison(self): for i, days in enumerate(self.days_long): test_set = [e.name for e in Experiment.objects.filter(completed__lt=F('assigned') + days)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(completed__lte=F('assigned') + days)] self.assertEqual(test_set, self.expnames[:i + 1]) @skipUnlessDBFeature("supports_mixed_date_datetime_comparisons") def test_mixed_comparisons1(self): for i, delay in enumerate(self.delays): test_set = [e.name for e in Experiment.objects.filter(assigned__gt=F('start') - delay)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(assigned__gte=F('start') - delay)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_mixed_comparisons2(self): for i, delay in enumerate(self.delays): delay = datetime.timedelta(delay.days) test_set = [e.name for e in Experiment.objects.filter(start__lt=F('assigned') + delay)] self.assertEqual(test_set, self.expnames[:i]) test_set = [ e.name for e in Experiment.objects.filter(start__lte=F('assigned') + delay + datetime.timedelta(1)) ] self.assertEqual(test_set, self.expnames[:i + 1]) def test_delta_update(self): for delta in self.deltas: exps = Experiment.objects.all() expected_durations = [e.duration() for e in exps] expected_starts = [e.start + delta for e in exps] expected_ends = [e.end + delta for e in exps] Experiment.objects.update(start=F('start') + delta, end=F('end') + delta) exps = Experiment.objects.all() new_starts = [e.start for e in exps] new_ends = [e.end for e in exps] new_durations = [e.duration() for e in exps] self.assertEqual(expected_starts, new_starts) self.assertEqual(expected_ends, new_ends) self.assertEqual(expected_durations, new_durations) def test_invalid_operator(self): with self.assertRaises(DatabaseError): list(Experiment.objects.filter(start=F('start') * datetime.timedelta(0))) def test_durationfield_add(self): zeros = [e.name for e in Experiment.objects.filter(start=F('start') + F('estimated_time'))] self.assertEqual(zeros, ['e0']) end_less = [e.name for e in Experiment.objects.filter(end__lt=F('start') + F('estimated_time'))] self.assertEqual(end_less, ['e2']) delta_math = [ e.name for e in 
Experiment.objects.filter(end__gte=F('start') + F('estimated_time') + datetime.timedelta(hours=1)) ] self.assertEqual(delta_math, ['e4']) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('start') + Value(None, output_field=DurationField()), output_field=DateTimeField(), )) self.assertIsNone(queryset.first().shifted) def test_durationfield_multiply_divide(self): Experiment.objects.update(scalar=2) tests = [ (Decimal('2'), 2), (F('scalar'), 2), (2, 2), (3.2, 3.2), ] for expr, scalar in tests: with self.subTest(expr=expr): qs = Experiment.objects.annotate( multiplied=ExpressionWrapper( expr * F('estimated_time'), output_field=DurationField(), ), divided=ExpressionWrapper( F('estimated_time') / expr, output_field=DurationField(), ), ) for experiment in qs: self.assertEqual( experiment.multiplied, experiment.estimated_time * scalar, ) self.assertEqual( experiment.divided, experiment.estimated_time / scalar, ) def test_duration_expressions(self): for delta in self.deltas: qs = Experiment.objects.annotate(duration=F('estimated_time') + delta) for obj in qs: self.assertEqual(obj.duration, obj.estimated_time + delta) @skipUnlessDBFeature('supports_temporal_subtraction') def test_date_subtraction(self): queryset = Experiment.objects.annotate( completion_duration=F('completed') - F('assigned'), ) at_least_5_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=5))} self.assertEqual(at_least_5_days, {'e3', 'e4', 'e5'}) at_least_120_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=120))} self.assertEqual(at_least_120_days, {'e5'}) less_than_5_days = {e.name for e in queryset.filter(completion_duration__lt=datetime.timedelta(days=5))} self.assertEqual(less_than_5_days, {'e0', 'e1', 'e2'}) queryset = Experiment.objects.annotate( difference=F('completed') - Value(None, output_field=DateField()), ) self.assertIsNone(queryset.first().difference) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('completed') - Value(None, output_field=DurationField()), output_field=DateField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_date_subquery_subtraction(self): subquery = Experiment.objects.filter(pk=OuterRef('pk')).values('completed') queryset = Experiment.objects.annotate( difference=subquery - F('completed'), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature('supports_temporal_subtraction') def test_date_case_subtraction(self): queryset = Experiment.objects.annotate( date_case=Case( When(Q(name='e0'), then=F('completed')), output_field=DateField(), ), completed_value=Value( self.e0.completed, output_field=DateField(), ), difference=F('date_case') - F('completed_value'), ).filter(difference=datetime.timedelta()) self.assertEqual(queryset.get(), self.e0) @skipUnlessDBFeature('supports_temporal_subtraction') def test_time_subtraction(self): Time.objects.create(time=datetime.time(12, 30, 15, 2345)) queryset = Time.objects.annotate( difference=F('time') - Value(datetime.time(11, 15, 0)), ) self.assertEqual( queryset.get().difference, datetime.timedelta(hours=1, minutes=15, seconds=15, microseconds=2345) ) queryset = Time.objects.annotate( difference=F('time') - Value(None, output_field=TimeField()), ) self.assertIsNone(queryset.first().difference) queryset = Time.objects.annotate(shifted=ExpressionWrapper( F('time') - Value(None, output_field=DurationField()), output_field=TimeField(), )) 
self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_time_subquery_subtraction(self): Time.objects.create(time=datetime.time(12, 30, 15, 2345)) subquery = Time.objects.filter(pk=OuterRef('pk')).values('time') queryset = Time.objects.annotate( difference=subquery - F('time'), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subtraction(self): under_estimate = [ e.name for e in Experiment.objects.filter(estimated_time__gt=F('end') - F('start')) ] self.assertEqual(under_estimate, ['e2']) over_estimate = [ e.name for e in Experiment.objects.filter(estimated_time__lt=F('end') - F('start')) ] self.assertEqual(over_estimate, ['e4']) queryset = Experiment.objects.annotate( difference=F('start') - Value(None, output_field=DateTimeField()), ) self.assertIsNone(queryset.first().difference) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('start') - Value(None, output_field=DurationField()), output_field=DateTimeField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subquery_subtraction(self): subquery = Experiment.objects.filter(pk=OuterRef('pk')).values('start') queryset = Experiment.objects.annotate( difference=subquery - F('start'), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subtraction_microseconds(self): delta = datetime.timedelta(microseconds=8999999999999999) Experiment.objects.update(end=F('start') + delta) qs = Experiment.objects.annotate(delta=F('end') - F('start')) for e in qs: self.assertEqual(e.delta, delta) def test_duration_with_datetime(self): # Exclude e1 which has very high precision so we can test this on all # backends regardless of whether or not it supports # microsecond_precision. 
over_estimate = Experiment.objects.exclude(name='e1').filter( completed__gt=self.stime + F('estimated_time'), ).order_by('name') self.assertQuerysetEqual(over_estimate, ['e3', 'e4', 'e5'], lambda e: e.name) def test_duration_with_datetime_microseconds(self): delta = datetime.timedelta(microseconds=8999999999999999) qs = Experiment.objects.annotate(dt=ExpressionWrapper( F('start') + delta, output_field=DateTimeField(), )) for e in qs: self.assertEqual(e.dt, e.start + delta) def test_date_minus_duration(self): more_than_4_days = Experiment.objects.filter( assigned__lt=F('completed') - Value(datetime.timedelta(days=4)) ) self.assertQuerysetEqual(more_than_4_days, ['e3', 'e4', 'e5'], lambda e: e.name) def test_negative_timedelta_update(self): # subtract 30 seconds, 30 minutes, 2 hours and 2 days experiments = Experiment.objects.filter(name='e0').annotate( start_sub_seconds=F('start') + datetime.timedelta(seconds=-30), ).annotate( start_sub_minutes=F('start_sub_seconds') + datetime.timedelta(minutes=-30), ).annotate( start_sub_hours=F('start_sub_minutes') + datetime.timedelta(hours=-2), ).annotate( new_start=F('start_sub_hours') + datetime.timedelta(days=-2), ) expected_start = datetime.datetime(2010, 6, 23, 9, 45, 0) # subtract 30 microseconds experiments = experiments.annotate(new_start=F('new_start') + datetime.timedelta(microseconds=-30)) expected_start += datetime.timedelta(microseconds=+746970) experiments.update(start=F('new_start')) e0 = Experiment.objects.get(name='e0') self.assertEqual(e0.start, expected_start) class ValueTests(TestCase): def test_update_TimeField_using_Value(self): Time.objects.create() Time.objects.update(time=Value(datetime.time(1), output_field=TimeField())) self.assertEqual(Time.objects.get().time, datetime.time(1)) def test_update_UUIDField_using_Value(self): UUID.objects.create() UUID.objects.update(uuid=Value(uuid.UUID('12345678901234567890123456789012'), output_field=UUIDField())) self.assertEqual(UUID.objects.get().uuid, uuid.UUID('12345678901234567890123456789012')) def test_deconstruct(self): value = Value('name') path, args, kwargs = value.deconstruct() self.assertEqual(path, 'django.db.models.expressions.Value') self.assertEqual(args, (value.value,)) self.assertEqual(kwargs, {}) def test_deconstruct_output_field(self): value = Value('name', output_field=CharField()) path, args, kwargs = value.deconstruct() self.assertEqual(path, 'django.db.models.expressions.Value') self.assertEqual(args, (value.value,)) self.assertEqual(len(kwargs), 1) self.assertEqual(kwargs['output_field'].deconstruct(), CharField().deconstruct()) def test_equal(self): value = Value('name') self.assertEqual(value, Value('name')) self.assertNotEqual(value, Value('username')) def test_hash(self): d = {Value('name'): 'Bob'} self.assertIn(Value('name'), d) self.assertEqual(d[Value('name')], 'Bob') def test_equal_output_field(self): value = Value('name', output_field=CharField()) same_value = Value('name', output_field=CharField()) other_value = Value('name', output_field=TimeField()) no_output_field = Value('name') self.assertEqual(value, same_value) self.assertNotEqual(value, other_value) self.assertNotEqual(value, no_output_field) def test_raise_empty_expressionlist(self): msg = 'ExpressionList requires at least one expression' with self.assertRaisesMessage(ValueError, msg): ExpressionList() def test_compile_unresolved(self): # This test might need to be revisited later on if #25425 is enforced. 
compiler = Time.objects.all().query.get_compiler(connection=connection) value = Value('foo') self.assertEqual(value.as_sql(compiler, connection), ('%s', ['foo'])) value = Value('foo', output_field=CharField()) self.assertEqual(value.as_sql(compiler, connection), ('%s', ['foo'])) def test_output_field_decimalfield(self): Time.objects.create() time = Time.objects.annotate(one=Value(1, output_field=DecimalField())).first() self.assertEqual(time.one, 1) def test_resolve_output_field(self): value_types = [ ('str', CharField), (True, BooleanField), (42, IntegerField), (3.14, FloatField), (datetime.date(2019, 5, 15), DateField), (datetime.datetime(2019, 5, 15), DateTimeField), (datetime.time(3, 16), TimeField), (datetime.timedelta(1), DurationField), (Decimal('3.14'), DecimalField), (b'', BinaryField), (uuid.uuid4(), UUIDField), ] for value, ouput_field_type in value_types: with self.subTest(type=type(value)): expr = Value(value) self.assertIsInstance(expr.output_field, ouput_field_type) def test_resolve_output_field_failure(self): msg = 'Cannot resolve expression type, unknown output_field' with self.assertRaisesMessage(FieldError, msg): Value(object()).output_field class ExistsTests(TestCase): def test_optimizations(self): with CaptureQueriesContext(connection) as context: list(Experiment.objects.values(exists=Exists( Experiment.objects.order_by('pk'), )).order_by()) captured_queries = context.captured_queries self.assertEqual(len(captured_queries), 1) captured_sql = captured_queries[0]['sql'] self.assertNotIn( connection.ops.quote_name(Experiment._meta.pk.column), captured_sql, ) self.assertIn( connection.ops.limit_offset_sql(None, 1), captured_sql, ) self.assertNotIn('ORDER BY', captured_sql) class FieldTransformTests(TestCase): @classmethod def setUpTestData(cls): cls.sday = sday = datetime.date(2010, 6, 25) cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000) cls.ex1 = Experiment.objects.create( name='Experiment 1', assigned=sday, completed=sday + datetime.timedelta(2), estimated_time=datetime.timedelta(2), start=stime, end=stime + datetime.timedelta(2), ) def test_month_aggregation(self): self.assertEqual( Experiment.objects.aggregate(month_count=Count('assigned__month')), {'month_count': 1} ) def test_transform_in_values(self): self.assertSequenceEqual( Experiment.objects.values('assigned__month'), [{'assigned__month': 6}], ) def test_multiple_transforms_in_values(self): self.assertSequenceEqual( Experiment.objects.values('end__date__month'), [{'end__date__month': 6}], ) class ReprTests(SimpleTestCase): def test_expressions(self): self.assertEqual( repr(Case(When(a=1))), "<Case: CASE WHEN <Q: (AND: ('a', 1))> THEN Value(None), ELSE Value(None)>" ) self.assertEqual( repr(When(Q(age__gte=18), then=Value('legal'))), "<When: WHEN <Q: (AND: ('age__gte', 18))> THEN Value(legal)>" ) self.assertEqual(repr(Col('alias', 'field')), "Col(alias, field)") self.assertEqual(repr(F('published')), "F(published)") self.assertEqual(repr(F('cost') + F('tax')), "<CombinedExpression: F(cost) + F(tax)>") self.assertEqual( repr(ExpressionWrapper(F('cost') + F('tax'), IntegerField())), "ExpressionWrapper(F(cost) + F(tax))" ) self.assertEqual(repr(Func('published', function='TO_CHAR')), "Func(F(published), function=TO_CHAR)") self.assertEqual(repr(OrderBy(Value(1))), 'OrderBy(Value(1), descending=False)') self.assertEqual(repr(RawSQL('table.col', [])), "RawSQL(table.col, [])") self.assertEqual(repr(Ref('sum_cost', Sum('cost'))), "Ref(sum_cost, Sum(F(cost)))") self.assertEqual(repr(Value(1)), 
"Value(1)") self.assertEqual( repr(ExpressionList(F('col'), F('anothercol'))), 'ExpressionList(F(col), F(anothercol))' ) self.assertEqual( repr(ExpressionList(OrderBy(F('col'), descending=False))), 'ExpressionList(OrderBy(F(col), descending=False))' ) def test_functions(self): self.assertEqual(repr(Coalesce('a', 'b')), "Coalesce(F(a), F(b))") self.assertEqual(repr(Concat('a', 'b')), "Concat(ConcatPair(F(a), F(b)))") self.assertEqual(repr(Length('a')), "Length(F(a))") self.assertEqual(repr(Lower('a')), "Lower(F(a))") self.assertEqual(repr(Substr('a', 1, 3)), "Substr(F(a), Value(1), Value(3))") self.assertEqual(repr(Upper('a')), "Upper(F(a))") def test_aggregates(self): self.assertEqual(repr(Avg('a')), "Avg(F(a))") self.assertEqual(repr(Count('a')), "Count(F(a))") self.assertEqual(repr(Count('*')), "Count('*')") self.assertEqual(repr(Max('a')), "Max(F(a))") self.assertEqual(repr(Min('a')), "Min(F(a))") self.assertEqual(repr(StdDev('a')), "StdDev(F(a), sample=False)") self.assertEqual(repr(Sum('a')), "Sum(F(a))") self.assertEqual(repr(Variance('a', sample=True)), "Variance(F(a), sample=True)") def test_distinct_aggregates(self): self.assertEqual(repr(Count('a', distinct=True)), "Count(F(a), distinct=True)") self.assertEqual(repr(Count('*', distinct=True)), "Count('*', distinct=True)") def test_filtered_aggregates(self): filter = Q(a=1) self.assertEqual(repr(Avg('a', filter=filter)), "Avg(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Count('a', filter=filter)), "Count(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Max('a', filter=filter)), "Max(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Min('a', filter=filter)), "Min(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(StdDev('a', filter=filter)), "StdDev(F(a), filter=(AND: ('a', 1)), sample=False)") self.assertEqual(repr(Sum('a', filter=filter)), "Sum(F(a), filter=(AND: ('a', 1)))") self.assertEqual( repr(Variance('a', sample=True, filter=filter)), "Variance(F(a), filter=(AND: ('a', 1)), sample=True)" ) self.assertEqual( repr(Count('a', filter=filter, distinct=True)), "Count(F(a), distinct=True, filter=(AND: ('a', 1)))" ) class CombinableTests(SimpleTestCase): bitwise_msg = 'Use .bitand() and .bitor() for bitwise logical operations.' 
def test_negation(self): c = Combinable() self.assertEqual(-c, c * -1) def test_and(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): Combinable() & Combinable() def test_or(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): Combinable() | Combinable() def test_reversed_and(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): object() & Combinable() def test_reversed_or(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): object() | Combinable() class CombinedExpressionTests(SimpleTestCase): def test_resolve_output_field(self): tests = [ (IntegerField, AutoField, IntegerField), (AutoField, IntegerField, IntegerField), (IntegerField, DecimalField, DecimalField), (DecimalField, IntegerField, DecimalField), (IntegerField, FloatField, FloatField), (FloatField, IntegerField, FloatField), ] connectors = [Combinable.ADD, Combinable.SUB, Combinable.MUL, Combinable.DIV] for lhs, rhs, combined in tests: for connector in connectors: with self.subTest(lhs=lhs, connector=connector, rhs=rhs, combined=combined): expr = CombinedExpression( Expression(lhs()), connector, Expression(rhs()), ) self.assertIsInstance(expr.output_field, combined) class ExpressionWrapperTests(SimpleTestCase): def test_empty_group_by(self): expr = ExpressionWrapper(Value(3), output_field=IntegerField()) self.assertEqual(expr.get_group_by_cols(alias=None), []) def test_non_empty_group_by(self): value = Value('f') value.output_field = None expr = ExpressionWrapper(Lower(value), output_field=IntegerField()) group_by_cols = expr.get_group_by_cols(alias=None) self.assertEqual(group_by_cols, [expr.expression]) self.assertEqual(group_by_cols[0].output_field, expr.output_field)
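# A minimal usage sketch of the supported bitwise spelling hinted at by
# CombinableTests.bitwise_msg: expressions raise NotImplementedError for
# ``&`` and ``|`` and expose .bitand() / .bitor() instead. The queryset and
# the integer ``flags`` column below are hypothetical stand-ins, not names
# taken from the tests above.
from django.db.models import F


def set_flags(queryset, mask):
    # Set the bits in ``mask`` on every row with a database-side bitwise OR.
    return queryset.update(flags=F('flags').bitor(mask))


def keep_only_flags(queryset, mask):
    # Clear every bit not in ``mask`` with a database-side bitwise AND.
    return queryset.update(flags=F('flags').bitand(mask))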
import datetime import decimal import gettext as gettext_module import os import pickle import re import tempfile from contextlib import contextmanager from importlib import import_module from pathlib import Path from unittest import mock from asgiref.local import Local from django import forms from django.apps import AppConfig from django.conf import settings from django.conf.locale import LANG_INFO from django.conf.urls.i18n import i18n_patterns from django.template import Context, Template from django.test import ( RequestFactory, SimpleTestCase, TestCase, override_settings, ) from django.utils import translation from django.utils.formats import ( date_format, get_format, get_format_modules, iter_format_modules, localize, localize_input, reset_format_cache, sanitize_separators, time_format, ) from django.utils.numberformat import format as nformat from django.utils.safestring import SafeString, mark_safe from django.utils.translation import ( activate, check_for_language, deactivate, get_language, get_language_bidi, get_language_from_request, get_language_info, gettext, gettext_lazy, ngettext, ngettext_lazy, npgettext, npgettext_lazy, pgettext, round_away_from_one, to_language, to_locale, trans_null, trans_real, ) from django.utils.translation.reloader import ( translation_file_changed, watch_for_translation_changes, ) from .forms import CompanyForm, I18nForm, SelectDateForm from .models import Company, TestModel here = os.path.dirname(os.path.abspath(__file__)) extended_locale_paths = settings.LOCALE_PATHS + [ os.path.join(here, 'other', 'locale'), ] class AppModuleStub: def __init__(self, **kwargs): self.__dict__.update(kwargs) @contextmanager def patch_formats(lang, **settings): from django.utils.formats import _format_cache # Populate _format_cache with temporary values for key, value in settings.items(): _format_cache[(key, lang)] = value try: yield finally: reset_format_cache() class TranslationTests(SimpleTestCase): @translation.override('fr') def test_plural(self): """ Test plurals with ngettext. French differs from English in that 0 is singular. """ self.assertEqual(ngettext("%d year", "%d years", 0) % 0, "0 année") self.assertEqual(ngettext("%d year", "%d years", 2) % 2, "2 années") self.assertEqual(ngettext("%(size)d byte", "%(size)d bytes", 0) % {'size': 0}, "0 octet") self.assertEqual(ngettext("%(size)d byte", "%(size)d bytes", 2) % {'size': 2}, "2 octets") def test_plural_null(self): g = trans_null.ngettext self.assertEqual(g('%d year', '%d years', 0) % 0, '0 years') self.assertEqual(g('%d year', '%d years', 1) % 1, '1 year') self.assertEqual(g('%d year', '%d years', 2) % 2, '2 years') @override_settings(LOCALE_PATHS=extended_locale_paths) @translation.override('fr') def test_multiple_plurals_per_language(self): """ Normally, French has 2 plurals. As other/locale/fr/LC_MESSAGES/django.po has a different plural equation with 3 plurals, this tests if those plural are honored. """ self.assertEqual(ngettext("%d singular", "%d plural", 0) % 0, "0 pluriel1") self.assertEqual(ngettext("%d singular", "%d plural", 1) % 1, "1 singulier") self.assertEqual(ngettext("%d singular", "%d plural", 2) % 2, "2 pluriel2") french = trans_real.catalog() # Internal _catalog can query subcatalogs (from different po files). 
self.assertEqual(french._catalog[('%d singular', 0)], '%d singulier') self.assertEqual(french._catalog[('%d hour', 0)], '%d heure') def test_override(self): activate('de') try: with translation.override('pl'): self.assertEqual(get_language(), 'pl') self.assertEqual(get_language(), 'de') with translation.override(None): self.assertIsNone(get_language()) with translation.override('pl'): pass self.assertIsNone(get_language()) self.assertEqual(get_language(), 'de') finally: deactivate() def test_override_decorator(self): @translation.override('pl') def func_pl(): self.assertEqual(get_language(), 'pl') @translation.override(None) def func_none(): self.assertIsNone(get_language()) try: activate('de') func_pl() self.assertEqual(get_language(), 'de') func_none() self.assertEqual(get_language(), 'de') finally: deactivate() def test_override_exit(self): """ The language restored is the one used when the function was called, not the one used when the decorator was initialized (#23381). """ activate('fr') @translation.override('pl') def func_pl(): pass deactivate() try: activate('en') func_pl() self.assertEqual(get_language(), 'en') finally: deactivate() def test_lazy_objects(self): """ Format string interpolation should work with *_lazy objects. """ s = gettext_lazy('Add %(name)s') d = {'name': 'Ringo'} self.assertEqual('Add Ringo', s % d) with translation.override('de', deactivate=True): self.assertEqual('Ringo hinzuf\xfcgen', s % d) with translation.override('pl'): self.assertEqual('Dodaj Ringo', s % d) # It should be possible to compare *_lazy objects. s1 = gettext_lazy('Add %(name)s') self.assertEqual(s, s1) s2 = gettext_lazy('Add %(name)s') s3 = gettext_lazy('Add %(name)s') self.assertEqual(s2, s3) self.assertEqual(s, s2) s4 = gettext_lazy('Some other string') self.assertNotEqual(s, s4) def test_lazy_pickle(self): s1 = gettext_lazy("test") self.assertEqual(str(s1), "test") s2 = pickle.loads(pickle.dumps(s1)) self.assertEqual(str(s2), "test") @override_settings(LOCALE_PATHS=extended_locale_paths) def test_ngettext_lazy(self): simple_with_format = ngettext_lazy('%d good result', '%d good results') simple_context_with_format = npgettext_lazy('Exclamation', '%d good result', '%d good results') simple_without_format = ngettext_lazy('good result', 'good results') with translation.override('de'): self.assertEqual(simple_with_format % 1, '1 gutes Resultat') self.assertEqual(simple_with_format % 4, '4 guten Resultate') self.assertEqual(simple_context_with_format % 1, '1 gutes Resultat!') self.assertEqual(simple_context_with_format % 4, '4 guten Resultate!') self.assertEqual(simple_without_format % 1, 'gutes Resultat') self.assertEqual(simple_without_format % 4, 'guten Resultate') complex_nonlazy = ngettext_lazy('Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 4) complex_deferred = ngettext_lazy( 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 'num' ) complex_context_nonlazy = npgettext_lazy( 'Greeting', 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 4 ) complex_context_deferred = npgettext_lazy( 'Greeting', 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 'num' ) with translation.override('de'): self.assertEqual(complex_nonlazy % {'num': 4, 'name': 'Jim'}, 'Hallo Jim, 4 guten Resultate') self.assertEqual(complex_deferred % {'name': 'Jim', 'num': 1}, 'Hallo Jim, 1 gutes Resultat') self.assertEqual(complex_deferred % {'name': 'Jim', 'num': 5}, 'Hallo Jim, 5 guten Resultate') with 
self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_deferred % {'name': 'Jim'} self.assertEqual(complex_context_nonlazy % {'num': 4, 'name': 'Jim'}, 'Willkommen Jim, 4 guten Resultate') self.assertEqual(complex_context_deferred % {'name': 'Jim', 'num': 1}, 'Willkommen Jim, 1 gutes Resultat') self.assertEqual(complex_context_deferred % {'name': 'Jim', 'num': 5}, 'Willkommen Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_context_deferred % {'name': 'Jim'} @override_settings(LOCALE_PATHS=extended_locale_paths) def test_ngettext_lazy_format_style(self): simple_with_format = ngettext_lazy('{} good result', '{} good results') simple_context_with_format = npgettext_lazy('Exclamation', '{} good result', '{} good results') with translation.override('de'): self.assertEqual(simple_with_format.format(1), '1 gutes Resultat') self.assertEqual(simple_with_format.format(4), '4 guten Resultate') self.assertEqual(simple_context_with_format.format(1), '1 gutes Resultat!') self.assertEqual(simple_context_with_format.format(4), '4 guten Resultate!') complex_nonlazy = ngettext_lazy('Hi {name}, {num} good result', 'Hi {name}, {num} good results', 4) complex_deferred = ngettext_lazy( 'Hi {name}, {num} good result', 'Hi {name}, {num} good results', 'num' ) complex_context_nonlazy = npgettext_lazy( 'Greeting', 'Hi {name}, {num} good result', 'Hi {name}, {num} good results', 4 ) complex_context_deferred = npgettext_lazy( 'Greeting', 'Hi {name}, {num} good result', 'Hi {name}, {num} good results', 'num' ) with translation.override('de'): self.assertEqual(complex_nonlazy.format(num=4, name='Jim'), 'Hallo Jim, 4 guten Resultate') self.assertEqual(complex_deferred.format(name='Jim', num=1), 'Hallo Jim, 1 gutes Resultat') self.assertEqual(complex_deferred.format(name='Jim', num=5), 'Hallo Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_deferred.format(name='Jim') self.assertEqual(complex_context_nonlazy.format(num=4, name='Jim'), 'Willkommen Jim, 4 guten Resultate') self.assertEqual(complex_context_deferred.format(name='Jim', num=1), 'Willkommen Jim, 1 gutes Resultat') self.assertEqual(complex_context_deferred.format(name='Jim', num=5), 'Willkommen Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_context_deferred.format(name='Jim') def test_ngettext_lazy_bool(self): self.assertTrue(ngettext_lazy('%d good result', '%d good results')) self.assertFalse(ngettext_lazy('', '')) def test_ngettext_lazy_pickle(self): s1 = ngettext_lazy('%d good result', '%d good results') self.assertEqual(s1 % 1, '1 good result') self.assertEqual(s1 % 8, '8 good results') s2 = pickle.loads(pickle.dumps(s1)) self.assertEqual(s2 % 1, '1 good result') self.assertEqual(s2 % 8, '8 good results') @override_settings(LOCALE_PATHS=extended_locale_paths) def test_pgettext(self): trans_real._active = Local() trans_real._translations = {} with translation.override('de'): self.assertEqual(pgettext("unexisting", "May"), "May") self.assertEqual(pgettext("month name", "May"), "Mai") self.assertEqual(pgettext("verb", "May"), "Kann") self.assertEqual(npgettext("search", "%d result", "%d results", 4) % 4, "4 Resultate") def test_empty_value(self): """Empty value must stay empty after being translated (#23196).""" with translation.override('de'): self.assertEqual('', gettext('')) s = mark_safe('') self.assertEqual(s, gettext(s)) @override_settings(LOCALE_PATHS=extended_locale_paths) 
def test_safe_status(self): """ Translating a string requiring no auto-escaping with gettext or pgettext shouldn't change the "safe" status. """ trans_real._active = Local() trans_real._translations = {} s1 = mark_safe('Password') s2 = mark_safe('May') with translation.override('de', deactivate=True): self.assertIs(type(gettext(s1)), SafeString) self.assertIs(type(pgettext('month name', s2)), SafeString) self.assertEqual('aPassword', SafeString('a') + s1) self.assertEqual('Passworda', s1 + SafeString('a')) self.assertEqual('Passworda', s1 + mark_safe('a')) self.assertEqual('aPassword', mark_safe('a') + s1) self.assertEqual('as', mark_safe('a') + mark_safe('s')) def test_maclines(self): """ Translations on files with Mac or DOS end of lines will be converted to unix EOF in .po catalogs. """ ca_translation = trans_real.translation('ca') ca_translation._catalog['Mac\nEOF\n'] = 'Catalan Mac\nEOF\n' ca_translation._catalog['Win\nEOF\n'] = 'Catalan Win\nEOF\n' with translation.override('ca', deactivate=True): self.assertEqual('Catalan Mac\nEOF\n', gettext('Mac\rEOF\r')) self.assertEqual('Catalan Win\nEOF\n', gettext('Win\r\nEOF\r\n')) def test_to_locale(self): tests = ( ('en', 'en'), ('EN', 'en'), ('en-us', 'en_US'), ('EN-US', 'en_US'), ('en_US', 'en_US'), # With > 2 characters after the dash. ('sr-latn', 'sr_Latn'), ('sr-LATN', 'sr_Latn'), ('sr_Latn', 'sr_Latn'), # 3-char language codes. ('ber-MA', 'ber_MA'), ('BER-MA', 'ber_MA'), ('BER_MA', 'ber_MA'), ('ber_MA', 'ber_MA'), # With private use subtag (x-informal). ('nl-nl-x-informal', 'nl_NL-x-informal'), ('NL-NL-X-INFORMAL', 'nl_NL-x-informal'), ('sr-latn-x-informal', 'sr_Latn-x-informal'), ('SR-LATN-X-INFORMAL', 'sr_Latn-x-informal'), ) for lang, locale in tests: with self.subTest(lang=lang): self.assertEqual(to_locale(lang), locale) def test_to_language(self): self.assertEqual(to_language('en_US'), 'en-us') self.assertEqual(to_language('sr_Lat'), 'sr-lat') def test_language_bidi(self): self.assertIs(get_language_bidi(), False) with translation.override(None): self.assertIs(get_language_bidi(), False) def test_language_bidi_null(self): self.assertIs(trans_null.get_language_bidi(), False) with override_settings(LANGUAGE_CODE='he'): self.assertIs(get_language_bidi(), True) class TranslationLoadingTests(SimpleTestCase): def setUp(self): """Clear translation state.""" self._old_language = get_language() self._old_translations = trans_real._translations deactivate() trans_real._translations = {} def tearDown(self): trans_real._translations = self._old_translations activate(self._old_language) @override_settings( USE_I18N=True, LANGUAGE_CODE='en', LANGUAGES=[ ('en', 'English'), ('en-ca', 'English (Canada)'), ('en-nz', 'English (New Zealand)'), ('en-au', 'English (Australia)'), ], LOCALE_PATHS=[os.path.join(here, 'loading')], INSTALLED_APPS=['i18n.loading_app'], ) def test_translation_loading(self): """ "loading_app" does not have translations for all languages provided by "loading". Catalogs are merged correctly. """ tests = [ ('en', 'local country person'), ('en_AU', 'aussie'), ('en_NZ', 'kiwi'), ('en_CA', 'canuck'), ] # Load all relevant translations. for language, _ in tests: activate(language) # Catalogs are merged correctly. 
for language, nickname in tests: with self.subTest(language=language): activate(language) self.assertEqual(gettext('local country person'), nickname) class TranslationThreadSafetyTests(SimpleTestCase): def setUp(self): self._old_language = get_language() self._translations = trans_real._translations # here we rely on .split() being called inside the _fetch() # in trans_real.translation() class sideeffect_str(str): def split(self, *args, **kwargs): res = str.split(self, *args, **kwargs) trans_real._translations['en-YY'] = None return res trans_real._translations = {sideeffect_str('en-XX'): None} def tearDown(self): trans_real._translations = self._translations activate(self._old_language) def test_bug14894_translation_activate_thread_safety(self): translation_count = len(trans_real._translations) # May raise RuntimeError if translation.activate() isn't thread-safe. translation.activate('pl') # make sure sideeffect_str actually added a new translation self.assertLess(translation_count, len(trans_real._translations)) @override_settings(USE_L10N=True) class FormattingTests(SimpleTestCase): def setUp(self): super().setUp() self.n = decimal.Decimal('66666.666') self.f = 99999.999 self.d = datetime.date(2009, 12, 31) self.dt = datetime.datetime(2009, 12, 31, 20, 50) self.t = datetime.time(10, 15, 48) self.long = 10000 self.ctxt = Context({ 'n': self.n, 't': self.t, 'd': self.d, 'dt': self.dt, 'f': self.f, 'l': self.long, }) def test_all_format_strings(self): all_locales = LANG_INFO.keys() some_date = datetime.date(2017, 10, 14) some_datetime = datetime.datetime(2017, 10, 14, 10, 23) for locale in all_locales: with self.subTest(locale=locale), translation.override(locale): self.assertIn('2017', date_format(some_date)) # Uses DATE_FORMAT by default self.assertIn('23', time_format(some_datetime)) # Uses TIME_FORMAT by default self.assertIn('2017', date_format(some_datetime, format=get_format('DATETIME_FORMAT'))) self.assertIn('2017', date_format(some_date, format=get_format('YEAR_MONTH_FORMAT'))) self.assertIn('14', date_format(some_date, format=get_format('MONTH_DAY_FORMAT'))) self.assertIn('2017', date_format(some_date, format=get_format('SHORT_DATE_FORMAT'))) self.assertIn('2017', date_format(some_datetime, format=get_format('SHORT_DATETIME_FORMAT'))) def test_locale_independent(self): """ Localization of numbers """ with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666.66', nformat(self.n, decimal_sep='.', decimal_pos=2, grouping=3, thousand_sep=',')) self.assertEqual('66666A6', nformat(self.n, decimal_sep='A', decimal_pos=1, grouping=1, thousand_sep='B')) self.assertEqual('66666', nformat(self.n, decimal_sep='X', decimal_pos=0, grouping=1, thousand_sep='Y')) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual( '66,666.66', nformat(self.n, decimal_sep='.', decimal_pos=2, grouping=3, thousand_sep=',') ) self.assertEqual( '6B6B6B6B6A6', nformat(self.n, decimal_sep='A', decimal_pos=1, grouping=1, thousand_sep='B') ) self.assertEqual('-66666.6', nformat(-66666.666, decimal_sep='.', decimal_pos=1)) self.assertEqual('-66666.0', nformat(int('-66666'), decimal_sep='.', decimal_pos=1)) self.assertEqual('10000.0', nformat(self.long, decimal_sep='.', decimal_pos=1)) self.assertEqual( '10,00,00,000.00', nformat(100000000.00, decimal_sep='.', decimal_pos=2, grouping=(3, 2, 0), thousand_sep=',') ) self.assertEqual( '1,0,00,000,0000.00', nformat(10000000000.00, decimal_sep='.', decimal_pos=2, grouping=(4, 3, 2, 1, 0), thousand_sep=',') ) self.assertEqual( '10000,00,000.00', 
nformat(1000000000.00, decimal_sep='.', decimal_pos=2, grouping=(3, 2, -1), thousand_sep=',') ) # This unusual grouping/force_grouping combination may be triggered by the intcomma filter (#17414) self.assertEqual( '10000', nformat(self.long, decimal_sep='.', decimal_pos=0, grouping=0, force_grouping=True) ) # date filter self.assertEqual('31.12.2009 в 20:50', Template('{{ dt|date:"d.m.Y в H:i" }}').render(self.ctxt)) self.assertEqual('⌚ 10:15', Template('{{ t|time:"⌚ H:i" }}').render(self.ctxt)) @override_settings(USE_L10N=False) def test_l10n_disabled(self): """ Catalan locale with format i18n disabled translations will be used, but not formats """ with translation.override('ca', deactivate=True): self.maxDiff = 3000 self.assertEqual('N j, Y', get_format('DATE_FORMAT')) self.assertEqual(0, get_format('FIRST_DAY_OF_WEEK')) self.assertEqual('.', get_format('DECIMAL_SEPARATOR')) self.assertEqual('10:15 a.m.', time_format(self.t)) self.assertEqual('Des. 31, 2009', date_format(self.d)) self.assertEqual('desembre 2009', date_format(self.d, 'YEAR_MONTH_FORMAT')) self.assertEqual('12/31/2009 8:50 p.m.', date_format(self.dt, 'SHORT_DATETIME_FORMAT')) self.assertEqual('No localizable', localize('No localizable')) self.assertEqual('66666.666', localize(self.n)) self.assertEqual('99999.999', localize(self.f)) self.assertEqual('10000', localize(self.long)) self.assertEqual('Des. 31, 2009', localize(self.d)) self.assertEqual('Des. 31, 2009, 8:50 p.m.', localize(self.dt)) self.assertEqual('66666.666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99999.999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('Des. 31, 2009', Template('{{ d }}').render(self.ctxt)) self.assertEqual('Des. 31, 2009, 8:50 p.m.', Template('{{ dt }}').render(self.ctxt)) self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt)) self.assertEqual( '66666.67', Template('{{ n|floatformat:"2g" }}').render(self.ctxt), ) self.assertEqual( '100000.0', Template('{{ f|floatformat:"g" }}').render(self.ctxt), ) self.assertEqual('10:15 a.m.', Template('{{ t|time:"TIME_FORMAT" }}').render(self.ctxt)) self.assertEqual('12/31/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt)) self.assertEqual( '12/31/2009 8:50 p.m.', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt) ) form = I18nForm({ 'decimal_field': '66666,666', 'float_field': '99999,999', 'date_field': '31/12/2009', 'datetime_field': '31/12/2009 20:50', 'time_field': '20:50', 'integer_field': '1.234', }) self.assertFalse(form.is_valid()) self.assertEqual(['Introdu\xefu un n\xfamero.'], form.errors['float_field']) self.assertEqual(['Introdu\xefu un n\xfamero.'], form.errors['decimal_field']) self.assertEqual(['Introdu\xefu una data v\xe0lida.'], form.errors['date_field']) self.assertEqual(['Introdu\xefu una data/hora v\xe0lides.'], form.errors['datetime_field']) self.assertEqual(['Introdu\xefu un n\xfamero enter.'], form.errors['integer_field']) form2 = SelectDateForm({ 'date_field_month': '12', 'date_field_day': '31', 'date_field_year': '2009' }) self.assertTrue(form2.is_valid()) self.assertEqual(datetime.date(2009, 12, 31), form2.cleaned_data['date_field']) self.assertHTMLEqual( '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">gener</option>' '<option value="2">febrer</option>' '<option value="3">mar\xe7</option>' '<option value="4">abril</option>' '<option 
value="5">maig</option>' '<option value="6">juny</option>' '<option value="7">juliol</option>' '<option value="8">agost</option>' '<option value="9">setembre</option>' '<option value="10">octubre</option>' '<option value="11">novembre</option>' '<option value="12" selected>desembre</option>' '</select>' '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) # We shouldn't change the behavior of the floatformat filter re: # thousand separator and grouping when USE_L10N is False even # if the USE_THOUSAND_SEPARATOR, NUMBER_GROUPING and # THOUSAND_SEPARATOR settings are specified with self.settings(USE_THOUSAND_SEPARATOR=True, NUMBER_GROUPING=1, THOUSAND_SEPARATOR='!'): self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt)) def test_false_like_locale_formats(self): """ The active locale's formats take precedence over the default settings even if they would be interpreted as False in a conditional test (e.g. 0 or empty string) (#16938). """ with translation.override('fr'): with self.settings(USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR='!'): self.assertEqual('\xa0', get_format('THOUSAND_SEPARATOR')) # Even a second time (after the format has been cached)... self.assertEqual('\xa0', get_format('THOUSAND_SEPARATOR')) with self.settings(FIRST_DAY_OF_WEEK=0): self.assertEqual(1, get_format('FIRST_DAY_OF_WEEK')) # Even a second time (after the format has been cached)... 
self.assertEqual(1, get_format('FIRST_DAY_OF_WEEK')) def test_l10n_enabled(self): self.maxDiff = 3000 # Catalan locale with translation.override('ca', deactivate=True): self.assertEqual(r'j \d\e F \d\e Y', get_format('DATE_FORMAT')) self.assertEqual(1, get_format('FIRST_DAY_OF_WEEK')) self.assertEqual(',', get_format('DECIMAL_SEPARATOR')) self.assertEqual('10:15', time_format(self.t)) self.assertEqual('31 de desembre de 2009', date_format(self.d)) self.assertEqual('desembre del 2009', date_format(self.d, 'YEAR_MONTH_FORMAT')) self.assertEqual('31/12/2009 20:50', date_format(self.dt, 'SHORT_DATETIME_FORMAT')) self.assertEqual('No localizable', localize('No localizable')) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66.666,666', localize(self.n)) self.assertEqual('99.999,999', localize(self.f)) self.assertEqual('10.000', localize(self.long)) self.assertEqual('True', localize(True)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666,666', localize(self.n)) self.assertEqual('99999,999', localize(self.f)) self.assertEqual('10000', localize(self.long)) self.assertEqual('31 de desembre de 2009', localize(self.d)) self.assertEqual('31 de desembre de 2009 a les 20:50', localize(self.dt)) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66.666,666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99.999,999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('10.000', Template('{{ l }}').render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=True): form3 = I18nForm({ 'decimal_field': '66.666,666', 'float_field': '99.999,999', 'date_field': '31/12/2009', 'datetime_field': '31/12/2009 20:50', 'time_field': '20:50', 'integer_field': '1.234', }) self.assertTrue(form3.is_valid()) self.assertEqual(decimal.Decimal('66666.666'), form3.cleaned_data['decimal_field']) self.assertEqual(99999.999, form3.cleaned_data['float_field']) self.assertEqual(datetime.date(2009, 12, 31), form3.cleaned_data['date_field']) self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form3.cleaned_data['datetime_field']) self.assertEqual(datetime.time(20, 50), form3.cleaned_data['time_field']) self.assertEqual(1234, form3.cleaned_data['integer_field']) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666,666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99999,999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('31 de desembre de 2009', Template('{{ d }}').render(self.ctxt)) self.assertEqual('31 de desembre de 2009 a les 20:50', Template('{{ dt }}').render(self.ctxt)) self.assertEqual('66666,67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000,0', Template('{{ f|floatformat }}').render(self.ctxt)) self.assertEqual( '66.666,67', Template('{{ n|floatformat:"2g" }}').render(self.ctxt), ) self.assertEqual( '100.000,0', Template('{{ f|floatformat:"g" }}').render(self.ctxt), ) self.assertEqual('10:15', Template('{{ t|time:"TIME_FORMAT" }}').render(self.ctxt)) self.assertEqual('31/12/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt)) self.assertEqual( '31/12/2009 20:50', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt) ) self.assertEqual(date_format(datetime.datetime.now(), "DATE_FORMAT"), Template('{% now "DATE_FORMAT" %}').render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=False): form4 = I18nForm({ 'decimal_field': '66666,666', 'float_field': '99999,999', 'date_field': '31/12/2009', 'datetime_field': '31/12/2009 20:50', 
'time_field': '20:50', 'integer_field': '1234', }) self.assertTrue(form4.is_valid()) self.assertEqual(decimal.Decimal('66666.666'), form4.cleaned_data['decimal_field']) self.assertEqual(99999.999, form4.cleaned_data['float_field']) self.assertEqual(datetime.date(2009, 12, 31), form4.cleaned_data['date_field']) self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form4.cleaned_data['datetime_field']) self.assertEqual(datetime.time(20, 50), form4.cleaned_data['time_field']) self.assertEqual(1234, form4.cleaned_data['integer_field']) form5 = SelectDateForm({ 'date_field_month': '12', 'date_field_day': '31', 'date_field_year': '2009' }) self.assertTrue(form5.is_valid()) self.assertEqual(datetime.date(2009, 12, 31), form5.cleaned_data['date_field']) self.assertHTMLEqual( '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">gener</option>' '<option value="2">febrer</option>' '<option value="3">mar\xe7</option>' '<option value="4">abril</option>' '<option value="5">maig</option>' '<option value="6">juny</option>' '<option value="7">juliol</option>' '<option value="8">agost</option>' '<option value="9">setembre</option>' '<option value="10">octubre</option>' '<option value="11">novembre</option>' '<option value="12" selected>desembre</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) # Russian locale (with E as month) with translation.override('ru', deactivate=True): self.assertHTMLEqual( '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option 
value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">\u042f\u043d\u0432\u0430\u0440\u044c</option>' '<option value="2">\u0424\u0435\u0432\u0440\u0430\u043b\u044c</option>' '<option value="3">\u041c\u0430\u0440\u0442</option>' '<option value="4">\u0410\u043f\u0440\u0435\u043b\u044c</option>' '<option value="5">\u041c\u0430\u0439</option>' '<option value="6">\u0418\u044e\u043d\u044c</option>' '<option value="7">\u0418\u044e\u043b\u044c</option>' '<option value="8">\u0410\u0432\u0433\u0443\u0441\u0442</option>' '<option value="9">\u0421\u0435\u043d\u0442\u044f\u0431\u0440\u044c</option>' '<option value="10">\u041e\u043a\u0442\u044f\u0431\u0440\u044c</option>' '<option value="11">\u041d\u043e\u044f\u0431\u0440\u044c</option>' '<option value="12" selected>\u0414\u0435\u043a\u0430\u0431\u0440\u044c</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) # English locale with translation.override('en', deactivate=True): self.assertEqual('N j, Y', get_format('DATE_FORMAT')) self.assertEqual(0, get_format('FIRST_DAY_OF_WEEK')) self.assertEqual('.', get_format('DECIMAL_SEPARATOR')) self.assertEqual('Dec. 31, 2009', date_format(self.d)) self.assertEqual('December 2009', date_format(self.d, 'YEAR_MONTH_FORMAT')) self.assertEqual('12/31/2009 8:50 p.m.', date_format(self.dt, 'SHORT_DATETIME_FORMAT')) self.assertEqual('No localizable', localize('No localizable')) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66,666.666', localize(self.n)) self.assertEqual('99,999.999', localize(self.f)) self.assertEqual('10,000', localize(self.long)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666.666', localize(self.n)) self.assertEqual('99999.999', localize(self.f)) self.assertEqual('10000', localize(self.long)) self.assertEqual('Dec. 31, 2009', localize(self.d)) self.assertEqual('Dec. 
31, 2009, 8:50 p.m.', localize(self.dt)) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66,666.666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99,999.999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('10,000', Template('{{ l }}').render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666.666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99999.999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('Dec. 31, 2009', Template('{{ d }}').render(self.ctxt)) self.assertEqual('Dec. 31, 2009, 8:50 p.m.', Template('{{ dt }}').render(self.ctxt)) self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt)) self.assertEqual( '66,666.67', Template('{{ n|floatformat:"2g" }}').render(self.ctxt), ) self.assertEqual( '100,000.0', Template('{{ f|floatformat:"g" }}').render(self.ctxt), ) self.assertEqual('12/31/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt)) self.assertEqual( '12/31/2009 8:50 p.m.', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt) ) form5 = I18nForm({ 'decimal_field': '66666.666', 'float_field': '99999.999', 'date_field': '12/31/2009', 'datetime_field': '12/31/2009 20:50', 'time_field': '20:50', 'integer_field': '1234', }) self.assertTrue(form5.is_valid()) self.assertEqual(decimal.Decimal('66666.666'), form5.cleaned_data['decimal_field']) self.assertEqual(99999.999, form5.cleaned_data['float_field']) self.assertEqual(datetime.date(2009, 12, 31), form5.cleaned_data['date_field']) self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form5.cleaned_data['datetime_field']) self.assertEqual(datetime.time(20, 50), form5.cleaned_data['time_field']) self.assertEqual(1234, form5.cleaned_data['integer_field']) form6 = SelectDateForm({ 'date_field_month': '12', 'date_field_day': '31', 'date_field_year': '2009' }) self.assertTrue(form6.is_valid()) self.assertEqual(datetime.date(2009, 12, 31), form6.cleaned_data['date_field']) self.assertHTMLEqual( '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">January</option>' '<option value="2">February</option>' '<option value="3">March</option>' '<option value="4">April</option>' '<option value="5">May</option>' '<option value="6">June</option>' '<option value="7">July</option>' '<option value="8">August</option>' '<option value="9">September</option>' '<option value="10">October</option>' '<option value="11">November</option>' '<option value="12" selected>December</option>' '</select>' '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option 
value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) def test_sub_locales(self): """ Check if sublocales fall back to the main locale """ with self.settings(USE_THOUSAND_SEPARATOR=True): with translation.override('de-at', deactivate=True): self.assertEqual('66.666,666', Template('{{ n }}').render(self.ctxt)) with translation.override('es-us', deactivate=True): self.assertEqual('31 de Diciembre de 2009', date_format(self.d)) def test_localized_input(self): """ Tests if form input is correctly localized """ self.maxDiff = 1200 with translation.override('de-at', deactivate=True): form6 = CompanyForm({ 'name': 'acme', 'date_added': datetime.datetime(2009, 12, 31, 6, 0, 0), 'cents_paid': decimal.Decimal('59.47'), 'products_delivered': 12000, }) self.assertTrue(form6.is_valid()) self.assertHTMLEqual( form6.as_ul(), '<li><label for="id_name">Name:</label>' '<input id="id_name" type="text" name="name" value="acme" maxlength="50" required></li>' '<li><label for="id_date_added">Date added:</label>' '<input type="text" name="date_added" value="31.12.2009 06:00:00" id="id_date_added" required></li>' '<li><label for="id_cents_paid">Cents paid:</label>' '<input type="text" name="cents_paid" value="59,47" id="id_cents_paid" required></li>' '<li><label for="id_products_delivered">Products delivered:</label>' '<input type="text" name="products_delivered" value="12000" id="id_products_delivered" required>' '</li>' ) self.assertEqual(localize_input(datetime.datetime(2009, 12, 31, 6, 0, 0)), '31.12.2009 06:00:00') self.assertEqual(datetime.datetime(2009, 12, 31, 6, 0, 0), form6.cleaned_data['date_added']) with self.settings(USE_THOUSAND_SEPARATOR=True): # Checking for the localized "products_delivered" field self.assertInHTML( '<input type="text" name="products_delivered" ' 'value="12.000" id="id_products_delivered" required>', form6.as_ul() ) def test_localized_input_func(self): tests = ( (True, 'True'), (datetime.date(1, 1, 1), '0001-01-01'), (datetime.datetime(1, 1, 1), '0001-01-01 00:00:00'), ) with self.settings(USE_THOUSAND_SEPARATOR=True): for value, expected in tests: with self.subTest(value=value): self.assertEqual(localize_input(value), expected) def test_sanitize_separators(self): """ Tests django.utils.formats.sanitize_separators. 
""" # Non-strings are untouched self.assertEqual(sanitize_separators(123), 123) with translation.override('ru', deactivate=True): # Russian locale has non-breaking space (\xa0) as thousand separator # Usual space is accepted too when sanitizing inputs with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual(sanitize_separators('1\xa0234\xa0567'), '1234567') self.assertEqual(sanitize_separators('77\xa0777,777'), '77777.777') self.assertEqual(sanitize_separators('12 345'), '12345') self.assertEqual(sanitize_separators('77 777,777'), '77777.777') with self.settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=False): self.assertEqual(sanitize_separators('12\xa0345'), '12\xa0345') with self.settings(USE_THOUSAND_SEPARATOR=True): with patch_formats(get_language(), THOUSAND_SEPARATOR='.', DECIMAL_SEPARATOR=','): self.assertEqual(sanitize_separators('10.234'), '10234') # Suspicion that user entered dot as decimal separator (#22171) self.assertEqual(sanitize_separators('10.10'), '10.10') with self.settings(USE_L10N=False, DECIMAL_SEPARATOR=','): self.assertEqual(sanitize_separators('1001,10'), '1001.10') self.assertEqual(sanitize_separators('1001.10'), '1001.10') with self.settings( USE_L10N=False, DECIMAL_SEPARATOR=',', USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR='.' ): self.assertEqual(sanitize_separators('1.001,10'), '1001.10') self.assertEqual(sanitize_separators('1001,10'), '1001.10') self.assertEqual(sanitize_separators('1001.10'), '1001.10') self.assertEqual(sanitize_separators('1,001.10'), '1.001.10') # Invalid output def test_iter_format_modules(self): """ Tests the iter_format_modules function. """ # Importing some format modules so that we can compare the returned # modules with these expected modules default_mod = import_module('django.conf.locale.de.formats') test_mod = import_module('i18n.other.locale.de.formats') test_mod2 = import_module('i18n.other2.locale.de.formats') with translation.override('de-at', deactivate=True): # Should return the correct default module when no setting is set self.assertEqual(list(iter_format_modules('de')), [default_mod]) # When the setting is a string, should return the given module and # the default module self.assertEqual( list(iter_format_modules('de', 'i18n.other.locale')), [test_mod, default_mod]) # When setting is a list of strings, should return the given # modules and the default module self.assertEqual( list(iter_format_modules('de', ['i18n.other.locale', 'i18n.other2.locale'])), [test_mod, test_mod2, default_mod]) def test_iter_format_modules_stability(self): """ Tests the iter_format_modules function always yields format modules in a stable and correct order in presence of both base ll and ll_CC formats. 
""" en_format_mod = import_module('django.conf.locale.en.formats') en_gb_format_mod = import_module('django.conf.locale.en_GB.formats') self.assertEqual(list(iter_format_modules('en-gb')), [en_gb_format_mod, en_format_mod]) def test_get_format_modules_lang(self): with translation.override('de', deactivate=True): self.assertEqual('.', get_format('DECIMAL_SEPARATOR', lang='en')) def test_get_format_modules_stability(self): with self.settings(FORMAT_MODULE_PATH='i18n.other.locale'): with translation.override('de', deactivate=True): old = "%r" % get_format_modules(reverse=True) new = "%r" % get_format_modules(reverse=True) # second try self.assertEqual(new, old, 'Value returned by get_formats_modules() must be preserved between calls.') def test_localize_templatetag_and_filter(self): """ Test the {% localize %} templatetag and the localize/unlocalize filters. """ context = Context({'int': 1455, 'float': 3.14, 'date': datetime.date(2016, 12, 31)}) template1 = Template( '{% load l10n %}{% localize %}{{ int }}/{{ float }}/{{ date }}{% endlocalize %}; ' '{% localize on %}{{ int }}/{{ float }}/{{ date }}{% endlocalize %}' ) template2 = Template( '{% load l10n %}{{ int }}/{{ float }}/{{ date }}; ' '{% localize off %}{{ int }}/{{ float }}/{{ date }};{% endlocalize %} ' '{{ int }}/{{ float }}/{{ date }}' ) template3 = Template( '{% load l10n %}{{ int }}/{{ float }}/{{ date }}; ' '{{ int|unlocalize }}/{{ float|unlocalize }}/{{ date|unlocalize }}' ) template4 = Template( '{% load l10n %}{{ int }}/{{ float }}/{{ date }}; ' '{{ int|localize }}/{{ float|localize }}/{{ date|localize }}' ) expected_localized = '1.455/3,14/31. Dezember 2016' expected_unlocalized = '1455/3.14/Dez. 31, 2016' output1 = '; '.join([expected_localized, expected_localized]) output2 = '; '.join([expected_localized, expected_unlocalized, expected_localized]) output3 = '; '.join([expected_localized, expected_unlocalized]) output4 = '; '.join([expected_unlocalized, expected_localized]) with translation.override('de', deactivate=True): with self.settings(USE_L10N=False, USE_THOUSAND_SEPARATOR=True): self.assertEqual(template1.render(context), output1) self.assertEqual(template4.render(context), output4) with self.settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True): self.assertEqual(template1.render(context), output1) self.assertEqual(template2.render(context), output2) self.assertEqual(template3.render(context), output3) def test_localized_off_numbers(self): """A string representation is returned for unlocalized numbers.""" template = Template( '{% load l10n %}{% localize off %}' '{{ int }}/{{ float }}/{{ decimal }}{% endlocalize %}' ) context = Context( {'int': 1455, 'float': 3.14, 'decimal': decimal.Decimal('24.1567')} ) for use_l10n in [True, False]: with self.subTest(use_l10n=use_l10n), self.settings( USE_L10N=use_l10n, DECIMAL_SEPARATOR=',', USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR='°', NUMBER_GROUPING=2, ): self.assertEqual(template.render(context), '1455/3.14/24.1567') def test_localized_as_text_as_hidden_input(self): """ Tests if form input with 'as_hidden' or 'as_text' is correctly localized. 
Ticket #18777 """ self.maxDiff = 1200 with translation.override('de-at', deactivate=True): template = Template('{% load l10n %}{{ form.date_added }}; {{ form.cents_paid }}') template_as_text = Template('{% load l10n %}{{ form.date_added.as_text }}; {{ form.cents_paid.as_text }}') template_as_hidden = Template( '{% load l10n %}{{ form.date_added.as_hidden }}; {{ form.cents_paid.as_hidden }}' ) form = CompanyForm({ 'name': 'acme', 'date_added': datetime.datetime(2009, 12, 31, 6, 0, 0), 'cents_paid': decimal.Decimal('59.47'), 'products_delivered': 12000, }) context = Context({'form': form}) self.assertTrue(form.is_valid()) self.assertHTMLEqual( template.render(context), '<input id="id_date_added" name="date_added" type="text" value="31.12.2009 06:00:00" required>;' '<input id="id_cents_paid" name="cents_paid" type="text" value="59,47" required>' ) self.assertHTMLEqual( template_as_text.render(context), '<input id="id_date_added" name="date_added" type="text" value="31.12.2009 06:00:00" required>;' ' <input id="id_cents_paid" name="cents_paid" type="text" value="59,47" required>' ) self.assertHTMLEqual( template_as_hidden.render(context), '<input id="id_date_added" name="date_added" type="hidden" value="31.12.2009 06:00:00">;' '<input id="id_cents_paid" name="cents_paid" type="hidden" value="59,47">' ) def test_format_arbitrary_settings(self): self.assertEqual(get_format('DEBUG'), 'DEBUG') def test_get_custom_format(self): reset_format_cache() with self.settings(FORMAT_MODULE_PATH='i18n.other.locale'): with translation.override('fr', deactivate=True): self.assertEqual('d/m/Y CUSTOM', get_format('CUSTOM_DAY_FORMAT')) def test_admin_javascript_supported_input_formats(self): """ The first input format for DATE_INPUT_FORMATS, TIME_INPUT_FORMATS, and DATETIME_INPUT_FORMATS must not contain %f since that's unsupported by the admin's time picker widget. """ regex = re.compile('%([^BcdHImMpSwxXyY%])') for language_code, language_name in settings.LANGUAGES: for format_name in ('DATE_INPUT_FORMATS', 'TIME_INPUT_FORMATS', 'DATETIME_INPUT_FORMATS'): with self.subTest(language=language_code, format=format_name): formatter = get_format(format_name, lang=language_code)[0] self.assertEqual( regex.findall(formatter), [], "%s locale's %s uses an unsupported format code." % (language_code, format_name) ) class MiscTests(SimpleTestCase): rf = RequestFactory() @override_settings(LANGUAGE_CODE='de') def test_english_fallback(self): """ With a non-English LANGUAGE_CODE and if the active language is English or one of its variants, the untranslated string should be returned (instead of falling back to LANGUAGE_CODE) (See #24413). """ self.assertEqual(gettext("Image"), "Bild") with translation.override('en'): self.assertEqual(gettext("Image"), "Image") with translation.override('en-us'): self.assertEqual(gettext("Image"), "Image") with translation.override('en-ca'): self.assertEqual(gettext("Image"), "Image") def test_parse_spec_http_header(self): """ Testing HTTP header parsing. First, we test that we can parse the values according to the spec (and that we extract all the pieces in the right order). 
""" tests = [ # Good headers ('de', [('de', 1.0)]), ('en-AU', [('en-au', 1.0)]), ('es-419', [('es-419', 1.0)]), ('*;q=1.00', [('*', 1.0)]), ('en-AU;q=0.123', [('en-au', 0.123)]), ('en-au;q=0.5', [('en-au', 0.5)]), ('en-au;q=1.0', [('en-au', 1.0)]), ('da, en-gb;q=0.25, en;q=0.5', [('da', 1.0), ('en', 0.5), ('en-gb', 0.25)]), ('en-au-xx', [('en-au-xx', 1.0)]), ('de,en-au;q=0.75,en-us;q=0.5,en;q=0.25,es;q=0.125,fa;q=0.125', [('de', 1.0), ('en-au', 0.75), ('en-us', 0.5), ('en', 0.25), ('es', 0.125), ('fa', 0.125)]), ('*', [('*', 1.0)]), ('de;q=0.', [('de', 0.0)]), ('en; q=1,', [('en', 1.0)]), ('en; q=1.0, * ; q=0.5', [('en', 1.0), ('*', 0.5)]), # Bad headers ('en-gb;q=1.0000', []), ('en;q=0.1234', []), ('en;q=.2', []), ('abcdefghi-au', []), ('**', []), ('en,,gb', []), ('en-au;q=0.1.0', []), (('X' * 97) + 'Z,en', []), ('da, en-gb;q=0.8, en;q=0.7,#', []), ('de;q=2.0', []), ('de;q=0.a', []), ('12-345', []), ('', []), ('en;q=1e0', []), ] for value, expected in tests: with self.subTest(value=value): self.assertEqual(trans_real.parse_accept_lang_header(value), tuple(expected)) def test_parse_literal_http_header(self): """ Now test that we parse a literal HTTP header correctly. """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-br'} self.assertEqual('pt-br', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt'} self.assertEqual('pt', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'es,de'} self.assertEqual('es', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'es-ar,de'} self.assertEqual('es-ar', g(r)) # This test assumes there won't be a Django translation to a US # variation of the Spanish language, a safe assumption. When the # user sets it as the preferred language, the main 'es' # translation should be selected instead. r.META = {'HTTP_ACCEPT_LANGUAGE': 'es-us'} self.assertEqual(g(r), 'es') # This tests the following scenario: there isn't a main language (zh) # translation of Django but there is a translation to variation (zh-hans) # the user sets zh-hans as the preferred language, it should be selected # by Django without falling back nor ignoring it. r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hans,de'} self.assertEqual(g(r), 'zh-hans') r.META = {'HTTP_ACCEPT_LANGUAGE': 'NL'} self.assertEqual('nl', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'fy'} self.assertEqual('fy', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'ia'} self.assertEqual('ia', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'sr-latn'} self.assertEqual('sr-latn', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hans'} self.assertEqual('zh-hans', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hant'} self.assertEqual('zh-hant', g(r)) @override_settings( LANGUAGES=[ ('en', 'English'), ('zh-hans', 'Simplified Chinese'), ('zh-hant', 'Traditional Chinese'), ] ) def test_support_for_deprecated_chinese_language_codes(self): """ Some browsers (Firefox, IE, etc.) use deprecated language codes. As these language codes will be removed in Django 1.9, these will be incorrectly matched. For example zh-tw (traditional) will be interpreted as zh-hans (simplified), which is wrong. So we should also accept these deprecated language codes. 
refs #18419 -- this is explicitly for browser compatibility """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-cn,en'} self.assertEqual(g(r), 'zh-hans') r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-tw,en'} self.assertEqual(g(r), 'zh-hant') def test_special_fallback_language(self): """ Some languages may have special fallbacks that don't follow the simple 'fr-ca' -> 'fr' logic (notably Chinese codes). """ r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-my,en'} self.assertEqual(get_language_from_request(r), 'zh-hans') def test_parse_language_cookie(self): """ Now test that we parse language preferences stored in a cookie correctly. """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'pt-br'} r.META = {} self.assertEqual('pt-br', g(r)) r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'pt'} r.META = {} self.assertEqual('pt', g(r)) r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'es'} r.META = {'HTTP_ACCEPT_LANGUAGE': 'de'} self.assertEqual('es', g(r)) # This test assumes there won't be a Django translation to a US # variation of the Spanish language, a safe assumption. When the # user sets it as the preferred language, the main 'es' # translation should be selected instead. r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'es-us'} r.META = {} self.assertEqual(g(r), 'es') # This tests the following scenario: there isn't a main language (zh) # translation of Django but there is a translation to variation (zh-hans) # the user sets zh-hans as the preferred language, it should be selected # by Django without falling back nor ignoring it. r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'zh-hans'} r.META = {'HTTP_ACCEPT_LANGUAGE': 'de'} self.assertEqual(g(r), 'zh-hans') @override_settings( USE_I18N=True, LANGUAGES=[ ('en', 'English'), ('de', 'German'), ('de-at', 'Austrian German'), ('pt-br', 'Portuguese (Brazil)'), ], ) def test_get_supported_language_variant_real(self): g = trans_real.get_supported_language_variant self.assertEqual(g('en'), 'en') self.assertEqual(g('en-gb'), 'en') self.assertEqual(g('de'), 'de') self.assertEqual(g('de-at'), 'de-at') self.assertEqual(g('de-ch'), 'de') self.assertEqual(g('pt-br'), 'pt-br') self.assertEqual(g('pt'), 'pt-br') self.assertEqual(g('pt-pt'), 'pt-br') with self.assertRaises(LookupError): g('pt', strict=True) with self.assertRaises(LookupError): g('pt-pt', strict=True) with self.assertRaises(LookupError): g('xyz') with self.assertRaises(LookupError): g('xy-zz') def test_get_supported_language_variant_null(self): g = trans_null.get_supported_language_variant self.assertEqual(g(settings.LANGUAGE_CODE), settings.LANGUAGE_CODE) with self.assertRaises(LookupError): g('pt') with self.assertRaises(LookupError): g('de') with self.assertRaises(LookupError): g('de-at') with self.assertRaises(LookupError): g('de', strict=True) with self.assertRaises(LookupError): g('de-at', strict=True) with self.assertRaises(LookupError): g('xyz') @override_settings( LANGUAGES=[ ('en', 'English'), ('de', 'German'), ('de-at', 'Austrian German'), ('pl', 'Polish'), ], ) def test_get_language_from_path_real(self): g = trans_real.get_language_from_path self.assertEqual(g('/pl/'), 'pl') self.assertEqual(g('/pl'), 'pl') self.assertIsNone(g('/xyz/')) self.assertEqual(g('/en/'), 'en') self.assertEqual(g('/en-gb/'), 'en') self.assertEqual(g('/de/'), 'de') self.assertEqual(g('/de-at/'), 'de-at') self.assertEqual(g('/de-ch/'), 'de') self.assertIsNone(g('/de-simple-page/')) def 
test_get_language_from_path_null(self): g = trans_null.get_language_from_path self.assertIsNone(g('/pl/')) self.assertIsNone(g('/pl')) self.assertIsNone(g('/xyz/')) def test_cache_resetting(self): """ After setting LANGUAGE, the cache should be cleared and languages previously valid should not be used (#14170). """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-br'} self.assertEqual('pt-br', g(r)) with self.settings(LANGUAGES=[('en', 'English')]): self.assertNotEqual('pt-br', g(r)) def test_i18n_patterns_returns_list(self): with override_settings(USE_I18N=False): self.assertIsInstance(i18n_patterns([]), list) with override_settings(USE_I18N=True): self.assertIsInstance(i18n_patterns([]), list) class ResolutionOrderI18NTests(SimpleTestCase): def setUp(self): super().setUp() activate('de') def tearDown(self): deactivate() super().tearDown() def assertGettext(self, msgid, msgstr): result = gettext(msgid) self.assertIn( msgstr, result, "The string '%s' isn't in the translation of '%s'; the actual result is '%s'." % (msgstr, msgid, result) ) class AppResolutionOrderI18NTests(ResolutionOrderI18NTests): @override_settings(LANGUAGE_CODE='de') def test_app_translation(self): # Original translation. self.assertGettext('Date/time', 'Datum/Zeit') # Different translation. with self.modify_settings(INSTALLED_APPS={'append': 'i18n.resolution'}): # Force refreshing translations. activate('de') # Doesn't work because it's added later in the list. self.assertGettext('Date/time', 'Datum/Zeit') with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.admin.apps.SimpleAdminConfig'}): # Force refreshing translations. activate('de') # Unless the original is removed from the list. self.assertGettext('Date/time', 'Datum/Zeit (APP)') @override_settings(LOCALE_PATHS=extended_locale_paths) class LocalePathsResolutionOrderI18NTests(ResolutionOrderI18NTests): def test_locale_paths_translation(self): self.assertGettext('Time', 'LOCALE_PATHS') def test_locale_paths_override_app_translation(self): with self.settings(INSTALLED_APPS=['i18n.resolution']): self.assertGettext('Time', 'LOCALE_PATHS') class DjangoFallbackResolutionOrderI18NTests(ResolutionOrderI18NTests): def test_django_fallback(self): self.assertEqual(gettext('Date/time'), 'Datum/Zeit') @override_settings(INSTALLED_APPS=['i18n.territorial_fallback']) class TranslationFallbackI18NTests(ResolutionOrderI18NTests): def test_sparse_territory_catalog(self): """ Untranslated strings for territorial language variants use the translations of the generic language. In this case, the de-de translation falls back to de. """ with translation.override('de-de'): self.assertGettext('Test 1 (en)', '(de-de)') self.assertGettext('Test 2 (en)', '(de)') class TestModels(TestCase): def test_lazy(self): tm = TestModel() tm.save() def test_safestr(self): c = Company(cents_paid=12, products_delivered=1) c.name = SafeString('Iñtërnâtiônàlizætiøn1') c.save() class TestLanguageInfo(SimpleTestCase): def test_localized_language_info(self): li = get_language_info('de') self.assertEqual(li['code'], 'de') self.assertEqual(li['name_local'], 'Deutsch') self.assertEqual(li['name'], 'German') self.assertIs(li['bidi'], False) def test_unknown_language_code(self): with self.assertRaisesMessage(KeyError, "Unknown language code xx"): get_language_info('xx') with translation.override('xx'): # A language with no translation catalogs should fallback to the # untranslated string. 
self.assertEqual(gettext("Title"), "Title") def test_unknown_only_country_code(self): li = get_language_info('de-xx') self.assertEqual(li['code'], 'de') self.assertEqual(li['name_local'], 'Deutsch') self.assertEqual(li['name'], 'German') self.assertIs(li['bidi'], False) def test_unknown_language_code_and_country_code(self): with self.assertRaisesMessage(KeyError, "Unknown language code xx-xx and xx"): get_language_info('xx-xx') def test_fallback_language_code(self): """ get_language_info return the first fallback language info if the lang_info struct does not contain the 'name' key. """ li = get_language_info('zh-my') self.assertEqual(li['code'], 'zh-hans') li = get_language_info('zh-hans') self.assertEqual(li['code'], 'zh-hans') @override_settings( USE_I18N=True, LANGUAGES=[ ('en', 'English'), ('fr', 'French'), ], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='i18n.urls', ) class LocaleMiddlewareTests(TestCase): def test_streaming_response(self): # Regression test for #5241 response = self.client.get('/fr/streaming/') self.assertContains(response, "Oui/Non") response = self.client.get('/en/streaming/') self.assertContains(response, "Yes/No") @override_settings( USE_I18N=True, LANGUAGES=[ ('en', 'English'), ('de', 'German'), ('fr', 'French'), ], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='i18n.urls_default_unprefixed', LANGUAGE_CODE='en', ) class UnprefixedDefaultLanguageTests(SimpleTestCase): def test_default_lang_without_prefix(self): """ With i18n_patterns(..., prefix_default_language=False), the default language (settings.LANGUAGE_CODE) should be accessible without a prefix. """ response = self.client.get('/simple/') self.assertEqual(response.content, b'Yes') def test_other_lang_with_prefix(self): response = self.client.get('/fr/simple/') self.assertEqual(response.content, b'Oui') def test_unprefixed_language_other_than_accept_language(self): response = self.client.get('/simple/', HTTP_ACCEPT_LANGUAGE='fr') self.assertEqual(response.content, b'Yes') def test_page_with_dash(self): # A page starting with /de* shouldn't match the 'de' language code. response = self.client.get('/de-simple-page/') self.assertEqual(response.content, b'Yes') def test_no_redirect_on_404(self): """ A request for a nonexistent URL shouldn't cause a redirect to /<default_language>/<request_url> when prefix_default_language=False and /<default_language>/<request_url> has a URL match (#27402). """ # A match for /group1/group2/ must exist for this to act as a # regression test. 
response = self.client.get('/group1/group2/') self.assertEqual(response.status_code, 200) response = self.client.get('/nonexistent/') self.assertEqual(response.status_code, 404) @override_settings( USE_I18N=True, LANGUAGES=[ ('bg', 'Bulgarian'), ('en-us', 'English'), ('pt-br', 'Portuguese (Brazil)'), ], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='i18n.urls' ) class CountrySpecificLanguageTests(SimpleTestCase): rf = RequestFactory() def test_check_for_language(self): self.assertTrue(check_for_language('en')) self.assertTrue(check_for_language('en-us')) self.assertTrue(check_for_language('en-US')) self.assertFalse(check_for_language('en_US')) self.assertTrue(check_for_language('be')) self.assertTrue(check_for_language('be@latin')) self.assertTrue(check_for_language('sr-RS@latin')) self.assertTrue(check_for_language('sr-RS@12345')) self.assertFalse(check_for_language('en-ü')) self.assertFalse(check_for_language('en\x00')) self.assertFalse(check_for_language(None)) self.assertFalse(check_for_language('be@ ')) # Specifying encoding is not supported (Django enforces UTF-8) self.assertFalse(check_for_language('tr-TR.UTF-8')) self.assertFalse(check_for_language('tr-TR.UTF8')) self.assertFalse(check_for_language('de-DE.utf-8')) def test_check_for_language_null(self): self.assertIs(trans_null.check_for_language('en'), True) def test_get_language_from_request(self): # issue 19919 r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'en-US,en;q=0.8,bg;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('en-us', lang) r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'bg-bg,en-US;q=0.8,en;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('bg', lang) def test_get_language_from_request_null(self): lang = trans_null.get_language_from_request(None) self.assertEqual(lang, 'en') with override_settings(LANGUAGE_CODE='de'): lang = trans_null.get_language_from_request(None) self.assertEqual(lang, 'de') def test_specific_language_codes(self): # issue 11915 r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt,en-US;q=0.8,en;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('pt-br', lang) r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-pt,en-US;q=0.8,en;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('pt-br', lang) class TranslationFilesMissing(SimpleTestCase): def setUp(self): super().setUp() self.gettext_find_builtin = gettext_module.find def tearDown(self): gettext_module.find = self.gettext_find_builtin super().tearDown() def patchGettextFind(self): gettext_module.find = lambda *args, **kw: None def test_failure_finding_default_mo_files(self): """OSError is raised if the default language is unparseable.""" self.patchGettextFind() trans_real._translations = {} with self.assertRaises(OSError): activate('en') class NonDjangoLanguageTests(SimpleTestCase): """ A language non present in default Django languages can still be installed/used by a Django project. 
""" @override_settings( USE_I18N=True, LANGUAGES=[ ('en-us', 'English'), ('xxx', 'Somelanguage'), ], LANGUAGE_CODE='xxx', LOCALE_PATHS=[os.path.join(here, 'commands', 'locale')], ) def test_non_django_language(self): self.assertEqual(get_language(), 'xxx') self.assertEqual(gettext("year"), "reay") @override_settings(USE_I18N=True) def test_check_for_language(self): with tempfile.TemporaryDirectory() as app_dir: os.makedirs(os.path.join(app_dir, 'locale', 'dummy_Lang', 'LC_MESSAGES')) open(os.path.join(app_dir, 'locale', 'dummy_Lang', 'LC_MESSAGES', 'django.mo'), 'w').close() app_config = AppConfig('dummy_app', AppModuleStub(__path__=[app_dir])) with mock.patch('django.apps.apps.get_app_configs', return_value=[app_config]): self.assertIs(check_for_language('dummy-lang'), True) @override_settings( USE_I18N=True, LANGUAGES=[ ('en-us', 'English'), # xyz language has no locale files ('xyz', 'XYZ'), ], ) @translation.override('xyz') def test_plural_non_django_language(self): self.assertEqual(get_language(), 'xyz') self.assertEqual(ngettext('year', 'years', 2), 'years') @override_settings(USE_I18N=True) class WatchForTranslationChangesTests(SimpleTestCase): @override_settings(USE_I18N=False) def test_i18n_disabled(self): mocked_sender = mock.MagicMock() watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_not_called() def test_i18n_enabled(self): mocked_sender = mock.MagicMock() watch_for_translation_changes(mocked_sender) self.assertGreater(mocked_sender.watch_dir.call_count, 1) def test_i18n_locale_paths(self): mocked_sender = mock.MagicMock() with tempfile.TemporaryDirectory() as app_dir: with self.settings(LOCALE_PATHS=[app_dir]): watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_any_call(Path(app_dir), '**/*.mo') def test_i18n_app_dirs(self): mocked_sender = mock.MagicMock() with self.settings(INSTALLED_APPS=['tests.i18n.sampleproject']): watch_for_translation_changes(mocked_sender) project_dir = Path(__file__).parent / 'sampleproject' / 'locale' mocked_sender.watch_dir.assert_any_call(project_dir, '**/*.mo') def test_i18n_app_dirs_ignore_django_apps(self): mocked_sender = mock.MagicMock() with self.settings(INSTALLED_APPS=['django.contrib.admin']): watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_called_once_with(Path('locale'), '**/*.mo') def test_i18n_local_locale(self): mocked_sender = mock.MagicMock() watch_for_translation_changes(mocked_sender) locale_dir = Path(__file__).parent / 'locale' mocked_sender.watch_dir.assert_any_call(locale_dir, '**/*.mo') class TranslationFileChangedTests(SimpleTestCase): def setUp(self): self.gettext_translations = gettext_module._translations.copy() self.trans_real_translations = trans_real._translations.copy() def tearDown(self): gettext._translations = self.gettext_translations trans_real._translations = self.trans_real_translations def test_ignores_non_mo_files(self): gettext_module._translations = {'foo': 'bar'} path = Path('test.py') self.assertIsNone(translation_file_changed(None, path)) self.assertEqual(gettext_module._translations, {'foo': 'bar'}) def test_resets_cache_with_mo_files(self): gettext_module._translations = {'foo': 'bar'} trans_real._translations = {'foo': 'bar'} trans_real._default = 1 trans_real._active = False path = Path('test.mo') self.assertIs(translation_file_changed(None, path), True) self.assertEqual(gettext_module._translations, {}) self.assertEqual(trans_real._translations, {}) self.assertIsNone(trans_real._default) 
self.assertIsInstance(trans_real._active, Local) class UtilsTests(SimpleTestCase): def test_round_away_from_one(self): tests = [ (0, 0), (0., 0), (0.25, 0), (0.5, 0), (0.75, 0), (1, 1), (1., 1), (1.25, 2), (1.5, 2), (1.75, 2), (-0., 0), (-0.25, -1), (-0.5, -1), (-0.75, -1), (-1, -1), (-1., -1), (-1.25, -2), (-1.5, -2), (-1.75, -2), ] for value, expected in tests: with self.subTest(value=value): self.assertEqual(round_away_from_one(value), expected)
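

# Illustrative sketch, not Django's actual source: based solely on the
# value/expected table in UtilsTests.test_round_away_from_one() above,
# round_away_from_one() appears to floor values below 1 and ceil values at
# or above 1. The helper name below is hypothetical and is not used by the
# tests; it exists only to make the expected rounding behaviour explicit.
def _round_away_from_one_reference(value):
    import math  # local import keeps the sketch self-contained
    return int(math.floor(value)) if value < 1 else int(math.ceil(value))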
851a0e89ae1e1be11b46c032cc071ae87e093adc98a34e134a20cbe60899f9ee
import datetime from django.contrib import admin from django.contrib.admin.models import LogEntry from django.contrib.admin.options import IncorrectLookupParameters from django.contrib.admin.templatetags.admin_list import pagination from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.admin.views.main import ( ALL_VAR, IS_POPUP_VAR, ORDER_VAR, PAGE_VAR, SEARCH_VAR, TO_FIELD_VAR, ) from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from django.contrib.messages.storage.cookie import CookieStorage from django.db import connection, models from django.db.models import F, Field, IntegerField from django.db.models.functions import Upper from django.db.models.lookups import Contains, Exact from django.template import Context, Template, TemplateSyntaxError from django.test import TestCase, override_settings from django.test.client import RequestFactory from django.test.utils import ( CaptureQueriesContext, isolate_apps, register_lookup, ) from django.urls import reverse from django.utils import formats from .admin import ( BandAdmin, ChildAdmin, ChordsBandAdmin, ConcertAdmin, CustomPaginationAdmin, CustomPaginator, DynamicListDisplayChildAdmin, DynamicListDisplayLinksChildAdmin, DynamicListFilterChildAdmin, DynamicSearchFieldsChildAdmin, EmptyValueChildAdmin, EventAdmin, FilteredChildAdmin, GroupAdmin, InvitationAdmin, NoListDisplayLinksParentAdmin, ParentAdmin, QuartetAdmin, SwallowAdmin, site as custom_site, ) from .models import ( Band, CharPK, Child, ChordsBand, ChordsMusician, Concert, CustomIdUser, Event, Genre, Group, Invitation, Membership, Musician, OrderedObject, Parent, Quartet, Swallow, SwallowOneToOne, UnorderedObject, ) def build_tbody_html(pk, href, extra_fields): return ( '<tbody><tr>' '<td class="action-checkbox">' '<input type="checkbox" name="_selected_action" value="{}" ' 'class="action-select"></td>' '<th class="field-name"><a href="{}">name</a></th>' '{}</tr></tbody>' ).format(pk, href, extra_fields) @override_settings(ROOT_URLCONF="admin_changelist.urls") class ChangeListTests(TestCase): factory = RequestFactory() @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', email='[email protected]', password='xxx') def _create_superuser(self, username): return User.objects.create_superuser(username=username, email='[email protected]', password='xxx') def _mocked_authenticated_request(self, url, user): request = self.factory.get(url) request.user = user return request def test_specified_ordering_by_f_expression(self): class OrderedByFBandAdmin(admin.ModelAdmin): list_display = ['name', 'genres', 'nr_of_members'] ordering = ( F('nr_of_members').desc(nulls_last=True), Upper(F('name')).asc(), F('genres').asc(), ) m = OrderedByFBandAdmin(Band, custom_site) request = self.factory.get('/band/') request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.get_ordering_field_columns(), {3: 'desc', 2: 'asc'}) def test_specified_ordering_by_f_expression_without_asc_desc(self): class OrderedByFBandAdmin(admin.ModelAdmin): list_display = ['name', 'genres', 'nr_of_members'] ordering = (F('nr_of_members'), Upper('name'), F('genres')) m = OrderedByFBandAdmin(Band, custom_site) request = self.factory.get('/band/') request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.get_ordering_field_columns(), {3: 'asc', 2: 'asc'}) def test_select_related_preserved(self): """ Regression test for #10348: 
ChangeList.get_queryset() shouldn't overwrite a custom select_related provided by ModelAdmin.get_queryset(). """ m = ChildAdmin(Child, custom_site) request = self.factory.get('/child/') request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.query.select_related, {'parent': {}}) def test_select_related_preserved_when_multi_valued_in_search_fields(self): parent = Parent.objects.create(name='Mary') Child.objects.create(parent=parent, name='Danielle') Child.objects.create(parent=parent, name='Daniel') m = ParentAdmin(Parent, custom_site) request = self.factory.get('/parent/', data={SEARCH_VAR: 'daniel'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 1) # select_related is preserved. self.assertEqual(cl.queryset.query.select_related, {'child': {}}) def test_select_related_as_tuple(self): ia = InvitationAdmin(Invitation, custom_site) request = self.factory.get('/invitation/') request.user = self.superuser cl = ia.get_changelist_instance(request) self.assertEqual(cl.queryset.query.select_related, {'player': {}}) def test_select_related_as_empty_tuple(self): ia = InvitationAdmin(Invitation, custom_site) ia.list_select_related = () request = self.factory.get('/invitation/') request.user = self.superuser cl = ia.get_changelist_instance(request) self.assertIs(cl.queryset.query.select_related, False) def test_get_select_related_custom_method(self): class GetListSelectRelatedAdmin(admin.ModelAdmin): list_display = ('band', 'player') def get_list_select_related(self, request): return ('band', 'player') ia = GetListSelectRelatedAdmin(Invitation, custom_site) request = self.factory.get('/invitation/') request.user = self.superuser cl = ia.get_changelist_instance(request) self.assertEqual(cl.queryset.query.select_related, {'player': {}, 'band': {}}) def test_result_list_empty_changelist_value(self): """ Regression test for #14982: EMPTY_CHANGELIST_VALUE should be honored for relationship fields """ new_child = Child.objects.create(name='name', parent=None) request = self.factory.get('/child/') request.user = self.superuser m = ChildAdmin(Child, custom_site) cl = m.get_changelist_instance(request) cl.formset = None template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl, 'opts': Child._meta}) table_output = template.render(context) link = reverse('admin:admin_changelist_child_change', args=(new_child.id,)) row_html = build_tbody_html(new_child.id, link, '<td class="field-parent nowrap">-</td>') self.assertNotEqual(table_output.find(row_html), -1, 'Failed to find expected row element: %s' % table_output) def test_result_list_set_empty_value_display_on_admin_site(self): """ Empty value display can be set on AdminSite. """ new_child = Child.objects.create(name='name', parent=None) request = self.factory.get('/child/') request.user = self.superuser # Set a new empty display value on AdminSite. admin.site.empty_value_display = '???' 
m = ChildAdmin(Child, admin.site) cl = m.get_changelist_instance(request) cl.formset = None template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl, 'opts': Child._meta}) table_output = template.render(context) link = reverse('admin:admin_changelist_child_change', args=(new_child.id,)) row_html = build_tbody_html(new_child.id, link, '<td class="field-parent nowrap">???</td>') self.assertNotEqual(table_output.find(row_html), -1, 'Failed to find expected row element: %s' % table_output) def test_result_list_set_empty_value_display_in_model_admin(self): """ Empty value display can be set in ModelAdmin or individual fields. """ new_child = Child.objects.create(name='name', parent=None) request = self.factory.get('/child/') request.user = self.superuser m = EmptyValueChildAdmin(Child, admin.site) cl = m.get_changelist_instance(request) cl.formset = None template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl, 'opts': Child._meta}) table_output = template.render(context) link = reverse('admin:admin_changelist_child_change', args=(new_child.id,)) row_html = build_tbody_html( new_child.id, link, '<td class="field-age_display">&amp;dagger;</td>' '<td class="field-age">-empty-</td>' ) self.assertNotEqual(table_output.find(row_html), -1, 'Failed to find expected row element: %s' % table_output) def test_result_list_html(self): """ Inclusion tag result_list generates a table when with default ModelAdmin settings. """ new_parent = Parent.objects.create(name='parent') new_child = Child.objects.create(name='name', parent=new_parent) request = self.factory.get('/child/') request.user = self.superuser m = ChildAdmin(Child, custom_site) cl = m.get_changelist_instance(request) cl.formset = None template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl, 'opts': Child._meta}) table_output = template.render(context) link = reverse('admin:admin_changelist_child_change', args=(new_child.id,)) row_html = build_tbody_html(new_child.id, link, '<td class="field-parent nowrap">%s</td>' % new_parent) self.assertNotEqual(table_output.find(row_html), -1, 'Failed to find expected row element: %s' % table_output) def test_result_list_editable_html(self): """ Regression tests for #11791: Inclusion tag result_list generates a table and this checks that the items are nested within the table element tags. Also a regression test for #13599, verifies that hidden fields when list_editable is enabled are rendered in a div outside the table. 
""" new_parent = Parent.objects.create(name='parent') new_child = Child.objects.create(name='name', parent=new_parent) request = self.factory.get('/child/') request.user = self.superuser m = ChildAdmin(Child, custom_site) # Test with list_editable fields m.list_display = ['id', 'name', 'parent'] m.list_display_links = ['id'] m.list_editable = ['name'] cl = m.get_changelist_instance(request) FormSet = m.get_changelist_formset(request) cl.formset = FormSet(queryset=cl.result_list) template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}') context = Context({'cl': cl, 'opts': Child._meta}) table_output = template.render(context) # make sure that hidden fields are in the correct place hiddenfields_div = ( '<div class="hiddenfields">' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">' '</div>' ) % new_child.id self.assertInHTML(hiddenfields_div, table_output, msg_prefix='Failed to find hidden fields') # make sure that list editable fields are rendered in divs correctly editable_name_field = ( '<input name="form-0-name" value="name" class="vTextField" ' 'maxlength="30" type="text" id="id_form-0-name">' ) self.assertInHTML( '<td class="field-name">%s</td>' % editable_name_field, table_output, msg_prefix='Failed to find "name" list_editable field', ) def test_result_list_editable(self): """ Regression test for #14312: list_editable with pagination """ new_parent = Parent.objects.create(name='parent') for i in range(1, 201): Child.objects.create(name='name %s' % i, parent=new_parent) request = self.factory.get('/child/', data={'p': -1}) # Anything outside range request.user = self.superuser m = ChildAdmin(Child, custom_site) # Test with list_editable fields m.list_display = ['id', 'name', 'parent'] m.list_display_links = ['id'] m.list_editable = ['name'] with self.assertRaises(IncorrectLookupParameters): m.get_changelist_instance(request) def test_custom_paginator(self): new_parent = Parent.objects.create(name='parent') for i in range(1, 201): Child.objects.create(name='name %s' % i, parent=new_parent) request = self.factory.get('/child/') request.user = self.superuser m = CustomPaginationAdmin(Child, custom_site) cl = m.get_changelist_instance(request) cl.get_results(request) self.assertIsInstance(cl.paginator, CustomPaginator) def test_no_duplicates_for_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. Basic ManyToMany. """ blues = Genre.objects.create(name='Blues') band = Band.objects.create(name='B.B. King Review', nr_of_members=11) band.genres.add(blues) band.genres.add(blues) m = BandAdmin(Band, custom_site) request = self.factory.get('/band/', data={'genres': blues.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one Group instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_through_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. With an intermediate model. 
""" lead = Musician.objects.create(name='Vox') band = Group.objects.create(name='The Hype') Membership.objects.create(group=band, music=lead, role='lead voice') Membership.objects.create(group=band, music=lead, role='bass player') m = GroupAdmin(Group, custom_site) request = self.factory.get('/group/', data={'members': lead.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one Group instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_through_m2m_at_second_level_in_list_filter(self): """ When using a ManyToMany in list_filter at the second level behind a ForeignKey, results shouldn't appear more than once. """ lead = Musician.objects.create(name='Vox') band = Group.objects.create(name='The Hype') Concert.objects.create(name='Woodstock', group=band) Membership.objects.create(group=band, music=lead, role='lead voice') Membership.objects.create(group=band, music=lead, role='bass player') m = ConcertAdmin(Concert, custom_site) request = self.factory.get('/concert/', data={'group__members': lead.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one Concert instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_inherited_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. Model managed in the admin inherits from the one that defines the relationship. """ lead = Musician.objects.create(name='John') four = Quartet.objects.create(name='The Beatles') Membership.objects.create(group=four, music=lead, role='lead voice') Membership.objects.create(group=four, music=lead, role='guitar player') m = QuartetAdmin(Quartet, custom_site) request = self.factory.get('/quartet/', data={'members': lead.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one Quartet instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_m2m_to_inherited_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. Target of the relationship inherits from another. """ lead = ChordsMusician.objects.create(name='Player A') three = ChordsBand.objects.create(name='The Chords Trio') Invitation.objects.create(band=three, player=lead, instrument='guitar') Invitation.objects.create(band=three, player=lead, instrument='bass') m = ChordsBandAdmin(ChordsBand, custom_site) request = self.factory.get('/chordsband/', data={'members': lead.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one ChordsBand instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. 
self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_non_unique_related_object_in_list_filter(self): """ Regressions tests for #15819: If a field listed in list_filters is a non-unique related object, results shouldn't appear more than once. """ parent = Parent.objects.create(name='Mary') # Two children with the same name Child.objects.create(parent=parent, name='Daniel') Child.objects.create(parent=parent, name='Daniel') m = ParentAdmin(Parent, custom_site) request = self.factory.get('/parent/', data={'child__name': 'Daniel'}) request.user = self.superuser cl = m.get_changelist_instance(request) # Exists() is applied. self.assertEqual(cl.queryset.count(), 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_changelist_search_form_validation(self): m = ConcertAdmin(Concert, custom_site) tests = [ ({SEARCH_VAR: '\x00'}, 'Null characters are not allowed.'), ({SEARCH_VAR: 'some\x00thing'}, 'Null characters are not allowed.'), ] for case, error in tests: with self.subTest(case=case): request = self.factory.get('/concert/', case) request.user = self.superuser request._messages = CookieStorage(request) m.get_changelist_instance(request) messages = [m.message for m in request._messages] self.assertEqual(1, len(messages)) self.assertEqual(error, messages[0]) def test_no_duplicates_for_non_unique_related_object_in_search_fields(self): """ Regressions tests for #15819: If a field listed in search_fields is a non-unique related object, Exists() must be applied. """ parent = Parent.objects.create(name='Mary') Child.objects.create(parent=parent, name='Danielle') Child.objects.create(parent=parent, name='Daniel') m = ParentAdmin(Parent, custom_site) request = self.factory.get('/parent/', data={SEARCH_VAR: 'daniel'}) request.user = self.superuser cl = m.get_changelist_instance(request) # Exists() is applied. self.assertEqual(cl.queryset.count(), 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_many_to_many_at_second_level_in_search_fields(self): """ When using a ManyToMany in search_fields at the second level behind a ForeignKey, Exists() must be applied and results shouldn't appear more than once. """ lead = Musician.objects.create(name='Vox') band = Group.objects.create(name='The Hype') Concert.objects.create(name='Woodstock', group=band) Membership.objects.create(group=band, music=lead, role='lead voice') Membership.objects.create(group=band, music=lead, role='bass player') m = ConcertAdmin(Concert, custom_site) request = self.factory.get('/concert/', data={SEARCH_VAR: 'vox'}) request.user = self.superuser cl = m.get_changelist_instance(request) # There's only one Concert instance self.assertEqual(cl.queryset.count(), 1) # Queryset must be deletable. 
self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_pk_in_search_fields(self): band = Group.objects.create(name='The Hype') Concert.objects.create(name='Woodstock', group=band) m = ConcertAdmin(Concert, custom_site) m.search_fields = ['group__pk'] request = self.factory.get('/concert/', data={SEARCH_VAR: band.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 1) request = self.factory.get('/concert/', data={SEARCH_VAR: band.pk + 5}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 0) def test_builtin_lookup_in_search_fields(self): band = Group.objects.create(name='The Hype') concert = Concert.objects.create(name='Woodstock', group=band) m = ConcertAdmin(Concert, custom_site) m.search_fields = ['name__iexact'] request = self.factory.get('/', data={SEARCH_VAR: 'woodstock'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [concert]) request = self.factory.get('/', data={SEARCH_VAR: 'wood'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, []) def test_custom_lookup_in_search_fields(self): band = Group.objects.create(name='The Hype') concert = Concert.objects.create(name='Woodstock', group=band) m = ConcertAdmin(Concert, custom_site) m.search_fields = ['group__name__cc'] with register_lookup(Field, Contains, lookup_name='cc'): request = self.factory.get('/', data={SEARCH_VAR: 'Hype'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [concert]) request = self.factory.get('/', data={SEARCH_VAR: 'Woodstock'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, []) def test_spanning_relations_with_custom_lookup_in_search_fields(self): hype = Group.objects.create(name='The Hype') concert = Concert.objects.create(name='Woodstock', group=hype) vox = Musician.objects.create(name='Vox', age=20) Membership.objects.create(music=vox, group=hype) # Register a custom lookup on IntegerField to ensure that field # traversing logic in ModelAdmin.get_search_results() works. with register_lookup(IntegerField, Exact, lookup_name='exactly'): m = ConcertAdmin(Concert, custom_site) m.search_fields = ['group__members__age__exactly'] request = self.factory.get('/', data={SEARCH_VAR: '20'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [concert]) request = self.factory.get('/', data={SEARCH_VAR: '21'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, []) def test_custom_lookup_with_pk_shortcut(self): self.assertEqual(CharPK._meta.pk.name, 'char_pk') # Not equal to 'pk'. 
m = admin.ModelAdmin(CustomIdUser, custom_site) abc = CharPK.objects.create(char_pk='abc') abcd = CharPK.objects.create(char_pk='abcd') m = admin.ModelAdmin(CharPK, custom_site) m.search_fields = ['pk__exact'] request = self.factory.get('/', data={SEARCH_VAR: 'abc'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [abc]) request = self.factory.get('/', data={SEARCH_VAR: 'abcd'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [abcd]) def test_no_exists_for_m2m_in_list_filter_without_params(self): """ If a ManyToManyField is in list_filter but isn't in any lookup params, the changelist's query shouldn't have Exists(). """ m = BandAdmin(Band, custom_site) for lookup_params in ({}, {'name': 'test'}): request = self.factory.get('/band/', lookup_params) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertNotIn(' EXISTS', str(cl.queryset.query)) # A ManyToManyField in params does have Exists() applied. request = self.factory.get('/band/', {'genres': '0'}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertIn(' EXISTS', str(cl.queryset.query)) def test_pagination(self): """ Regression tests for #12893: Pagination in admins changelist doesn't use queryset set by modeladmin. """ parent = Parent.objects.create(name='anything') for i in range(1, 31): Child.objects.create(name='name %s' % i, parent=parent) Child.objects.create(name='filtered %s' % i, parent=parent) request = self.factory.get('/child/') request.user = self.superuser # Test default queryset m = ChildAdmin(Child, custom_site) cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 60) self.assertEqual(cl.paginator.count, 60) self.assertEqual(list(cl.paginator.page_range), [1, 2, 3, 4, 5, 6]) # Test custom queryset m = FilteredChildAdmin(Child, custom_site) cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 30) self.assertEqual(cl.paginator.count, 30) self.assertEqual(list(cl.paginator.page_range), [1, 2, 3]) def test_computed_list_display_localization(self): """ Regression test for #13196: output of functions should be localized in the changelist. """ self.client.force_login(self.superuser) event = Event.objects.create(date=datetime.date.today()) response = self.client.get(reverse('admin:admin_changelist_event_changelist')) self.assertContains(response, formats.localize(event.date)) self.assertNotContains(response, str(event.date)) def test_dynamic_list_display(self): """ Regression tests for #14206: dynamic list_display support. 
""" parent = Parent.objects.create(name='parent') for i in range(10): Child.objects.create(name='child %s' % i, parent=parent) user_noparents = self._create_superuser('noparents') user_parents = self._create_superuser('parents') # Test with user 'noparents' m = custom_site._registry[Child] request = self._mocked_authenticated_request('/child/', user_noparents) response = m.changelist_view(request) self.assertNotContains(response, 'Parent object') list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ['name', 'age']) self.assertEqual(list_display_links, ['name']) # Test with user 'parents' m = DynamicListDisplayChildAdmin(Child, custom_site) request = self._mocked_authenticated_request('/child/', user_parents) response = m.changelist_view(request) self.assertContains(response, 'Parent object') custom_site.unregister(Child) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ('parent', 'name', 'age')) self.assertEqual(list_display_links, ['parent']) # Test default implementation custom_site.register(Child, ChildAdmin) m = custom_site._registry[Child] request = self._mocked_authenticated_request('/child/', user_noparents) response = m.changelist_view(request) self.assertContains(response, 'Parent object') def test_show_all(self): parent = Parent.objects.create(name='anything') for i in range(1, 31): Child.objects.create(name='name %s' % i, parent=parent) Child.objects.create(name='filtered %s' % i, parent=parent) # Add "show all" parameter to request request = self.factory.get('/child/', data={ALL_VAR: ''}) request.user = self.superuser # Test valid "show all" request (number of total objects is under max) m = ChildAdmin(Child, custom_site) m.list_max_show_all = 200 # 200 is the max we'll pass to ChangeList cl = m.get_changelist_instance(request) cl.get_results(request) self.assertEqual(len(cl.result_list), 60) # Test invalid "show all" request (number of total objects over max) # falls back to paginated pages m = ChildAdmin(Child, custom_site) m.list_max_show_all = 30 # 30 is the max we'll pass to ChangeList for this test cl = m.get_changelist_instance(request) cl.get_results(request) self.assertEqual(len(cl.result_list), 10) def test_dynamic_list_display_links(self): """ Regression tests for #16257: dynamic list_display_links support. 
""" parent = Parent.objects.create(name='parent') for i in range(1, 10): Child.objects.create(id=i, name='child %s' % i, parent=parent, age=i) m = DynamicListDisplayLinksChildAdmin(Child, custom_site) superuser = self._create_superuser('superuser') request = self._mocked_authenticated_request('/child/', superuser) response = m.changelist_view(request) for i in range(1, 10): link = reverse('admin:admin_changelist_child_change', args=(i,)) self.assertContains(response, '<a href="%s">%s</a>' % (link, i)) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ('parent', 'name', 'age')) self.assertEqual(list_display_links, ['age']) def test_no_list_display_links(self): """#15185 -- Allow no links from the 'change list' view grid.""" p = Parent.objects.create(name='parent') m = NoListDisplayLinksParentAdmin(Parent, custom_site) superuser = self._create_superuser('superuser') request = self._mocked_authenticated_request('/parent/', superuser) response = m.changelist_view(request) link = reverse('admin:admin_changelist_parent_change', args=(p.pk,)) self.assertNotContains(response, '<a href="%s">' % link) def test_clear_all_filters_link(self): self.client.force_login(self.superuser) url = reverse('admin:auth_user_changelist') response = self.client.get(url) self.assertNotContains(response, '&#10006; Clear all filters') link = '<a href="%s">&#10006; Clear all filters</a>' for data, href in ( ({'is_staff__exact': '0'}, '?'), ( {'is_staff__exact': '0', 'username__startswith': 'test'}, '?username__startswith=test', ), ( {'is_staff__exact': '0', SEARCH_VAR: 'test'}, '?%s=test' % SEARCH_VAR, ), ( {'is_staff__exact': '0', IS_POPUP_VAR: 'id'}, '?%s=id' % IS_POPUP_VAR, ), ): with self.subTest(data=data): response = self.client.get(url, data=data) self.assertContains(response, link % href) def test_clear_all_filters_link_callable_filter(self): self.client.force_login(self.superuser) url = reverse('admin:admin_changelist_band_changelist') response = self.client.get(url) self.assertNotContains(response, '&#10006; Clear all filters') link = '<a href="%s">&#10006; Clear all filters</a>' for data, href in ( ({'nr_of_members_partition': '5'}, '?'), ( {'nr_of_members_partition': 'more', 'name__startswith': 'test'}, '?name__startswith=test', ), ( {'nr_of_members_partition': '5', IS_POPUP_VAR: 'id'}, '?%s=id' % IS_POPUP_VAR, ), ): with self.subTest(data=data): response = self.client.get(url, data=data) self.assertContains(response, link % href) def test_no_clear_all_filters_link(self): self.client.force_login(self.superuser) url = reverse('admin:auth_user_changelist') link = '>&#10006; Clear all filters</a>' for data in ( {SEARCH_VAR: 'test'}, {ORDER_VAR: '-1'}, {TO_FIELD_VAR: 'id'}, {PAGE_VAR: '1'}, {IS_POPUP_VAR: '1'}, {'username__startswith': 'test'}, ): with self.subTest(data=data): response = self.client.get(url, data=data) self.assertNotContains(response, link) def test_tuple_list_display(self): swallow = Swallow.objects.create(origin='Africa', load='12.34', speed='22.2') swallow2 = Swallow.objects.create(origin='Africa', load='12.34', speed='22.2') swallow_o2o = SwallowOneToOne.objects.create(swallow=swallow2) model_admin = SwallowAdmin(Swallow, custom_site) superuser = self._create_superuser('superuser') request = self._mocked_authenticated_request('/swallow/', superuser) response = model_admin.changelist_view(request) # just want to ensure it doesn't blow up during rendering self.assertContains(response, 
str(swallow.origin)) self.assertContains(response, str(swallow.load)) self.assertContains(response, str(swallow.speed)) # Reverse one-to-one relations should work. self.assertContains(response, '<td class="field-swallowonetoone">-</td>') self.assertContains(response, '<td class="field-swallowonetoone">%s</td>' % swallow_o2o) def test_multiuser_edit(self): """ Simultaneous edits of list_editable fields on the changelist by different users must not result in one user's edits creating a new object instead of modifying the correct existing object (#11313). """ # To replicate this issue, simulate the following steps: # 1. User1 opens an admin changelist with list_editable fields. # 2. User2 edits object "Foo" such that it moves to another page in # the pagination order and saves. # 3. User1 edits object "Foo" and saves. # 4. The edit made by User1 does not get applied to object "Foo" but # instead is used to create a new object (bug). # For this test, order the changelist by the 'speed' attribute and # display 3 objects per page (SwallowAdmin.list_per_page = 3). # Setup the test to reflect the DB state after step 2 where User2 has # edited the first swallow object's speed from '4' to '1'. a = Swallow.objects.create(origin='Swallow A', load=4, speed=1) b = Swallow.objects.create(origin='Swallow B', load=2, speed=2) c = Swallow.objects.create(origin='Swallow C', load=5, speed=5) d = Swallow.objects.create(origin='Swallow D', load=9, speed=9) superuser = self._create_superuser('superuser') self.client.force_login(superuser) changelist_url = reverse('admin:admin_changelist_swallow_changelist') # Send the POST from User1 for step 3. It's still using the changelist # ordering from before User2's edits in step 2. data = { 'form-TOTAL_FORMS': '3', 'form-INITIAL_FORMS': '3', 'form-MIN_NUM_FORMS': '0', 'form-MAX_NUM_FORMS': '1000', 'form-0-uuid': str(d.pk), 'form-1-uuid': str(c.pk), 'form-2-uuid': str(a.pk), 'form-0-load': '9.0', 'form-0-speed': '9.0', 'form-1-load': '5.0', 'form-1-speed': '5.0', 'form-2-load': '5.0', 'form-2-speed': '4.0', '_save': 'Save', } response = self.client.post(changelist_url, data, follow=True, extra={'o': '-2'}) # The object User1 edited in step 3 is displayed on the changelist and # has the correct edits applied. self.assertContains(response, '1 swallow was changed successfully.') self.assertContains(response, a.origin) a.refresh_from_db() self.assertEqual(a.load, float(data['form-2-load'])) self.assertEqual(a.speed, float(data['form-2-speed'])) b.refresh_from_db() self.assertEqual(b.load, 2) self.assertEqual(b.speed, 2) c.refresh_from_db() self.assertEqual(c.load, float(data['form-1-load'])) self.assertEqual(c.speed, float(data['form-1-speed'])) d.refresh_from_db() self.assertEqual(d.load, float(data['form-0-load'])) self.assertEqual(d.speed, float(data['form-0-speed'])) # No new swallows were created. 
self.assertEqual(len(Swallow.objects.all()), 4) def test_get_edited_object_ids(self): a = Swallow.objects.create(origin='Swallow A', load=4, speed=1) b = Swallow.objects.create(origin='Swallow B', load=2, speed=2) c = Swallow.objects.create(origin='Swallow C', load=5, speed=5) superuser = self._create_superuser('superuser') self.client.force_login(superuser) changelist_url = reverse('admin:admin_changelist_swallow_changelist') m = SwallowAdmin(Swallow, custom_site) data = { 'form-TOTAL_FORMS': '3', 'form-INITIAL_FORMS': '3', 'form-MIN_NUM_FORMS': '0', 'form-MAX_NUM_FORMS': '1000', 'form-0-uuid': str(a.pk), 'form-1-uuid': str(b.pk), 'form-2-uuid': str(c.pk), 'form-0-load': '9.0', 'form-0-speed': '9.0', 'form-1-load': '5.0', 'form-1-speed': '5.0', 'form-2-load': '5.0', 'form-2-speed': '4.0', '_save': 'Save', } request = self.factory.post(changelist_url, data=data) pks = m._get_edited_object_pks(request, prefix='form') self.assertEqual(sorted(pks), sorted([str(a.pk), str(b.pk), str(c.pk)])) def test_get_list_editable_queryset(self): a = Swallow.objects.create(origin='Swallow A', load=4, speed=1) Swallow.objects.create(origin='Swallow B', load=2, speed=2) data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '2', 'form-MIN_NUM_FORMS': '0', 'form-MAX_NUM_FORMS': '1000', 'form-0-uuid': str(a.pk), 'form-0-load': '10', '_save': 'Save', } superuser = self._create_superuser('superuser') self.client.force_login(superuser) changelist_url = reverse('admin:admin_changelist_swallow_changelist') m = SwallowAdmin(Swallow, custom_site) request = self.factory.post(changelist_url, data=data) queryset = m._get_list_editable_queryset(request, prefix='form') self.assertEqual(queryset.count(), 1) data['form-0-uuid'] = 'INVALD_PRIMARY_KEY' # The unfiltered queryset is returned if there's invalid data. 
request = self.factory.post(changelist_url, data=data) queryset = m._get_list_editable_queryset(request, prefix='form') self.assertEqual(queryset.count(), 2) def test_get_list_editable_queryset_with_regex_chars_in_prefix(self): a = Swallow.objects.create(origin='Swallow A', load=4, speed=1) Swallow.objects.create(origin='Swallow B', load=2, speed=2) data = { 'form$-TOTAL_FORMS': '2', 'form$-INITIAL_FORMS': '2', 'form$-MIN_NUM_FORMS': '0', 'form$-MAX_NUM_FORMS': '1000', 'form$-0-uuid': str(a.pk), 'form$-0-load': '10', '_save': 'Save', } superuser = self._create_superuser('superuser') self.client.force_login(superuser) changelist_url = reverse('admin:admin_changelist_swallow_changelist') m = SwallowAdmin(Swallow, custom_site) request = self.factory.post(changelist_url, data=data) queryset = m._get_list_editable_queryset(request, prefix='form$') self.assertEqual(queryset.count(), 1) def test_changelist_view_list_editable_changed_objects_uses_filter(self): """list_editable edits use a filtered queryset to limit memory usage.""" a = Swallow.objects.create(origin='Swallow A', load=4, speed=1) Swallow.objects.create(origin='Swallow B', load=2, speed=2) data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '2', 'form-MIN_NUM_FORMS': '0', 'form-MAX_NUM_FORMS': '1000', 'form-0-uuid': str(a.pk), 'form-0-load': '10', '_save': 'Save', } superuser = self._create_superuser('superuser') self.client.force_login(superuser) changelist_url = reverse('admin:admin_changelist_swallow_changelist') with CaptureQueriesContext(connection) as context: response = self.client.post(changelist_url, data=data) self.assertEqual(response.status_code, 200) self.assertIn('WHERE', context.captured_queries[4]['sql']) self.assertIn('IN', context.captured_queries[4]['sql']) # Check only the first few characters since the UUID may have dashes. self.assertIn(str(a.pk)[:8], context.captured_queries[4]['sql']) def test_deterministic_order_for_unordered_model(self): """ The primary key is used in the ordering of the changelist's results to guarantee a deterministic order, even when the model doesn't have any default ordering defined (#17198). """ superuser = self._create_superuser('superuser') for counter in range(1, 51): UnorderedObject.objects.create(id=counter, bool=True) class UnorderedObjectAdmin(admin.ModelAdmin): list_per_page = 10 def check_results_order(ascending=False): custom_site.register(UnorderedObject, UnorderedObjectAdmin) model_admin = UnorderedObjectAdmin(UnorderedObject, custom_site) counter = 0 if ascending else 51 for page in range(1, 6): request = self._mocked_authenticated_request('/unorderedobject/?p=%s' % page, superuser) response = model_admin.changelist_view(request) for result in response.context_data['cl'].result_list: counter += 1 if ascending else -1 self.assertEqual(result.id, counter) custom_site.unregister(UnorderedObject) # When no order is defined at all, everything is ordered by '-pk'. check_results_order() # When an order field is defined but multiple records have the same # value for that field, make sure everything gets ordered by -pk as well. UnorderedObjectAdmin.ordering = ['bool'] check_results_order() # When order fields are defined, including the pk itself, use them. 
UnorderedObjectAdmin.ordering = ['bool', '-pk'] check_results_order() UnorderedObjectAdmin.ordering = ['bool', 'pk'] check_results_order(ascending=True) UnorderedObjectAdmin.ordering = ['-id', 'bool'] check_results_order() UnorderedObjectAdmin.ordering = ['id', 'bool'] check_results_order(ascending=True) def test_deterministic_order_for_model_ordered_by_its_manager(self): """ The primary key is used in the ordering of the changelist's results to guarantee a deterministic order, even when the model has a manager that defines a default ordering (#17198). """ superuser = self._create_superuser('superuser') for counter in range(1, 51): OrderedObject.objects.create(id=counter, bool=True, number=counter) class OrderedObjectAdmin(admin.ModelAdmin): list_per_page = 10 def check_results_order(ascending=False): custom_site.register(OrderedObject, OrderedObjectAdmin) model_admin = OrderedObjectAdmin(OrderedObject, custom_site) counter = 0 if ascending else 51 for page in range(1, 6): request = self._mocked_authenticated_request('/orderedobject/?p=%s' % page, superuser) response = model_admin.changelist_view(request) for result in response.context_data['cl'].result_list: counter += 1 if ascending else -1 self.assertEqual(result.id, counter) custom_site.unregister(OrderedObject) # When no order is defined at all, use the model's default ordering (i.e. 'number') check_results_order(ascending=True) # When an order field is defined but multiple records have the same # value for that field, make sure everything gets ordered by -pk as well. OrderedObjectAdmin.ordering = ['bool'] check_results_order() # When order fields are defined, including the pk itself, use them. OrderedObjectAdmin.ordering = ['bool', '-pk'] check_results_order() OrderedObjectAdmin.ordering = ['bool', 'pk'] check_results_order(ascending=True) OrderedObjectAdmin.ordering = ['-id', 'bool'] check_results_order() OrderedObjectAdmin.ordering = ['id', 'bool'] check_results_order(ascending=True) @isolate_apps('admin_changelist') def test_total_ordering_optimization(self): class Related(models.Model): unique_field = models.BooleanField(unique=True) class Meta: ordering = ('unique_field',) class Model(models.Model): unique_field = models.BooleanField(unique=True) unique_nullable_field = models.BooleanField(unique=True, null=True) related = models.ForeignKey(Related, models.CASCADE) other_related = models.ForeignKey(Related, models.CASCADE) related_unique = models.OneToOneField(Related, models.CASCADE) field = models.BooleanField() other_field = models.BooleanField() null_field = models.BooleanField(null=True) class Meta: unique_together = { ('field', 'other_field'), ('field', 'null_field'), ('related', 'other_related_id'), } class ModelAdmin(admin.ModelAdmin): def get_queryset(self, request): return Model.objects.none() request = self._mocked_authenticated_request('/', self.superuser) site = admin.AdminSite(name='admin') model_admin = ModelAdmin(Model, site) change_list = model_admin.get_changelist_instance(request) tests = ( ([], ['-pk']), # Unique non-nullable field. (['unique_field'], ['unique_field']), (['-unique_field'], ['-unique_field']), # Unique nullable field. (['unique_nullable_field'], ['unique_nullable_field', '-pk']), # Field. (['field'], ['field', '-pk']), # Related field introspection is not implemented. (['related__unique_field'], ['related__unique_field', '-pk']), # Related attname unique. (['related_unique_id'], ['related_unique_id']), # Related ordering introspection is not implemented. 
(['related_unique'], ['related_unique', '-pk']), # Composite unique. (['field', '-other_field'], ['field', '-other_field']), # Composite unique nullable. (['-field', 'null_field'], ['-field', 'null_field', '-pk']), # Composite unique and nullable. (['-field', 'null_field', 'other_field'], ['-field', 'null_field', 'other_field']), # Composite unique attnames. (['related_id', '-other_related_id'], ['related_id', '-other_related_id']), # Composite unique names. (['related', '-other_related_id'], ['related', '-other_related_id', '-pk']), ) # F() objects composite unique. total_ordering = [F('field'), F('other_field').desc(nulls_last=True)] # F() objects composite unique nullable. non_total_ordering = [F('field'), F('null_field').desc(nulls_last=True)] tests += ( (total_ordering, total_ordering), (non_total_ordering, non_total_ordering + ['-pk']), ) for ordering, expected in tests: with self.subTest(ordering=ordering): self.assertEqual(change_list._get_deterministic_ordering(ordering), expected) @isolate_apps('admin_changelist') def test_total_ordering_optimization_meta_constraints(self): class Related(models.Model): unique_field = models.BooleanField(unique=True) class Meta: ordering = ('unique_field',) class Model(models.Model): field_1 = models.BooleanField() field_2 = models.BooleanField() field_3 = models.BooleanField() field_4 = models.BooleanField() field_5 = models.BooleanField() field_6 = models.BooleanField() nullable_1 = models.BooleanField(null=True) nullable_2 = models.BooleanField(null=True) related_1 = models.ForeignKey(Related, models.CASCADE) related_2 = models.ForeignKey(Related, models.CASCADE) related_3 = models.ForeignKey(Related, models.CASCADE) related_4 = models.ForeignKey(Related, models.CASCADE) class Meta: constraints = [ *[ models.UniqueConstraint(fields=fields, name=''.join(fields)) for fields in ( ['field_1'], ['nullable_1'], ['related_1'], ['related_2_id'], ['field_2', 'field_3'], ['field_2', 'nullable_2'], ['field_2', 'related_3'], ['field_3', 'related_4_id'], ) ], models.CheckConstraint(check=models.Q(id__gt=0), name='foo'), models.UniqueConstraint( fields=['field_5'], condition=models.Q(id__gt=10), name='total_ordering_1', ), models.UniqueConstraint( fields=['field_6'], condition=models.Q(), name='total_ordering', ), ] class ModelAdmin(admin.ModelAdmin): def get_queryset(self, request): return Model.objects.none() request = self._mocked_authenticated_request('/', self.superuser) site = admin.AdminSite(name='admin') model_admin = ModelAdmin(Model, site) change_list = model_admin.get_changelist_instance(request) tests = ( # Unique non-nullable field. (['field_1'], ['field_1']), # Unique nullable field. (['nullable_1'], ['nullable_1', '-pk']), # Related attname unique. (['related_1_id'], ['related_1_id']), (['related_2_id'], ['related_2_id']), # Related ordering introspection is not implemented. (['related_1'], ['related_1', '-pk']), # Composite unique. (['-field_2', 'field_3'], ['-field_2', 'field_3']), # Composite unique nullable. (['field_2', '-nullable_2'], ['field_2', '-nullable_2', '-pk']), # Composite unique and nullable. ( ['field_2', '-nullable_2', 'field_3'], ['field_2', '-nullable_2', 'field_3'], ), # Composite field and related field name. (['field_2', '-related_3'], ['field_2', '-related_3', '-pk']), (['field_3', 'related_4'], ['field_3', 'related_4', '-pk']), # Composite field and related field attname. 
(['field_2', 'related_3_id'], ['field_2', 'related_3_id']), (['field_3', '-related_4_id'], ['field_3', '-related_4_id']), # Partial unique constraint is ignored. (['field_5'], ['field_5', '-pk']), # Unique constraint with an empty condition. (['field_6'], ['field_6']), ) for ordering, expected in tests: with self.subTest(ordering=ordering): self.assertEqual(change_list._get_deterministic_ordering(ordering), expected) def test_dynamic_list_filter(self): """ Regression tests for ticket #17646: dynamic list_filter support. """ parent = Parent.objects.create(name='parent') for i in range(10): Child.objects.create(name='child %s' % i, parent=parent) user_noparents = self._create_superuser('noparents') user_parents = self._create_superuser('parents') # Test with user 'noparents' m = DynamicListFilterChildAdmin(Child, custom_site) request = self._mocked_authenticated_request('/child/', user_noparents) response = m.changelist_view(request) self.assertEqual(response.context_data['cl'].list_filter, ['name', 'age']) # Test with user 'parents' m = DynamicListFilterChildAdmin(Child, custom_site) request = self._mocked_authenticated_request('/child/', user_parents) response = m.changelist_view(request) self.assertEqual(response.context_data['cl'].list_filter, ('parent', 'name', 'age')) def test_dynamic_search_fields(self): child = self._create_superuser('child') m = DynamicSearchFieldsChildAdmin(Child, custom_site) request = self._mocked_authenticated_request('/child/', child) response = m.changelist_view(request) self.assertEqual(response.context_data['cl'].search_fields, ('name', 'age')) def test_pagination_page_range(self): """ Regression tests for ticket #15653: ensure the number of pages generated for changelist views are correct. """ # instantiating and setting up ChangeList object m = GroupAdmin(Group, custom_site) request = self.factory.get('/group/') request.user = self.superuser cl = m.get_changelist_instance(request) cl.list_per_page = 10 ELLIPSIS = cl.paginator.ELLIPSIS for number, pages, expected in [ (1, 1, []), (1, 2, [1, 2]), (6, 11, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]), (6, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]), (6, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, ELLIPSIS, 12, 13]), (7, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]), (7, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]), (7, 14, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, ELLIPSIS, 13, 14]), (8, 13, [1, 2, ELLIPSIS, 5, 6, 7, 8, 9, 10, 11, 12, 13]), (8, 14, [1, 2, ELLIPSIS, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]), (8, 15, [1, 2, ELLIPSIS, 5, 6, 7, 8, 9, 10, 11, ELLIPSIS, 14, 15]), ]: with self.subTest(number=number, pages=pages): # assuming exactly `pages * cl.list_per_page` objects Group.objects.all().delete() for i in range(pages * cl.list_per_page): Group.objects.create(name='test band') # setting page number and calculating page range cl.page_num = number cl.get_results(request) self.assertEqual(list(pagination(cl)['page_range']), expected) def test_object_tools_displayed_no_add_permission(self): """ When ModelAdmin.has_add_permission() returns False, the object-tools block is still shown. """ superuser = self._create_superuser('superuser') m = EventAdmin(Event, custom_site) request = self._mocked_authenticated_request('/event/', superuser) self.assertFalse(m.has_add_permission(request)) response = m.changelist_view(request) self.assertIn('<ul class="object-tools">', response.rendered_content) # The "Add" button inside the object-tools shouldn't appear. 
self.assertNotIn('Add ', response.rendered_content) class GetAdminLogTests(TestCase): def test_custom_user_pk_not_named_id(self): """ {% get_admin_log %} works if the user model's primary key isn't named 'id'. """ context = Context({'user': CustomIdUser()}) template = Template('{% load log %}{% get_admin_log 10 as admin_log for_user user %}') # This template tag just logs. self.assertEqual(template.render(context), '') def test_no_user(self): """{% get_admin_log %} works without specifying a user.""" user = User(username='jondoe', password='secret', email='[email protected]') user.save() ct = ContentType.objects.get_for_model(User) LogEntry.objects.log_action(user.pk, ct.pk, user.pk, repr(user), 1) t = Template( '{% load log %}' '{% get_admin_log 100 as admin_log %}' '{% for entry in admin_log %}' '{{ entry|safe }}' '{% endfor %}' ) self.assertEqual(t.render(Context({})), 'Added “<User: jondoe>”.') def test_missing_args(self): msg = "'get_admin_log' statements require two arguments" with self.assertRaisesMessage(TemplateSyntaxError, msg): Template('{% load log %}{% get_admin_log 10 as %}') def test_non_integer_limit(self): msg = "First argument to 'get_admin_log' must be an integer" with self.assertRaisesMessage(TemplateSyntaxError, msg): Template('{% load log %}{% get_admin_log "10" as admin_log for_user user %}') def test_without_as(self): msg = "Second argument to 'get_admin_log' must be 'as'" with self.assertRaisesMessage(TemplateSyntaxError, msg): Template('{% load log %}{% get_admin_log 10 ad admin_log for_user user %}') def test_without_for_user(self): msg = "Fourth argument to 'get_admin_log' must be 'for_user'" with self.assertRaisesMessage(TemplateSyntaxError, msg): Template('{% load log %}{% get_admin_log 10 as admin_log foruser user %}') @override_settings(ROOT_URLCONF='admin_changelist.urls') class SeleniumTests(AdminSeleniumTestCase): available_apps = ['admin_changelist'] + AdminSeleniumTestCase.available_apps def setUp(self): User.objects.create_superuser(username='super', password='secret', email=None) def test_add_row_selection(self): """ The status line for selected rows gets updated correctly (#22038). 
""" self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:auth_user_changelist')) form_id = '#changelist-form' # Test amount of rows in the Changelist rows = self.selenium.find_elements_by_css_selector( '%s #result_list tbody tr' % form_id ) self.assertEqual(len(rows), 1) row = rows[0] selection_indicator = self.selenium.find_element_by_css_selector( '%s .action-counter' % form_id ) all_selector = self.selenium.find_element_by_id('action-toggle') row_selector = self.selenium.find_element_by_css_selector( '%s #result_list tbody tr:first-child .action-select' % form_id ) # Test current selection self.assertEqual(selection_indicator.text, "0 of 1 selected") self.assertIs(all_selector.get_property('checked'), False) self.assertEqual(row.get_attribute('class'), '') # Select a row and check again row_selector.click() self.assertEqual(selection_indicator.text, "1 of 1 selected") self.assertIs(all_selector.get_property('checked'), True) self.assertEqual(row.get_attribute('class'), 'selected') # Deselect a row and check again row_selector.click() self.assertEqual(selection_indicator.text, "0 of 1 selected") self.assertIs(all_selector.get_property('checked'), False) self.assertEqual(row.get_attribute('class'), '') def test_modifier_allows_multiple_section(self): """ Selecting a row and then selecting another row whilst holding shift should select all rows in-between. """ from selenium.webdriver.common.action_chains import ActionChains from selenium.webdriver.common.keys import Keys Parent.objects.bulk_create([Parent(name='parent%d' % i) for i in range(5)]) self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_changelist_parent_changelist')) checkboxes = self.selenium.find_elements_by_css_selector('tr input.action-select') self.assertEqual(len(checkboxes), 5) for c in checkboxes: self.assertIs(c.get_property('checked'), False) # Check first row. Hold-shift and check next-to-last row. 
        checkboxes[0].click()
        ActionChains(self.selenium).key_down(Keys.SHIFT).click(checkboxes[-2]).key_up(Keys.SHIFT).perform()
        for c in checkboxes[:-1]:
            self.assertIs(c.get_property('checked'), True)
        self.assertIs(checkboxes[-1].get_property('checked'), False)

    def test_select_all_across_pages(self):
        Parent.objects.bulk_create([Parent(name='parent%d' % i) for i in range(101)])
        self.admin_login(username='super', password='secret')
        self.selenium.get(self.live_server_url + reverse('admin:admin_changelist_parent_changelist'))

        selection_indicator = self.selenium.find_element_by_css_selector('.action-counter')
        select_all_indicator = self.selenium.find_element_by_css_selector('.actions .all')
        question = self.selenium.find_element_by_css_selector('.actions > .question')
        clear = self.selenium.find_element_by_css_selector('.actions > .clear')
        select_all = self.selenium.find_element_by_id('action-toggle')
        select_across = self.selenium.find_element_by_name('select_across')

        self.assertIs(question.is_displayed(), False)
        self.assertIs(clear.is_displayed(), False)
        self.assertIs(select_all.get_property('checked'), False)
        self.assertEqual(select_across.get_property('value'), '0')
        self.assertIs(selection_indicator.is_displayed(), True)
        self.assertEqual(selection_indicator.text, '0 of 100 selected')
        self.assertIs(select_all_indicator.is_displayed(), False)

        select_all.click()
        self.assertIs(question.is_displayed(), True)
        self.assertIs(clear.is_displayed(), False)
        self.assertIs(select_all.get_property('checked'), True)
        self.assertEqual(select_across.get_property('value'), '0')
        self.assertIs(selection_indicator.is_displayed(), True)
        self.assertEqual(selection_indicator.text, '100 of 100 selected')
        self.assertIs(select_all_indicator.is_displayed(), False)

        question.click()
        self.assertIs(question.is_displayed(), False)
        self.assertIs(clear.is_displayed(), True)
        self.assertIs(select_all.get_property('checked'), True)
        self.assertEqual(select_across.get_property('value'), '1')
        self.assertIs(selection_indicator.is_displayed(), False)
        self.assertIs(select_all_indicator.is_displayed(), True)

        clear.click()
        self.assertIs(question.is_displayed(), False)
        self.assertIs(clear.is_displayed(), False)
        self.assertIs(select_all.get_property('checked'), False)
        self.assertEqual(select_across.get_property('value'), '0')
        self.assertIs(selection_indicator.is_displayed(), True)
        self.assertEqual(selection_indicator.text, '0 of 100 selected')
        self.assertIs(select_all_indicator.is_displayed(), False)

    def test_actions_warn_on_pending_edits(self):
        Parent.objects.create(name='foo')

        self.admin_login(username='super', password='secret')
        self.selenium.get(self.live_server_url + reverse('admin:admin_changelist_parent_changelist'))

        name_input = self.selenium.find_element_by_id('id_form-0-name')
        name_input.clear()
        name_input.send_keys('bar')
        self.selenium.find_element_by_id('action-toggle').click()
        self.selenium.find_element_by_name('index').click()  # Go
        alert = self.selenium.switch_to.alert
        try:
            self.assertEqual(
                alert.text,
                'You have unsaved changes on individual editable fields. If you '
                'run an action, your unsaved changes will be lost.'
) finally: alert.dismiss() def test_save_with_changes_warns_on_pending_action(self): from selenium.webdriver.support.ui import Select Parent.objects.create(name='parent') self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_changelist_parent_changelist')) name_input = self.selenium.find_element_by_id('id_form-0-name') name_input.clear() name_input.send_keys('other name') Select( self.selenium.find_element_by_name('action') ).select_by_value('delete_selected') self.selenium.find_element_by_name('_save').click() alert = self.selenium.switch_to.alert try: self.assertEqual( alert.text, 'You have selected an action, but you haven’t saved your ' 'changes to individual fields yet. Please click OK to save. ' 'You’ll need to re-run the action.', ) finally: alert.dismiss() def test_save_without_changes_warns_on_pending_action(self): from selenium.webdriver.support.ui import Select Parent.objects.create(name='parent') self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_changelist_parent_changelist')) Select( self.selenium.find_element_by_name('action') ).select_by_value('delete_selected') self.selenium.find_element_by_name('_save').click() alert = self.selenium.switch_to.alert try: self.assertEqual( alert.text, 'You have selected an action, and you haven’t made any ' 'changes on individual fields. You’re probably looking for ' 'the Go button rather than the Save button.', ) finally: alert.dismiss()
import importlib import inspect import os import re import sys import tempfile import threading from io import StringIO from pathlib import Path from unittest import mock from django.core import mail from django.core.files.uploadedfile import SimpleUploadedFile from django.db import DatabaseError, connection from django.http import Http404 from django.shortcuts import render from django.template import TemplateDoesNotExist from django.test import RequestFactory, SimpleTestCase, override_settings from django.test.utils import LoggingCaptureMixin from django.urls import path, reverse from django.urls.converters import IntConverter from django.utils.functional import SimpleLazyObject from django.utils.regex_helper import _lazy_re_compile from django.utils.safestring import mark_safe from django.views.debug import ( CallableSettingWrapper, ExceptionCycleWarning, ExceptionReporter, Path as DebugPath, SafeExceptionReporterFilter, default_urlconf, get_default_exception_reporter_filter, technical_404_response, technical_500_response, ) from django.views.decorators.debug import ( sensitive_post_parameters, sensitive_variables, ) from ..views import ( custom_exception_reporter_filter_view, index_page, multivalue_dict_key_error, non_sensitive_view, paranoid_view, sensitive_args_function_caller, sensitive_kwargs_function_caller, sensitive_method_view, sensitive_view, ) class User: def __str__(self): return 'jacob' class WithoutEmptyPathUrls: urlpatterns = [path('url/', index_page, name='url')] class CallableSettingWrapperTests(SimpleTestCase): """ Unittests for CallableSettingWrapper """ def test_repr(self): class WrappedCallable: def __repr__(self): return "repr from the wrapped callable" def __call__(self): pass actual = repr(CallableSettingWrapper(WrappedCallable())) self.assertEqual(actual, "repr from the wrapped callable") @override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls') class DebugViewTests(SimpleTestCase): def test_files(self): with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/raises/') self.assertEqual(response.status_code, 500) data = { 'file_data.txt': SimpleUploadedFile('file_data.txt', b'haha'), } with self.assertLogs('django.request', 'ERROR'): response = self.client.post('/raises/', data) self.assertContains(response, 'file_data.txt', status_code=500) self.assertNotContains(response, 'haha', status_code=500) def test_400(self): # When DEBUG=True, technical_500_template() is called. with self.assertLogs('django.security', 'WARNING'): response = self.client.get('/raises400/') self.assertContains(response, '<div class="context" id="', status_code=400) def test_400_bad_request(self): # When DEBUG=True, technical_500_template() is called. with self.assertLogs('django.request', 'WARNING') as cm: response = self.client.get('/raises400_bad_request/') self.assertContains(response, '<div class="context" id="', status_code=400) self.assertEqual( cm.records[0].getMessage(), 'Malformed request syntax: /raises400_bad_request/', ) # Ensure no 403.html template exists to test the default case. @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', }]) def test_403(self): response = self.client.get('/raises403/') self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403) # Set up a test 403.html template. 
@override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'OPTIONS': { 'loaders': [ ('django.template.loaders.locmem.Loader', { '403.html': 'This is a test template for a 403 error ({{ exception }}).', }), ], }, }]) def test_403_template(self): response = self.client.get('/raises403/') self.assertContains(response, 'test template', status_code=403) self.assertContains(response, '(Insufficient Permissions).', status_code=403) def test_404(self): response = self.client.get('/raises404/') self.assertNotContains( response, '<pre class="exception_value">', status_code=404, ) self.assertContains( response, '<p>The current path, <code>not-in-urls</code>, didn’t match any ' 'of these.</p>', status_code=404, html=True, ) def test_404_not_in_urls(self): response = self.client.get('/not-in-urls') self.assertNotContains(response, "Raised by:", status_code=404) self.assertNotContains( response, '<pre class="exception_value">', status_code=404, ) self.assertContains(response, "Django tried these URL patterns", status_code=404) self.assertContains( response, '<p>The current path, <code>not-in-urls</code>, didn’t match any ' 'of these.</p>', status_code=404, html=True, ) # Pattern and view name of a RegexURLPattern appear. self.assertContains(response, r"^regex-post/(?P&lt;pk&gt;[0-9]+)/$", status_code=404) self.assertContains(response, "[name='regex-post']", status_code=404) # Pattern and view name of a RoutePattern appear. self.assertContains(response, r"path-post/&lt;int:pk&gt;/", status_code=404) self.assertContains(response, "[name='path-post']", status_code=404) @override_settings(ROOT_URLCONF=WithoutEmptyPathUrls) def test_404_empty_path_not_in_urls(self): response = self.client.get('/') self.assertContains( response, '<p>The empty path didn’t match any of these.</p>', status_code=404, html=True, ) def test_technical_404(self): response = self.client.get('/technical404/') self.assertContains( response, '<pre class="exception_value">Testing technical 404.</pre>', status_code=404, html=True, ) self.assertContains(response, "Raised by:", status_code=404) self.assertContains(response, "view_tests.views.technical404", status_code=404) self.assertContains( response, '<p>The current path, <code>technical404/</code>, matched the ' 'last one.</p>', status_code=404, html=True, ) def test_classbased_technical_404(self): response = self.client.get('/classbased404/') self.assertContains(response, "Raised by:", status_code=404) self.assertContains(response, "view_tests.views.Http404View", status_code=404) def test_non_l10ned_numeric_ids(self): """ Numeric IDs and fancy traceback context blocks line numbers shouldn't be localized. """ with self.settings(DEBUG=True, USE_L10N=True): with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/raises500/') # We look for a HTML fragment of the form # '<div class="context" id="c38123208">', not '<div class="context" id="c38,123,208"' self.assertContains(response, '<div class="context" id="', status_code=500) match = re.search(b'<div class="context" id="(?P<id>[^"]+)">', response.content) self.assertIsNotNone(match) id_repr = match['id'] self.assertFalse( re.search(b'[^c0-9]', id_repr), "Numeric IDs in debug response HTML page shouldn't be localized (value: %s)." 
% id_repr.decode() ) def test_template_exceptions(self): with self.assertLogs('django.request', 'ERROR'): try: self.client.get(reverse('template_exception')) except Exception: raising_loc = inspect.trace()[-1][-2][0].strip() self.assertNotEqual( raising_loc.find("raise Exception('boom')"), -1, "Failed to find 'raise Exception' in last frame of " "traceback, instead found: %s" % raising_loc ) def test_template_loader_postmortem(self): """Tests for not existing file""" template_name = "notfound.html" with tempfile.NamedTemporaryFile(prefix=template_name) as tmpfile: tempdir = os.path.dirname(tmpfile.name) template_path = os.path.join(tempdir, template_name) with override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [tempdir], }]), self.assertLogs('django.request', 'ERROR'): response = self.client.get(reverse('raises_template_does_not_exist', kwargs={"path": template_name})) self.assertContains(response, "%s (Source does not exist)" % template_path, status_code=500, count=2) # Assert as HTML. self.assertContains( response, '<li><code>django.template.loaders.filesystem.Loader</code>: ' '%s (Source does not exist)</li>' % os.path.join(tempdir, 'notfound.html'), status_code=500, html=True, ) def test_no_template_source_loaders(self): """ Make sure if you don't specify a template, the debug view doesn't blow up. """ with self.assertLogs('django.request', 'ERROR'): with self.assertRaises(TemplateDoesNotExist): self.client.get('/render_no_template/') @override_settings(ROOT_URLCONF='view_tests.default_urls') def test_default_urlconf_template(self): """ Make sure that the default URLconf template is shown instead of the technical 404 page, if the user has not altered their URLconf yet. """ response = self.client.get('/') self.assertContains( response, "<h1>The install worked successfully! Congratulations!</h1>" ) @override_settings(ROOT_URLCONF='view_tests.regression_21530_urls') def test_regression_21530(self): """ Regression test for bug #21530. If the admin app include is replaced with exactly one url pattern, then the technical 404 template should be displayed. The bug here was that an AttributeError caused a 500 response. """ response = self.client.get('/') self.assertContains( response, "Page not found <span>(404)</span>", status_code=404 ) def test_template_encoding(self): """ The templates are loaded directly, not via a template loader, and should be opened as utf-8 charset as is the default specified on template engines. 
""" with mock.patch.object(DebugPath, 'open') as m: default_urlconf(None) m.assert_called_once_with(encoding='utf-8') m.reset_mock() technical_404_response(mock.MagicMock(), mock.Mock()) m.assert_called_once_with(encoding='utf-8') def test_technical_404_converter_raise_404(self): with mock.patch.object(IntConverter, 'to_python', side_effect=Http404): response = self.client.get('/path-post/1/') self.assertContains(response, 'Page not found', status_code=404) def test_exception_reporter_from_request(self): with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/custom_reporter_class_view/') self.assertContains(response, 'custom traceback text', status_code=500) @override_settings(DEFAULT_EXCEPTION_REPORTER='view_tests.views.CustomExceptionReporter') def test_exception_reporter_from_settings(self): with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/raises500/') self.assertContains(response, 'custom traceback text', status_code=500) @override_settings(DEFAULT_EXCEPTION_REPORTER='view_tests.views.TemplateOverrideExceptionReporter') def test_template_override_exception_reporter(self): with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/raises500/') self.assertContains( response, '<h1>Oh no, an error occurred!</h1>', status_code=500, html=True, ) with self.assertLogs('django.request', 'ERROR'): response = self.client.get('/raises500/', HTTP_ACCEPT='text/plain') self.assertContains(response, 'Oh dear, an error occurred!', status_code=500) class DebugViewQueriesAllowedTests(SimpleTestCase): # May need a query to initialize MySQL connection databases = {'default'} def test_handle_db_exception(self): """ Ensure the debug view works when a database exception is raised by performing an invalid query and passing the exception to the debug view. """ with connection.cursor() as cursor: try: cursor.execute('INVALID SQL') except DatabaseError: exc_info = sys.exc_info() rf = RequestFactory() response = technical_500_response(rf.get('/'), *exc_info) self.assertContains(response, 'OperationalError at /', status_code=500) @override_settings( DEBUG=True, ROOT_URLCONF='view_tests.urls', # No template directories are configured, so no templates will be found. TEMPLATES=[{ 'BACKEND': 'django.template.backends.dummy.TemplateStrings', }], ) class NonDjangoTemplatesDebugViewTests(SimpleTestCase): def test_400(self): # When DEBUG=True, technical_500_template() is called. with self.assertLogs('django.security', 'WARNING'): response = self.client.get('/raises400/') self.assertContains(response, '<div class="context" id="', status_code=400) def test_400_bad_request(self): # When DEBUG=True, technical_500_template() is called. with self.assertLogs('django.request', 'WARNING') as cm: response = self.client.get('/raises400_bad_request/') self.assertContains(response, '<div class="context" id="', status_code=400) self.assertEqual( cm.records[0].getMessage(), 'Malformed request syntax: /raises400_bad_request/', ) def test_403(self): response = self.client.get('/raises403/') self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403) def test_404(self): response = self.client.get('/raises404/') self.assertEqual(response.status_code, 404) def test_template_not_found_error(self): # Raises a TemplateDoesNotExist exception and shows the debug view. 
url = reverse('raises_template_does_not_exist', kwargs={"path": "notfound.html"}) with self.assertLogs('django.request', 'ERROR'): response = self.client.get(url) self.assertContains(response, '<div class="context" id="', status_code=500) class ExceptionReporterTests(SimpleTestCase): rf = RequestFactory() def test_request_and_exception(self): "A simple exception report can be generated" try: request = self.rf.get('/test_view/') request.user = User() raise ValueError("Can't find my keys") except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>ValueError at /test_view/</h1>', html) self.assertIn('<pre class="exception_value">Can&#x27;t find my keys</pre>', html) self.assertIn('<th>Request Method:</th>', html) self.assertIn('<th>Request URL:</th>', html) self.assertIn('<h3 id="user-info">USER</h3>', html) self.assertIn('<p>jacob</p>', html) self.assertIn('<th>Exception Type:</th>', html) self.assertIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertNotIn('<p>Request data not supplied</p>', html) self.assertIn('<p>No POST data</p>', html) def test_no_request(self): "An exception report can be generated without request" try: raise ValueError("Can't find my keys") except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>ValueError</h1>', html) self.assertIn('<pre class="exception_value">Can&#x27;t find my keys</pre>', html) self.assertNotIn('<th>Request Method:</th>', html) self.assertNotIn('<th>Request URL:</th>', html) self.assertNotIn('<h3 id="user-info">USER</h3>', html) self.assertIn('<th>Exception Type:</th>', html) self.assertIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertIn('<p>Request data not supplied</p>', html) def test_sharing_traceback(self): try: raise ValueError('Oops') except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertIn( '<form action="https://dpaste.com/" name="pasteform" ' 'id="pasteform" method="post">', html, ) def test_eol_support(self): """The ExceptionReporter supports Unix, Windows and Macintosh EOL markers""" LINES = ['print %d' % i for i in range(1, 6)] reporter = ExceptionReporter(None, None, None, None) for newline in ['\n', '\r\n', '\r']: fd, filename = tempfile.mkstemp(text=False) os.write(fd, (newline.join(LINES) + newline).encode()) os.close(fd) try: self.assertEqual( reporter._get_lines_from_file(filename, 3, 2), (1, LINES[1:3], LINES[3], LINES[4:]) ) finally: os.unlink(filename) def test_no_exception(self): "An exception report can be generated for just a request" request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML('<h1>Report at /test_view/</h1>', html) self.assertIn('<pre class="exception_value">No exception message supplied</pre>', html) self.assertIn('<th>Request Method:</th>', html) self.assertIn('<th>Request URL:</th>', html) self.assertNotIn('<th>Exception Type:</th>', html) self.assertNotIn('<th>Exception Value:</th>', html) self.assertNotIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) 
self.assertNotIn('<p>Request data not supplied</p>', html) def test_suppressed_context(self): try: try: raise RuntimeError("Can't find my keys") except RuntimeError: raise ValueError("Can't find my keys") from None except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>ValueError</h1>', html) self.assertIn('<pre class="exception_value">Can&#x27;t find my keys</pre>', html) self.assertIn('<th>Exception Type:</th>', html) self.assertIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertIn('<p>Request data not supplied</p>', html) self.assertNotIn('During handling of the above exception', html) def test_innermost_exception_without_traceback(self): try: try: raise RuntimeError('Oops') except Exception as exc: new_exc = RuntimeError('My context') exc.__context__ = new_exc raise except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) frames = reporter.get_traceback_frames() self.assertEqual(len(frames), 2) html = reporter.get_traceback_html() self.assertInHTML('<h1>RuntimeError</h1>', html) self.assertIn('<pre class="exception_value">Oops</pre>', html) self.assertIn('<th>Exception Type:</th>', html) self.assertIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertIn('<p>Request data not supplied</p>', html) self.assertIn( 'During handling of the above exception (My context), another ' 'exception occurred', html, ) self.assertInHTML('<li class="frame user">None</li>', html) self.assertIn('Traceback (most recent call last):\n None', html) text = reporter.get_traceback_text() self.assertIn('Exception Type: RuntimeError', text) self.assertIn('Exception Value: Oops', text) self.assertIn('Traceback (most recent call last):\n None', text) self.assertIn( 'During handling of the above exception (My context), another ' 'exception occurred', text, ) def test_mid_stack_exception_without_traceback(self): try: try: raise RuntimeError('Inner Oops') except Exception as exc: new_exc = RuntimeError('My context') new_exc.__context__ = exc raise RuntimeError('Oops') from new_exc except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>RuntimeError</h1>', html) self.assertIn('<pre class="exception_value">Oops</pre>', html) self.assertIn('<th>Exception Type:</th>', html) self.assertIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertInHTML('<li class="frame user">Traceback: None</li>', html) self.assertIn( 'During handling of the above exception (Inner Oops), another ' 'exception occurred:\n Traceback: None', html, ) text = reporter.get_traceback_text() self.assertIn('Exception Type: RuntimeError', text) self.assertIn('Exception Value: Oops', text) self.assertIn('Traceback (most recent call last):', text) self.assertIn( 'During handling of the above exception (Inner Oops), another ' 'exception occurred:\n Traceback: None', text, ) def test_reporting_of_nested_exceptions(self): request = self.rf.get('/test_view/') try: try: raise AttributeError(mark_safe('<p>Top level</p>')) except AttributeError as explicit: try: raise ValueError(mark_safe('<p>Second exception</p>')) from explicit except ValueError: raise 
IndexError(mark_safe('<p>Final exception</p>')) except Exception: # Custom exception handler, just pass it into ExceptionReporter exc_type, exc_value, tb = sys.exc_info() explicit_exc = 'The above exception ({0}) was the direct cause of the following exception:' implicit_exc = 'During handling of the above exception ({0}), another exception occurred:' reporter = ExceptionReporter(request, exc_type, exc_value, tb) html = reporter.get_traceback_html() # Both messages are twice on page -- one rendered as html, # one as plain text (for pastebin) self.assertEqual(2, html.count(explicit_exc.format('&lt;p&gt;Top level&lt;/p&gt;'))) self.assertEqual(2, html.count(implicit_exc.format('&lt;p&gt;Second exception&lt;/p&gt;'))) self.assertEqual(10, html.count('&lt;p&gt;Final exception&lt;/p&gt;')) text = reporter.get_traceback_text() self.assertIn(explicit_exc.format('<p>Top level</p>'), text) self.assertIn(implicit_exc.format('<p>Second exception</p>'), text) self.assertEqual(3, text.count('<p>Final exception</p>')) def test_reporting_frames_without_source(self): try: source = "def funcName():\n raise Error('Whoops')\nfuncName()" namespace = {} code = compile(source, 'generated', 'exec') exec(code, namespace) except Exception: exc_type, exc_value, tb = sys.exc_info() request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, exc_type, exc_value, tb) frames = reporter.get_traceback_frames() last_frame = frames[-1] self.assertEqual(last_frame['context_line'], '<source code not available>') self.assertEqual(last_frame['filename'], 'generated') self.assertEqual(last_frame['function'], 'funcName') self.assertEqual(last_frame['lineno'], 2) html = reporter.get_traceback_html() self.assertIn( '<span class="fname">generated</span>, line 2, in funcName', html, ) self.assertIn( '<code class="fname">generated</code>, line 2, in funcName', html, ) self.assertIn( '"generated", line 2, in funcName\n' ' &lt;source code not available&gt;', html, ) text = reporter.get_traceback_text() self.assertIn( '"generated", line 2, in funcName\n' ' <source code not available>', text, ) def test_reporting_frames_source_not_match(self): try: source = "def funcName():\n raise Error('Whoops')\nfuncName()" namespace = {} code = compile(source, 'generated', 'exec') exec(code, namespace) except Exception: exc_type, exc_value, tb = sys.exc_info() with mock.patch( 'django.views.debug.ExceptionReporter._get_source', return_value=['wrong source'], ): request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, exc_type, exc_value, tb) frames = reporter.get_traceback_frames() last_frame = frames[-1] self.assertEqual(last_frame['context_line'], '<source code not available>') self.assertEqual(last_frame['filename'], 'generated') self.assertEqual(last_frame['function'], 'funcName') self.assertEqual(last_frame['lineno'], 2) html = reporter.get_traceback_html() self.assertIn( '<span class="fname">generated</span>, line 2, in funcName', html, ) self.assertIn( '<code class="fname">generated</code>, line 2, in funcName', html, ) self.assertIn( '"generated", line 2, in funcName\n' ' &lt;source code not available&gt;', html, ) text = reporter.get_traceback_text() self.assertIn( '"generated", line 2, in funcName\n' ' <source code not available>', text, ) def test_reporting_frames_for_cyclic_reference(self): try: def test_func(): try: raise RuntimeError('outer') from RuntimeError('inner') except RuntimeError as exc: raise exc.__cause__ test_func() except Exception: exc_type, exc_value, tb = sys.exc_info() request = 
self.rf.get('/test_view/') reporter = ExceptionReporter(request, exc_type, exc_value, tb) def generate_traceback_frames(*args, **kwargs): nonlocal tb_frames tb_frames = reporter.get_traceback_frames() tb_frames = None tb_generator = threading.Thread(target=generate_traceback_frames, daemon=True) msg = ( "Cycle in the exception chain detected: exception 'inner' " "encountered again." ) with self.assertWarnsMessage(ExceptionCycleWarning, msg): tb_generator.start() tb_generator.join(timeout=5) if tb_generator.is_alive(): # tb_generator is a daemon that runs until the main thread/process # exits. This is resource heavy when running the full test suite. # Setting the following values to None makes # reporter.get_traceback_frames() exit early. exc_value.__traceback__ = exc_value.__context__ = exc_value.__cause__ = None tb_generator.join() self.fail('Cyclic reference in Exception Reporter.get_traceback_frames()') if tb_frames is None: # can happen if the thread generating traceback got killed # or exception while generating the traceback self.fail('Traceback generation failed') last_frame = tb_frames[-1] self.assertIn('raise exc.__cause__', last_frame['context_line']) self.assertEqual(last_frame['filename'], __file__) self.assertEqual(last_frame['function'], 'test_func') def test_request_and_message(self): "A message can be provided in addition to a request" request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, None, "I'm a little teapot", None) html = reporter.get_traceback_html() self.assertInHTML('<h1>Report at /test_view/</h1>', html) self.assertIn('<pre class="exception_value">I&#x27;m a little teapot</pre>', html) self.assertIn('<th>Request Method:</th>', html) self.assertIn('<th>Request URL:</th>', html) self.assertNotIn('<th>Exception Type:</th>', html) self.assertNotIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertNotIn('<p>Request data not supplied</p>', html) def test_message_only(self): reporter = ExceptionReporter(None, None, "I'm a little teapot", None) html = reporter.get_traceback_html() self.assertInHTML('<h1>Report</h1>', html) self.assertIn('<pre class="exception_value">I&#x27;m a little teapot</pre>', html) self.assertNotIn('<th>Request Method:</th>', html) self.assertNotIn('<th>Request URL:</th>', html) self.assertNotIn('<th>Exception Type:</th>', html) self.assertNotIn('<th>Exception Value:</th>', html) self.assertIn('<h2>Traceback ', html) self.assertIn('<h2>Request information</h2>', html) self.assertIn('<p>Request data not supplied</p>', html) def test_non_utf8_values_handling(self): "Non-UTF-8 exceptions/values should not make the output generation choke." 
try: class NonUtf8Output(Exception): def __repr__(self): return b'EXC\xe9EXC' somevar = b'VAL\xe9VAL' # NOQA raise NonUtf8Output() except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertIn('VAL\\xe9VAL', html) self.assertIn('EXC\\xe9EXC', html) def test_local_variable_escaping(self): """Safe strings in local variables are escaped.""" try: local = mark_safe('<p>Local variable</p>') raise ValueError(local) except Exception: exc_type, exc_value, tb = sys.exc_info() html = ExceptionReporter(None, exc_type, exc_value, tb).get_traceback_html() self.assertIn('<td class="code"><pre>&#x27;&lt;p&gt;Local variable&lt;/p&gt;&#x27;</pre></td>', html) def test_unprintable_values_handling(self): "Unprintable values should not make the output generation choke." try: class OomOutput: def __repr__(self): raise MemoryError('OOM') oomvalue = OomOutput() # NOQA raise ValueError() except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertIn('<td class="code"><pre>Error in formatting', html) def test_too_large_values_handling(self): "Large values should not create a large HTML." large = 256 * 1024 repr_of_str_adds = len(repr('')) try: class LargeOutput: def __repr__(self): return repr('A' * large) largevalue = LargeOutput() # NOQA raise ValueError() except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertEqual(len(html) // 1024 // 128, 0) # still fit in 128Kb self.assertIn('&lt;trimmed %d bytes string&gt;' % (large + repr_of_str_adds,), html) def test_encoding_error(self): """ A UnicodeError displays a portion of the problematic string. HTML in safe strings is escaped. """ try: mark_safe('abcdefghijkl<p>mnὀp</p>qrstuwxyz').encode('ascii') except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertIn('<h2>Unicode error hint</h2>', html) self.assertIn('The string that could not be encoded/decoded was: ', html) self.assertIn('<strong>&lt;p&gt;mnὀp&lt;/p&gt;</strong>', html) def test_unfrozen_importlib(self): """ importlib is not a frozen app, but its loader thinks it's frozen which results in an ImportError. Refs #21443. """ try: request = self.rf.get('/test_view/') importlib.import_module('abc.def.invalid.name') except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>ModuleNotFoundError at /test_view/</h1>', html) def test_ignore_traceback_evaluation_exceptions(self): """ Don't trip over exceptions generated by crafted objects when evaluating them while cleansing (#24455). """ class BrokenEvaluation(Exception): pass def broken_setup(): raise BrokenEvaluation request = self.rf.get('/test_view/') broken_lazy = SimpleLazyObject(broken_setup) try: bool(broken_lazy) except BrokenEvaluation: exc_type, exc_value, tb = sys.exc_info() self.assertIn( "BrokenEvaluation", ExceptionReporter(request, exc_type, exc_value, tb).get_traceback_html(), "Evaluation exception reason not mentioned in traceback" ) @override_settings(ALLOWED_HOSTS='example.com') def test_disallowed_host(self): "An exception report can be generated even for a disallowed host." 
request = self.rf.get('/', HTTP_HOST='evil.com') reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertIn("http://evil.com/", html) def test_request_with_items_key(self): """ An exception report can be generated for requests with 'items' in request GET, POST, FILES, or COOKIES QueryDicts. """ value = '<td>items</td><td class="code"><pre>&#x27;Oops&#x27;</pre></td>' # GET request = self.rf.get('/test_view/?items=Oops') reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML(value, html) # POST request = self.rf.post('/test_view/', data={'items': 'Oops'}) reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML(value, html) # FILES fp = StringIO('filecontent') request = self.rf.post('/test_view/', data={'name': 'filename', 'items': fp}) reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML( '<td>items</td><td class="code"><pre>&lt;InMemoryUploadedFile: ' 'items (application/octet-stream)&gt;</pre></td>', html ) # COOKIES rf = RequestFactory() rf.cookies['items'] = 'Oops' request = rf.get('/test_view/') reporter = ExceptionReporter(request, None, None, None) html = reporter.get_traceback_html() self.assertInHTML('<td>items</td><td class="code"><pre>&#x27;Oops&#x27;</pre></td>', html) def test_exception_fetching_user(self): """ The error page can be rendered if the current user can't be retrieved (such as when the database is unavailable). """ class ExceptionUser: def __str__(self): raise Exception() request = self.rf.get('/test_view/') request.user = ExceptionUser() try: raise ValueError('Oops') except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) html = reporter.get_traceback_html() self.assertInHTML('<h1>ValueError at /test_view/</h1>', html) self.assertIn('<pre class="exception_value">Oops</pre>', html) self.assertIn('<h3 id="user-info">USER</h3>', html) self.assertIn('<p>[unable to retrieve the current user]</p>', html) text = reporter.get_traceback_text() self.assertIn('USER: [unable to retrieve the current user]', text) def test_template_encoding(self): """ The templates are loaded directly, not via a template loader, and should be opened as utf-8 charset as is the default specified on template engines. 
""" reporter = ExceptionReporter(None, None, None, None) with mock.patch.object(DebugPath, 'open') as m: reporter.get_traceback_html() m.assert_called_once_with(encoding='utf-8') m.reset_mock() reporter.get_traceback_text() m.assert_called_once_with(encoding='utf-8') class PlainTextReportTests(SimpleTestCase): rf = RequestFactory() def test_request_and_exception(self): "A simple exception report can be generated" try: request = self.rf.get('/test_view/') request.user = User() raise ValueError("Can't find my keys") except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) text = reporter.get_traceback_text() self.assertIn('ValueError at /test_view/', text) self.assertIn("Can't find my keys", text) self.assertIn('Request Method:', text) self.assertIn('Request URL:', text) self.assertIn('USER: jacob', text) self.assertIn('Exception Type:', text) self.assertIn('Exception Value:', text) self.assertIn('Traceback (most recent call last):', text) self.assertIn('Request information:', text) self.assertNotIn('Request data not supplied', text) def test_no_request(self): "An exception report can be generated without request" try: raise ValueError("Can't find my keys") except ValueError: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(None, exc_type, exc_value, tb) text = reporter.get_traceback_text() self.assertIn('ValueError', text) self.assertIn("Can't find my keys", text) self.assertNotIn('Request Method:', text) self.assertNotIn('Request URL:', text) self.assertNotIn('USER:', text) self.assertIn('Exception Type:', text) self.assertIn('Exception Value:', text) self.assertIn('Traceback (most recent call last):', text) self.assertIn('Request data not supplied', text) def test_no_exception(self): "An exception report can be generated for just a request" request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, None, None, None) reporter.get_traceback_text() def test_request_and_message(self): "A message can be provided in addition to a request" request = self.rf.get('/test_view/') reporter = ExceptionReporter(request, None, "I'm a little teapot", None) reporter.get_traceback_text() @override_settings(DEBUG=True) def test_template_exception(self): request = self.rf.get('/test_view/') try: render(request, 'debug/template_error.html') except Exception: exc_type, exc_value, tb = sys.exc_info() reporter = ExceptionReporter(request, exc_type, exc_value, tb) text = reporter.get_traceback_text() templ_path = Path(Path(__file__).parents[1], 'templates', 'debug', 'template_error.html') self.assertIn( 'Template error:\n' 'In template %(path)s, error at line 2\n' ' \'cycle\' tag requires at least two arguments\n' ' 1 : Template with error:\n' ' 2 : {%% cycle %%} \n' ' 3 : ' % {'path': templ_path}, text ) def test_request_with_items_key(self): """ An exception report can be generated for requests with 'items' in request GET, POST, FILES, or COOKIES QueryDicts. 
""" # GET request = self.rf.get('/test_view/?items=Oops') reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn("items = 'Oops'", text) # POST request = self.rf.post('/test_view/', data={'items': 'Oops'}) reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn("items = 'Oops'", text) # FILES fp = StringIO('filecontent') request = self.rf.post('/test_view/', data={'name': 'filename', 'items': fp}) reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn('items = <InMemoryUploadedFile:', text) # COOKIES rf = RequestFactory() rf.cookies['items'] = 'Oops' request = rf.get('/test_view/') reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn("items = 'Oops'", text) def test_message_only(self): reporter = ExceptionReporter(None, None, "I'm a little teapot", None) reporter.get_traceback_text() @override_settings(ALLOWED_HOSTS='example.com') def test_disallowed_host(self): "An exception report can be generated even for a disallowed host." request = self.rf.get('/', HTTP_HOST='evil.com') reporter = ExceptionReporter(request, None, None, None) text = reporter.get_traceback_text() self.assertIn("http://evil.com/", text) class ExceptionReportTestMixin: # Mixin used in the ExceptionReporterFilterTests and # AjaxResponseExceptionReporterFilter tests below breakfast_data = { 'sausage-key': 'sausage-value', 'baked-beans-key': 'baked-beans-value', 'hash-brown-key': 'hash-brown-value', 'bacon-key': 'bacon-value', } def verify_unsafe_response(self, view, check_for_vars=True, check_for_POST_params=True): """ Asserts that potentially sensitive info are displayed in the response. """ request = self.rf.post('/some_url/', self.breakfast_data) response = view(request) if check_for_vars: # All variables are shown. self.assertContains(response, 'cooked_eggs', status_code=500) self.assertContains(response, 'scrambled', status_code=500) self.assertContains(response, 'sauce', status_code=500) self.assertContains(response, 'worcestershire', status_code=500) if check_for_POST_params: for k, v in self.breakfast_data.items(): # All POST parameters are shown. self.assertContains(response, k, status_code=500) self.assertContains(response, v, status_code=500) def verify_safe_response(self, view, check_for_vars=True, check_for_POST_params=True): """ Asserts that certain sensitive info are not displayed in the response. """ request = self.rf.post('/some_url/', self.breakfast_data) response = view(request) if check_for_vars: # Non-sensitive variable's name and value are shown. self.assertContains(response, 'cooked_eggs', status_code=500) self.assertContains(response, 'scrambled', status_code=500) # Sensitive variable's name is shown but not its value. self.assertContains(response, 'sauce', status_code=500) self.assertNotContains(response, 'worcestershire', status_code=500) if check_for_POST_params: for k in self.breakfast_data: # All POST parameters' names are shown. self.assertContains(response, k, status_code=500) # Non-sensitive POST parameters' values are shown. self.assertContains(response, 'baked-beans-value', status_code=500) self.assertContains(response, 'hash-brown-value', status_code=500) # Sensitive POST parameters' values are not shown. 
self.assertNotContains(response, 'sausage-value', status_code=500) self.assertNotContains(response, 'bacon-value', status_code=500) def verify_paranoid_response(self, view, check_for_vars=True, check_for_POST_params=True): """ Asserts that no variables or POST parameters are displayed in the response. """ request = self.rf.post('/some_url/', self.breakfast_data) response = view(request) if check_for_vars: # Show variable names but not their values. self.assertContains(response, 'cooked_eggs', status_code=500) self.assertNotContains(response, 'scrambled', status_code=500) self.assertContains(response, 'sauce', status_code=500) self.assertNotContains(response, 'worcestershire', status_code=500) if check_for_POST_params: for k, v in self.breakfast_data.items(): # All POST parameters' names are shown. self.assertContains(response, k, status_code=500) # No POST parameters' values are shown. self.assertNotContains(response, v, status_code=500) def verify_unsafe_email(self, view, check_for_POST_params=True): """ Asserts that potentially sensitive info are displayed in the email report. """ with self.settings(ADMINS=[('Admin', '[email protected]')]): mail.outbox = [] # Empty outbox request = self.rf.post('/some_url/', self.breakfast_data) view(request) self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] # Frames vars are never shown in plain text email reports. body_plain = str(email.body) self.assertNotIn('cooked_eggs', body_plain) self.assertNotIn('scrambled', body_plain) self.assertNotIn('sauce', body_plain) self.assertNotIn('worcestershire', body_plain) # Frames vars are shown in html email reports. body_html = str(email.alternatives[0][0]) self.assertIn('cooked_eggs', body_html) self.assertIn('scrambled', body_html) self.assertIn('sauce', body_html) self.assertIn('worcestershire', body_html) if check_for_POST_params: for k, v in self.breakfast_data.items(): # All POST parameters are shown. self.assertIn(k, body_plain) self.assertIn(v, body_plain) self.assertIn(k, body_html) self.assertIn(v, body_html) def verify_safe_email(self, view, check_for_POST_params=True): """ Asserts that certain sensitive info are not displayed in the email report. """ with self.settings(ADMINS=[('Admin', '[email protected]')]): mail.outbox = [] # Empty outbox request = self.rf.post('/some_url/', self.breakfast_data) view(request) self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] # Frames vars are never shown in plain text email reports. body_plain = str(email.body) self.assertNotIn('cooked_eggs', body_plain) self.assertNotIn('scrambled', body_plain) self.assertNotIn('sauce', body_plain) self.assertNotIn('worcestershire', body_plain) # Frames vars are shown in html email reports. body_html = str(email.alternatives[0][0]) self.assertIn('cooked_eggs', body_html) self.assertIn('scrambled', body_html) self.assertIn('sauce', body_html) self.assertNotIn('worcestershire', body_html) if check_for_POST_params: for k in self.breakfast_data: # All POST parameters' names are shown. self.assertIn(k, body_plain) # Non-sensitive POST parameters' values are shown. self.assertIn('baked-beans-value', body_plain) self.assertIn('hash-brown-value', body_plain) self.assertIn('baked-beans-value', body_html) self.assertIn('hash-brown-value', body_html) # Sensitive POST parameters' values are not shown. 
self.assertNotIn('sausage-value', body_plain) self.assertNotIn('bacon-value', body_plain) self.assertNotIn('sausage-value', body_html) self.assertNotIn('bacon-value', body_html) def verify_paranoid_email(self, view): """ Asserts that no variables or POST parameters are displayed in the email report. """ with self.settings(ADMINS=[('Admin', '[email protected]')]): mail.outbox = [] # Empty outbox request = self.rf.post('/some_url/', self.breakfast_data) view(request) self.assertEqual(len(mail.outbox), 1) email = mail.outbox[0] # Frames vars are never shown in plain text email reports. body = str(email.body) self.assertNotIn('cooked_eggs', body) self.assertNotIn('scrambled', body) self.assertNotIn('sauce', body) self.assertNotIn('worcestershire', body) for k, v in self.breakfast_data.items(): # All POST parameters' names are shown. self.assertIn(k, body) # No POST parameters' values are shown. self.assertNotIn(v, body) @override_settings(ROOT_URLCONF='view_tests.urls') class ExceptionReporterFilterTests(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase): """ Sensitive information can be filtered out of error reports (#14614). """ rf = RequestFactory() def test_non_sensitive_request(self): """ Everything (request info and frame variables) can bee seen in the default error reports for non-sensitive requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(non_sensitive_view) self.verify_unsafe_email(non_sensitive_view) with self.settings(DEBUG=False): self.verify_unsafe_response(non_sensitive_view) self.verify_unsafe_email(non_sensitive_view) def test_sensitive_request(self): """ Sensitive POST parameters and frame variables cannot be seen in the default error reports for sensitive requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_view) self.verify_unsafe_email(sensitive_view) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_view) self.verify_safe_email(sensitive_view) def test_paranoid_request(self): """ No POST parameters and frame variables can be seen in the default error reports for "paranoid" requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(paranoid_view) self.verify_unsafe_email(paranoid_view) with self.settings(DEBUG=False): self.verify_paranoid_response(paranoid_view) self.verify_paranoid_email(paranoid_view) def test_multivalue_dict_key_error(self): """ #21098 -- Sensitive POST parameters cannot be seen in the error reports for if request.POST['nonexistent_key'] throws an error. """ with self.settings(DEBUG=True): self.verify_unsafe_response(multivalue_dict_key_error) self.verify_unsafe_email(multivalue_dict_key_error) with self.settings(DEBUG=False): self.verify_safe_response(multivalue_dict_key_error) self.verify_safe_email(multivalue_dict_key_error) def test_custom_exception_reporter_filter(self): """ It's possible to assign an exception reporter filter to the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER. """ with self.settings(DEBUG=True): self.verify_unsafe_response(custom_exception_reporter_filter_view) self.verify_unsafe_email(custom_exception_reporter_filter_view) with self.settings(DEBUG=False): self.verify_unsafe_response(custom_exception_reporter_filter_view) self.verify_unsafe_email(custom_exception_reporter_filter_view) def test_sensitive_method(self): """ The sensitive_variables decorator works with object methods. 
""" with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_method_view, check_for_POST_params=False) self.verify_unsafe_email(sensitive_method_view, check_for_POST_params=False) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_method_view, check_for_POST_params=False) self.verify_safe_email(sensitive_method_view, check_for_POST_params=False) def test_sensitive_function_arguments(self): """ Sensitive variables don't leak in the sensitive_variables decorator's frame, when those variables are passed as arguments to the decorated function. """ with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_args_function_caller) self.verify_unsafe_email(sensitive_args_function_caller) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_args_function_caller, check_for_POST_params=False) self.verify_safe_email(sensitive_args_function_caller, check_for_POST_params=False) def test_sensitive_function_keyword_arguments(self): """ Sensitive variables don't leak in the sensitive_variables decorator's frame, when those variables are passed as keyword arguments to the decorated function. """ with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_kwargs_function_caller) self.verify_unsafe_email(sensitive_kwargs_function_caller) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_kwargs_function_caller, check_for_POST_params=False) self.verify_safe_email(sensitive_kwargs_function_caller, check_for_POST_params=False) def test_callable_settings(self): """ Callable settings should not be evaluated in the debug page (#21345). """ def callable_setting(): return "This should not be displayed" with self.settings(DEBUG=True, FOOBAR=callable_setting): response = self.client.get('/raises500/') self.assertNotContains(response, "This should not be displayed", status_code=500) def test_callable_settings_forbidding_to_set_attributes(self): """ Callable settings which forbid to set attributes should not break the debug page (#23070). """ class CallableSettingWithSlots: __slots__ = [] def __call__(self): return "This should not be displayed" with self.settings(DEBUG=True, WITH_SLOTS=CallableSettingWithSlots()): response = self.client.get('/raises500/') self.assertNotContains(response, "This should not be displayed", status_code=500) def test_dict_setting_with_non_str_key(self): """ A dict setting containing a non-string key should not break the debug page (#12744). """ with self.settings(DEBUG=True, FOOBAR={42: None}): response = self.client.get('/raises500/') self.assertContains(response, 'FOOBAR', status_code=500) def test_sensitive_settings(self): """ The debug page should not show some sensitive settings (password, secret key, ...). """ sensitive_settings = [ 'SECRET_KEY', 'PASSWORD', 'API_KEY', 'AUTH_TOKEN', ] for setting in sensitive_settings: with self.settings(DEBUG=True, **{setting: "should not be displayed"}): response = self.client.get('/raises500/') self.assertNotContains(response, 'should not be displayed', status_code=500) def test_settings_with_sensitive_keys(self): """ The debug page should filter out some sensitive information found in dict settings. 
""" sensitive_settings = [ 'SECRET_KEY', 'PASSWORD', 'API_KEY', 'AUTH_TOKEN', ] for setting in sensitive_settings: FOOBAR = { setting: "should not be displayed", 'recursive': {setting: "should not be displayed"}, } with self.settings(DEBUG=True, FOOBAR=FOOBAR): response = self.client.get('/raises500/') self.assertNotContains(response, 'should not be displayed', status_code=500) def test_cleanse_setting_basic(self): reporter_filter = SafeExceptionReporterFilter() self.assertEqual(reporter_filter.cleanse_setting('TEST', 'TEST'), 'TEST') self.assertEqual( reporter_filter.cleanse_setting('PASSWORD', 'super_secret'), reporter_filter.cleansed_substitute, ) def test_cleanse_setting_ignore_case(self): reporter_filter = SafeExceptionReporterFilter() self.assertEqual( reporter_filter.cleanse_setting('password', 'super_secret'), reporter_filter.cleansed_substitute, ) def test_cleanse_setting_recurses_in_dictionary(self): reporter_filter = SafeExceptionReporterFilter() initial = {'login': 'cooper', 'password': 'secret'} self.assertEqual( reporter_filter.cleanse_setting('SETTING_NAME', initial), {'login': 'cooper', 'password': reporter_filter.cleansed_substitute}, ) def test_cleanse_setting_recurses_in_dictionary_with_non_string_key(self): reporter_filter = SafeExceptionReporterFilter() initial = {('localhost', 8000): {'login': 'cooper', 'password': 'secret'}} self.assertEqual( reporter_filter.cleanse_setting('SETTING_NAME', initial), { ('localhost', 8000): { 'login': 'cooper', 'password': reporter_filter.cleansed_substitute, }, }, ) def test_cleanse_setting_recurses_in_list_tuples(self): reporter_filter = SafeExceptionReporterFilter() initial = [ { 'login': 'cooper', 'password': 'secret', 'apps': ( {'name': 'app1', 'api_key': 'a06b-c462cffae87a'}, {'name': 'app2', 'api_key': 'a9f4-f152e97ad808'}, ), 'tokens': ['98b37c57-ec62-4e39', '8690ef7d-8004-4916'], }, {'SECRET_KEY': 'c4d77c62-6196-4f17-a06b-c462cffae87a'}, ] cleansed = [ { 'login': 'cooper', 'password': reporter_filter.cleansed_substitute, 'apps': ( {'name': 'app1', 'api_key': reporter_filter.cleansed_substitute}, {'name': 'app2', 'api_key': reporter_filter.cleansed_substitute}, ), 'tokens': reporter_filter.cleansed_substitute, }, {'SECRET_KEY': reporter_filter.cleansed_substitute}, ] self.assertEqual( reporter_filter.cleanse_setting('SETTING_NAME', initial), cleansed, ) self.assertEqual( reporter_filter.cleanse_setting('SETTING_NAME', tuple(initial)), tuple(cleansed), ) def test_request_meta_filtering(self): request = self.rf.get('/', HTTP_SECRET_HEADER='super_secret') reporter_filter = SafeExceptionReporterFilter() self.assertEqual( reporter_filter.get_safe_request_meta(request)['HTTP_SECRET_HEADER'], reporter_filter.cleansed_substitute, ) def test_exception_report_uses_meta_filtering(self): response = self.client.get('/raises500/', HTTP_SECRET_HEADER='super_secret') self.assertNotIn(b'super_secret', response.content) response = self.client.get( '/raises500/', HTTP_SECRET_HEADER='super_secret', HTTP_ACCEPT='application/json', ) self.assertNotIn(b'super_secret', response.content) class CustomExceptionReporterFilter(SafeExceptionReporterFilter): cleansed_substitute = 'XXXXXXXXXXXXXXXXXXXX' hidden_settings = _lazy_re_compile('API|TOKEN|KEY|SECRET|PASS|SIGNATURE|DATABASE_URL', flags=re.I) @override_settings( ROOT_URLCONF='view_tests.urls', DEFAULT_EXCEPTION_REPORTER_FILTER='%s.CustomExceptionReporterFilter' % __name__, ) class CustomExceptionReporterFilterTests(SimpleTestCase): def setUp(self): get_default_exception_reporter_filter.cache_clear() 
def tearDown(self): get_default_exception_reporter_filter.cache_clear() def test_setting_allows_custom_subclass(self): self.assertIsInstance( get_default_exception_reporter_filter(), CustomExceptionReporterFilter, ) def test_cleansed_substitute_override(self): reporter_filter = get_default_exception_reporter_filter() self.assertEqual( reporter_filter.cleanse_setting('password', 'super_secret'), reporter_filter.cleansed_substitute, ) def test_hidden_settings_override(self): reporter_filter = get_default_exception_reporter_filter() self.assertEqual( reporter_filter.cleanse_setting('database_url', 'super_secret'), reporter_filter.cleansed_substitute, ) class NonHTMLResponseExceptionReporterFilter(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase): """ Sensitive information can be filtered out of error reports. The plain text 500 debug-only error page is served when it has been detected the request doesn't accept HTML content. Don't check for (non)existence of frames vars in the traceback information section of the response content because they're not included in these error pages. Refs #14614. """ rf = RequestFactory(HTTP_ACCEPT='application/json') def test_non_sensitive_request(self): """ Request info can bee seen in the default error reports for non-sensitive requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(non_sensitive_view, check_for_vars=False) with self.settings(DEBUG=False): self.verify_unsafe_response(non_sensitive_view, check_for_vars=False) def test_sensitive_request(self): """ Sensitive POST parameters cannot be seen in the default error reports for sensitive requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(sensitive_view, check_for_vars=False) with self.settings(DEBUG=False): self.verify_safe_response(sensitive_view, check_for_vars=False) def test_paranoid_request(self): """ No POST parameters can be seen in the default error reports for "paranoid" requests. """ with self.settings(DEBUG=True): self.verify_unsafe_response(paranoid_view, check_for_vars=False) with self.settings(DEBUG=False): self.verify_paranoid_response(paranoid_view, check_for_vars=False) def test_custom_exception_reporter_filter(self): """ It's possible to assign an exception reporter filter to the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER. """ with self.settings(DEBUG=True): self.verify_unsafe_response(custom_exception_reporter_filter_view, check_for_vars=False) with self.settings(DEBUG=False): self.verify_unsafe_response(custom_exception_reporter_filter_view, check_for_vars=False) @override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls') def test_non_html_response_encoding(self): response = self.client.get('/raises500/', HTTP_ACCEPT='application/json') self.assertEqual(response.headers['Content-Type'], 'text/plain; charset=utf-8') class DecoratorsTests(SimpleTestCase): def test_sensitive_variables_not_called(self): msg = ( 'sensitive_variables() must be called to use it as a decorator, ' 'e.g., use @sensitive_variables(), not @sensitive_variables.' ) with self.assertRaisesMessage(TypeError, msg): @sensitive_variables def test_func(password): pass def test_sensitive_post_parameters_not_called(self): msg = ( 'sensitive_post_parameters() must be called to use it as a ' 'decorator, e.g., use @sensitive_post_parameters(), not ' '@sensitive_post_parameters.' ) with self.assertRaisesMessage(TypeError, msg): @sensitive_post_parameters def test_func(request): return index_page(request)
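# Illustrative sketch, not part of the test suite above: the decorators
# exercised by DecoratorsTests are applied to view functions roughly like
# this. The view name and variable names below are hypothetical.
from django.http import HttpResponse
from django.views.decorators.debug import (
    sensitive_post_parameters, sensitive_variables,
)


@sensitive_variables('password', 'api_token')
@sensitive_post_parameters('password')
def change_password(request):
    # With DEBUG=False, 'password' is masked both in the reported frame
    # variables and in the reported POST data; other values appear as usual.
    password = request.POST.get('password', '')
    api_token = 'hypothetical-token'
    return HttpResponse('ok' if password and api_token else 'missing')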
4c42d7b1809b6a2b2cbcf9920a6bb747ab0be2f5473a869d93402615b6e4ee2e
import functools import itertools import logging import os import signal import subprocess import sys import threading import time import traceback import weakref from collections import defaultdict from pathlib import Path from types import ModuleType from zipimport import zipimporter import django from django.apps import apps from django.core.signals import request_finished from django.dispatch import Signal from django.utils.functional import cached_property from django.utils.version import get_version_tuple autoreload_started = Signal() file_changed = Signal() DJANGO_AUTORELOAD_ENV = 'RUN_MAIN' logger = logging.getLogger('django.utils.autoreload') # If an error is raised while importing a file, it's not placed in sys.modules. # This means that any future modifications aren't caught. Keep a list of these # file paths to allow watching them in the future. _error_files = [] _exception = None try: import termios except ImportError: termios = None try: import pywatchman except ImportError: pywatchman = None def is_django_module(module): """Return True if the given module is nested under Django.""" return module.__name__.startswith('django.') def is_django_path(path): """Return True if the given file path is nested under Django.""" return Path(django.__file__).parent in Path(path).parents def check_errors(fn): @functools.wraps(fn) def wrapper(*args, **kwargs): global _exception try: fn(*args, **kwargs) except Exception: _exception = sys.exc_info() et, ev, tb = _exception if getattr(ev, 'filename', None) is None: # get the filename from the last item in the stack filename = traceback.extract_tb(tb)[-1][0] else: filename = ev.filename if filename not in _error_files: _error_files.append(filename) raise return wrapper def raise_last_exception(): global _exception if _exception is not None: raise _exception[1] def ensure_echo_on(): """ Ensure that echo mode is enabled. Some tools such as PDB disable it which causes usability issues after reload. """ if not termios or not sys.stdin.isatty(): return attr_list = termios.tcgetattr(sys.stdin) if not attr_list[3] & termios.ECHO: attr_list[3] |= termios.ECHO if hasattr(signal, 'SIGTTOU'): old_handler = signal.signal(signal.SIGTTOU, signal.SIG_IGN) else: old_handler = None termios.tcsetattr(sys.stdin, termios.TCSANOW, attr_list) if old_handler is not None: signal.signal(signal.SIGTTOU, old_handler) def iter_all_python_module_files(): # This is a hot path during reloading. Create a stable sorted list of # modules based on the module name and pass it to iter_modules_and_files(). # This ensures cached results are returned in the usual case that modules # aren't loaded on the fly. keys = sorted(sys.modules) modules = tuple(m for m in map(sys.modules.__getitem__, keys) if not isinstance(m, weakref.ProxyTypes)) return iter_modules_and_files(modules, frozenset(_error_files)) @functools.lru_cache(maxsize=1) def iter_modules_and_files(modules, extra_files): """Iterate through all modules needed to be watched.""" sys_file_paths = [] for module in modules: # During debugging (with PyDev) the 'typing.io' and 'typing.re' objects # are added to sys.modules, however they are types not modules and so # cause issues here. if not isinstance(module, ModuleType): continue if module.__name__ == '__main__': # __main__ (usually manage.py) doesn't always have a __spec__ set. # Handle this by falling back to using __file__, resolved below. # See https://docs.python.org/reference/import.html#main-spec # __file__ may not exists, e.g. when running ipdb debugger. 
if hasattr(module, '__file__'): sys_file_paths.append(module.__file__) continue if getattr(module, '__spec__', None) is None: continue spec = module.__spec__ # Modules could be loaded from places without a concrete location. If # this is the case, skip them. if spec.has_location: origin = spec.loader.archive if isinstance(spec.loader, zipimporter) else spec.origin sys_file_paths.append(origin) results = set() for filename in itertools.chain(sys_file_paths, extra_files): if not filename: continue path = Path(filename) try: if not path.exists(): # The module could have been removed, don't fail loudly if this # is the case. continue except ValueError as e: # Network filesystems may return null bytes in file paths. logger.debug('"%s" raised when resolving path: "%s"', e, path) continue resolved_path = path.resolve().absolute() results.add(resolved_path) return frozenset(results) @functools.lru_cache(maxsize=1) def common_roots(paths): """ Return a tuple of common roots that are shared between the given paths. File system watchers operate on directories and aren't cheap to create. Try to find the minimum set of directories to watch that encompass all of the files that need to be watched. """ # Inspired from Werkzeug: # https://github.com/pallets/werkzeug/blob/7477be2853df70a022d9613e765581b9411c3c39/werkzeug/_reloader.py # Create a sorted list of the path components, longest first. path_parts = sorted([x.parts for x in paths], key=len, reverse=True) tree = {} for chunks in path_parts: node = tree # Add each part of the path to the tree. for chunk in chunks: node = node.setdefault(chunk, {}) # Clear the last leaf in the tree. node.clear() # Turn the tree into a list of Path instances. def _walk(node, path): for prefix, child in node.items(): yield from _walk(child, path + (prefix,)) if not node: yield Path(*path) return tuple(_walk(tree, ())) def sys_path_directories(): """ Yield absolute directories from sys.path, ignoring entries that don't exist. """ for path in sys.path: path = Path(path) if not path.exists(): continue resolved_path = path.resolve().absolute() # If the path is a file (like a zip file), watch the parent directory. if resolved_path.is_file(): yield resolved_path.parent else: yield resolved_path def get_child_arguments(): """ Return the executable. This contains a workaround for Windows if the executable is reported to not have the .exe extension which can cause bugs on reloading. """ import __main__ py_script = Path(sys.argv[0]) args = [sys.executable] + ['-W%s' % o for o in sys.warnoptions] # __spec__ is set when the server was started with the `-m` option, # see https://docs.python.org/3/reference/import.html#main-spec if __main__.__spec__ is not None and __main__.__spec__.parent: args += ['-m', __main__.__spec__.parent] args += sys.argv[1:] elif not py_script.exists(): # sys.argv[0] may not exist for several reasons on Windows. # It may exist with a .exe extension or have a -script.py suffix. exe_entrypoint = py_script.with_suffix('.exe') if exe_entrypoint.exists(): # Should be executed directly, ignoring sys.executable. return [exe_entrypoint, *sys.argv[1:]] script_entrypoint = py_script.with_name('%s-script.py' % py_script.name) if script_entrypoint.exists(): # Should be executed as usual. return [*args, script_entrypoint, *sys.argv[1:]] raise RuntimeError('Script %s does not exist.' 
% py_script) else: args += sys.argv return args def trigger_reload(filename): logger.info('%s changed, reloading.', filename) sys.exit(3) def restart_with_reloader(): new_environ = {**os.environ, DJANGO_AUTORELOAD_ENV: 'true'} args = get_child_arguments() while True: p = subprocess.run(args, env=new_environ, close_fds=False) if p.returncode != 3: return p.returncode class BaseReloader: def __init__(self): self.extra_files = set() self.directory_globs = defaultdict(set) self._stop_condition = threading.Event() def watch_dir(self, path, glob): path = Path(path) try: path = path.absolute() except FileNotFoundError: logger.debug( 'Unable to watch directory %s as it cannot be resolved.', path, exc_info=True, ) return logger.debug('Watching dir %s with glob %s.', path, glob) self.directory_globs[path].add(glob) def watched_files(self, include_globs=True): """ Yield all files that need to be watched, including module files and files within globs. """ yield from iter_all_python_module_files() yield from self.extra_files if include_globs: for directory, patterns in self.directory_globs.items(): for pattern in patterns: yield from directory.glob(pattern) def wait_for_apps_ready(self, app_reg, django_main_thread): """ Wait until Django reports that the apps have been loaded. If the given thread has terminated before the apps are ready, then a SyntaxError or other non-recoverable error has been raised. In that case, stop waiting for the apps_ready event and continue processing. Return True if the thread is alive and the ready event has been triggered, or False if the thread is terminated while waiting for the event. """ while django_main_thread.is_alive(): if app_reg.ready_event.wait(timeout=0.1): return True else: logger.debug('Main Django thread has terminated before apps are ready.') return False def run(self, django_main_thread): logger.debug('Waiting for apps ready_event.') self.wait_for_apps_ready(apps, django_main_thread) from django.urls import get_resolver # Prevent a race condition where URL modules aren't loaded when the # reloader starts by accessing the urlconf_module property. try: get_resolver().urlconf_module except Exception: # Loading the urlconf can result in errors during development. # If this occurs then swallow the error and continue. pass logger.debug('Apps ready_event triggered. Sending autoreload_started signal.') autoreload_started.send(sender=self) self.run_loop() def run_loop(self): ticker = self.tick() while not self.should_stop: try: next(ticker) except StopIteration: break self.stop() def tick(self): """ This generator is called in a loop from run_loop. It's important that the method takes care of pausing or otherwise waiting for a period of time. This split between run_loop() and tick() is to improve the testability of the reloader implementations by decoupling the work they do from the loop. """ raise NotImplementedError('subclasses must implement tick().') @classmethod def check_availability(cls): raise NotImplementedError('subclasses must implement check_availability().') def notify_file_changed(self, path): results = file_changed.send(sender=self, file_path=path) logger.debug('%s notified as changed. Signal results: %s.', path, results) if not any(res[1] for res in results): trigger_reload(path) # These are primarily used for testing. @property def should_stop(self): return self._stop_condition.is_set() def stop(self): self._stop_condition.set() class StatReloader(BaseReloader): SLEEP_TIME = 1 # Check for changes once per second. 
def tick(self): mtimes = {} while True: for filepath, mtime in self.snapshot_files(): old_time = mtimes.get(filepath) mtimes[filepath] = mtime if old_time is None: logger.debug('File %s first seen with mtime %s', filepath, mtime) continue elif mtime > old_time: logger.debug('File %s previous mtime: %s, current mtime: %s', filepath, old_time, mtime) self.notify_file_changed(filepath) time.sleep(self.SLEEP_TIME) yield def snapshot_files(self): # watched_files may produce duplicate paths if globs overlap. seen_files = set() for file in self.watched_files(): if file in seen_files: continue try: mtime = file.stat().st_mtime except OSError: # This is thrown when the file does not exist. continue seen_files.add(file) yield file, mtime @classmethod def check_availability(cls): return True class WatchmanUnavailable(RuntimeError): pass class WatchmanReloader(BaseReloader): def __init__(self): self.roots = defaultdict(set) self.processed_request = threading.Event() self.client_timeout = int(os.environ.get('DJANGO_WATCHMAN_TIMEOUT', 5)) super().__init__() @cached_property def client(self): return pywatchman.client(timeout=self.client_timeout) def _watch_root(self, root): # In practice this shouldn't occur, however, it's possible that a # directory that doesn't exist yet is being watched. If it's outside of # sys.path then this will end up a new root. How to handle this isn't # clear: Not adding the root will likely break when subscribing to the # changes, however, as this is currently an internal API, no files # will be being watched outside of sys.path. Fixing this by checking # inside watch_glob() and watch_dir() is expensive, instead this could # could fall back to the StatReloader if this case is detected? For # now, watching its parent, if possible, is sufficient. if not root.exists(): if not root.parent.exists(): logger.warning('Unable to watch root dir %s as neither it or its parent exist.', root) return root = root.parent result = self.client.query('watch-project', str(root.absolute())) if 'warning' in result: logger.warning('Watchman warning: %s', result['warning']) logger.debug('Watchman watch-project result: %s', result) return result['watch'], result.get('relative_path') @functools.lru_cache() def _get_clock(self, root): return self.client.query('clock', root)['clock'] def _subscribe(self, directory, name, expression): root, rel_path = self._watch_root(directory) # Only receive notifications of files changing, filtering out other types # like special files: https://facebook.github.io/watchman/docs/type only_files_expression = [ 'allof', ['anyof', ['type', 'f'], ['type', 'l']], expression ] query = { 'expression': only_files_expression, 'fields': ['name'], 'since': self._get_clock(root), 'dedup_results': True, } if rel_path: query['relative_root'] = rel_path logger.debug('Issuing watchman subscription %s, for root %s. 
Query: %s', name, root, query) self.client.query('subscribe', root, name, query) def _subscribe_dir(self, directory, filenames): if not directory.exists(): if not directory.parent.exists(): logger.warning('Unable to watch directory %s as neither it or its parent exist.', directory) return prefix = 'files-parent-%s' % directory.name filenames = ['%s/%s' % (directory.name, filename) for filename in filenames] directory = directory.parent expression = ['name', filenames, 'wholename'] else: prefix = 'files' expression = ['name', filenames] self._subscribe(directory, '%s:%s' % (prefix, directory), expression) def _watch_glob(self, directory, patterns): """ Watch a directory with a specific glob. If the directory doesn't yet exist, attempt to watch the parent directory and amend the patterns to include this. It's important this method isn't called more than one per directory when updating all subscriptions. Subsequent calls will overwrite the named subscription, so it must include all possible glob expressions. """ prefix = 'glob' if not directory.exists(): if not directory.parent.exists(): logger.warning('Unable to watch directory %s as neither it or its parent exist.', directory) return prefix = 'glob-parent-%s' % directory.name patterns = ['%s/%s' % (directory.name, pattern) for pattern in patterns] directory = directory.parent expression = ['anyof'] for pattern in patterns: expression.append(['match', pattern, 'wholename']) self._subscribe(directory, '%s:%s' % (prefix, directory), expression) def watched_roots(self, watched_files): extra_directories = self.directory_globs.keys() watched_file_dirs = [f.parent for f in watched_files] sys_paths = list(sys_path_directories()) return frozenset((*extra_directories, *watched_file_dirs, *sys_paths)) def _update_watches(self): watched_files = list(self.watched_files(include_globs=False)) found_roots = common_roots(self.watched_roots(watched_files)) logger.debug('Watching %s files', len(watched_files)) logger.debug('Found common roots: %s', found_roots) # Setup initial roots for performance, shortest roots first. for root in sorted(found_roots): self._watch_root(root) for directory, patterns in self.directory_globs.items(): self._watch_glob(directory, patterns) # Group sorted watched_files by their parent directory. sorted_files = sorted(watched_files, key=lambda p: p.parent) for directory, group in itertools.groupby(sorted_files, key=lambda p: p.parent): # These paths need to be relative to the parent directory. self._subscribe_dir(directory, [str(p.relative_to(directory)) for p in group]) def update_watches(self): try: self._update_watches() except Exception as ex: # If the service is still available, raise the original exception. if self.check_server_status(ex): raise def _check_subscription(self, sub): subscription = self.client.getSubscription(sub) if not subscription: return logger.debug('Watchman subscription %s has results.', sub) for result in subscription: # When using watch-project, it's not simple to get the relative # directory without storing some specific state. Store the full # path to the directory in the subscription name, prefixed by its # type (glob, files). root_directory = Path(result['subscription'].split(':', 1)[1]) logger.debug('Found root directory %s', root_directory) for file in result.get('files', []): self.notify_file_changed(root_directory / file) def request_processed(self, **kwargs): logger.debug('Request processed. 
Setting update_watches event.') self.processed_request.set() def tick(self): request_finished.connect(self.request_processed) self.update_watches() while True: if self.processed_request.is_set(): self.update_watches() self.processed_request.clear() try: self.client.receive() except pywatchman.SocketTimeout: pass except pywatchman.WatchmanError as ex: logger.debug('Watchman error: %s, checking server status.', ex) self.check_server_status(ex) else: for sub in list(self.client.subs.keys()): self._check_subscription(sub) yield # Protect against busy loops. time.sleep(0.1) def stop(self): self.client.close() super().stop() def check_server_status(self, inner_ex=None): """Return True if the server is available.""" try: self.client.query('version') except Exception: raise WatchmanUnavailable(str(inner_ex)) from inner_ex return True @classmethod def check_availability(cls): if not pywatchman: raise WatchmanUnavailable('pywatchman not installed.') client = pywatchman.client(timeout=0.1) try: result = client.capabilityCheck() except Exception: # The service is down? raise WatchmanUnavailable('Cannot connect to the watchman service.') version = get_version_tuple(result['version']) # Watchman 4.9 includes multiple improvements to watching project # directories as well as case insensitive filesystems. logger.debug('Watchman version %s', version) if version < (4, 9): raise WatchmanUnavailable('Watchman 4.9 or later is required.') def get_reloader(): """Return the most suitable reloader for this environment.""" try: WatchmanReloader.check_availability() except WatchmanUnavailable: return StatReloader() return WatchmanReloader() def start_django(reloader, main_func, *args, **kwargs): ensure_echo_on() main_func = check_errors(main_func) django_main_thread = threading.Thread(target=main_func, args=args, kwargs=kwargs, name='django-main-thread') django_main_thread.daemon = True django_main_thread.start() while not reloader.should_stop: try: reloader.run(django_main_thread) except WatchmanUnavailable as ex: # It's possible that the watchman service shuts down or otherwise # becomes unavailable. In that case, use the StatReloader. reloader = StatReloader() logger.error('Error connecting to Watchman: %s', ex) logger.info('Watching for file changes with %s', reloader.__class__.__name__) def run_with_reloader(main_func, *args, **kwargs): signal.signal(signal.SIGTERM, lambda *args: sys.exit(0)) try: if os.environ.get(DJANGO_AUTORELOAD_ENV) == 'true': reloader = get_reloader() logger.info('Watching for file changes with %s', reloader.__class__.__name__) start_django(reloader, main_func, *args, **kwargs) else: exit_code = restart_with_reloader() sys.exit(exit_code) except KeyboardInterrupt: pass
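# Illustrative sketch, not part of the module above: how the autoreload
# signals defined here are typically consumed. A receiver on
# autoreload_started can register extra paths to watch, and a receiver on
# file_changed can claim an event (by returning True) so that
# notify_file_changed() skips trigger_reload() for it. The directory and
# glob below are hypothetical.
from django.dispatch import receiver


@receiver(autoreload_started)
def watch_extra_templates(sender, **kwargs):
    # 'sender' is the active reloader (StatReloader or WatchmanReloader).
    sender.watch_dir(Path('/srv/app/templates'), '**/*.html')


@receiver(file_changed)
def reload_templates_in_place(sender, file_path, **kwargs):
    if file_path.suffix == '.html':
        # Handle the change here and return a truthy value to suppress the
        # full process restart; notify_file_changed() only triggers a reload
        # when no receiver claims the event.
        return True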
eb32591bf4b696927d5a8e9d55181cd013c710624c631b01121f854fec561703
# Copyright (c) 2010 Guilherme Gondim. All rights reserved. # Copyright (c) 2009 Simon Willison. All rights reserved. # Copyright (c) 2002 Drew Perttula. All rights reserved. # # License: # Python Software Foundation License version 2 # # See the file "LICENSE" for terms & conditions for usage, and a DISCLAIMER OF # ALL WARRANTIES. # # This Baseconv distribution contains no GNU General Public Licensed (GPLed) # code so it may be used in proprietary projects just like prior ``baseconv`` # distributions. # # All trademarks referenced herein are property of their respective holders. # """ Convert numbers from base 10 integers to base X strings and back again. Sample usage:: >>> base20 = BaseConverter('0123456789abcdefghij') >>> base20.encode(1234) '31e' >>> base20.decode('31e') 1234 >>> base20.encode(-1234) '-31e' >>> base20.decode('-31e') -1234 >>> base11 = BaseConverter('0123456789-', sign='$') >>> base11.encode(-1234) '$-22' >>> base11.decode('$-22') -1234 """ BASE2_ALPHABET = '01' BASE16_ALPHABET = '0123456789ABCDEF' BASE56_ALPHABET = '23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnpqrstuvwxyz' BASE36_ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz' BASE62_ALPHABET = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz' BASE64_ALPHABET = BASE62_ALPHABET + '-_' class BaseConverter: decimal_digits = '0123456789' def __init__(self, digits, sign='-'): self.sign = sign self.digits = digits if sign in self.digits: raise ValueError('Sign character found in converter base digits.') def __repr__(self): return "<%s: base%s (%s)>" % (self.__class__.__name__, len(self.digits), self.digits) def encode(self, i): neg, value = self.convert(i, self.decimal_digits, self.digits, '-') if neg: return self.sign + value return value def decode(self, s): neg, value = self.convert(s, self.digits, self.decimal_digits, self.sign) if neg: value = '-' + value return int(value) def convert(self, number, from_digits, to_digits, sign): if str(number)[0] == sign: number = str(number)[1:] neg = 1 else: neg = 0 # make an integer out of the number x = 0 for digit in str(number): x = x * len(from_digits) + from_digits.index(digit) # create the result in base 'len(to_digits)' if x == 0: res = to_digits[0] else: res = '' while x > 0: digit = x % len(to_digits) res = to_digits[digit] + res x = int(x // len(to_digits)) return neg, res base2 = BaseConverter(BASE2_ALPHABET) base16 = BaseConverter(BASE16_ALPHABET) base36 = BaseConverter(BASE36_ALPHABET) base56 = BaseConverter(BASE56_ALPHABET) base62 = BaseConverter(BASE62_ALPHABET) base64 = BaseConverter(BASE64_ALPHABET, sign='$')
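# Illustrative sketch, not part of the module above: the predefined
# converters can be used directly, e.g. to shorten integer identifiers.
# The numbers below are arbitrary.
if __name__ == '__main__':
    token = base62.encode(2147483647)
    assert base62.decode(token) == 2147483647
    # Negative numbers keep a sign prefix: '-' by default, '$' for base64
    # because '-' is already part of its alphabet.
    assert base64.decode(base64.encode(-42)) == -42
    print(token)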
c30986a75ff77f14969245473d42434fb96e60748efeafd2bf593459f5936c7d
import datetime import json import mimetypes import os import re import sys import time from collections.abc import Mapping from email.header import Header from http.client import responses from urllib.parse import quote, urlparse from django.conf import settings from django.core import signals, signing from django.core.exceptions import DisallowedRedirect from django.core.serializers.json import DjangoJSONEncoder from django.http.cookie import SimpleCookie from django.utils import timezone from django.utils.datastructures import ( CaseInsensitiveMapping, _destruct_iterable_mapping_values, ) from django.utils.encoding import iri_to_uri from django.utils.http import http_date from django.utils.regex_helper import _lazy_re_compile _charset_from_content_type_re = _lazy_re_compile(r';\s*charset=(?P<charset>[^\s;]+)', re.I) class ResponseHeaders(CaseInsensitiveMapping): def __init__(self, data): """ Populate the initial data using __setitem__ to ensure values are correctly encoded. """ if not isinstance(data, Mapping): data = {k: v for k, v in _destruct_iterable_mapping_values(data)} self._store = {} for header, value in data.items(): self[header] = value def _convert_to_charset(self, value, charset, mime_encode=False): """ Convert headers key/value to ascii/latin-1 native strings. `charset` must be 'ascii' or 'latin-1'. If `mime_encode` is True and `value` can't be represented in the given charset, apply MIME-encoding. """ if not isinstance(value, (bytes, str)): value = str(value) if ( (isinstance(value, bytes) and (b'\n' in value or b'\r' in value)) or (isinstance(value, str) and ('\n' in value or '\r' in value)) ): raise BadHeaderError("Header values can't contain newlines (got %r)" % value) try: if isinstance(value, str): # Ensure string is valid in given charset value.encode(charset) else: # Convert bytestring using given charset value = value.decode(charset) except UnicodeError as e: if mime_encode: value = Header(value, 'utf-8', maxlinelen=sys.maxsize).encode() else: e.reason += ', HTTP response headers must be in %s format' % charset raise return value def __delitem__(self, key): self.pop(key) def __setitem__(self, key, value): key = self._convert_to_charset(key, 'ascii') value = self._convert_to_charset(value, 'latin-1', mime_encode=True) self._store[key.lower()] = (key, value) def pop(self, key, default=None): return self._store.pop(key.lower(), default) def setdefault(self, key, value): if key not in self: self[key] = value class BadHeaderError(ValueError): pass class HttpResponseBase: """ An HTTP response base class with dictionary-accessed headers. This class doesn't handle content. It should not be used directly. Use the HttpResponse and StreamingHttpResponse subclasses instead. """ status_code = 200 def __init__(self, content_type=None, status=None, reason=None, charset=None, headers=None): self.headers = ResponseHeaders(headers or {}) self._charset = charset if content_type and 'Content-Type' in self.headers: raise ValueError( "'headers' must not contain 'Content-Type' when the " "'content_type' parameter is provided." ) if 'Content-Type' not in self.headers: if content_type is None: content_type = 'text/html; charset=%s' % self.charset self.headers['Content-Type'] = content_type self._resource_closers = [] # This parameter is set by the handler. It's necessary to preserve the # historical behavior of request_finished. 
self._handler_class = None self.cookies = SimpleCookie() self.closed = False if status is not None: try: self.status_code = int(status) except (ValueError, TypeError): raise TypeError('HTTP status code must be an integer.') if not 100 <= self.status_code <= 599: raise ValueError('HTTP status code must be an integer from 100 to 599.') self._reason_phrase = reason @property def reason_phrase(self): if self._reason_phrase is not None: return self._reason_phrase # Leave self._reason_phrase unset in order to use the default # reason phrase for status code. return responses.get(self.status_code, 'Unknown Status Code') @reason_phrase.setter def reason_phrase(self, value): self._reason_phrase = value @property def charset(self): if self._charset is not None: return self._charset content_type = self.get('Content-Type', '') matched = _charset_from_content_type_re.search(content_type) if matched: # Extract the charset and strip its double quotes return matched['charset'].replace('"', '') return settings.DEFAULT_CHARSET @charset.setter def charset(self, value): self._charset = value def serialize_headers(self): """HTTP headers as a bytestring.""" def to_bytes(val, encoding): return val if isinstance(val, bytes) else val.encode(encoding) headers = [ (to_bytes(key, 'ascii') + b': ' + to_bytes(value, 'latin-1')) for key, value in self.headers.items() ] return b'\r\n'.join(headers) __bytes__ = serialize_headers @property def _content_type_for_repr(self): return ', "%s"' % self.headers['Content-Type'] if 'Content-Type' in self.headers else '' def __setitem__(self, header, value): self.headers[header] = value def __delitem__(self, header): del self.headers[header] def __getitem__(self, header): return self.headers[header] def has_header(self, header): """Case-insensitive check for a header.""" return header in self.headers __contains__ = has_header def items(self): return self.headers.items() def get(self, header, alternate=None): return self.headers.get(header, alternate) def set_cookie(self, key, value='', max_age=None, expires=None, path='/', domain=None, secure=False, httponly=False, samesite=None): """ Set a cookie. ``expires`` can be: - a string in the correct format, - a naive ``datetime.datetime`` object in UTC, - an aware ``datetime.datetime`` object in any time zone. If it is a ``datetime.datetime`` object then calculate ``max_age``. """ self.cookies[key] = value if expires is not None: if isinstance(expires, datetime.datetime): if timezone.is_aware(expires): expires = timezone.make_naive(expires, timezone.utc) delta = expires - expires.utcnow() # Add one second so the date matches exactly (a fraction of # time gets lost between converting to a timedelta and # then the date string). delta = delta + datetime.timedelta(seconds=1) # Just set max_age - the max_age logic will set expires. expires = None max_age = max(0, delta.days * 86400 + delta.seconds) else: self.cookies[key]['expires'] = expires else: self.cookies[key]['expires'] = '' if max_age is not None: self.cookies[key]['max-age'] = int(max_age) # IE requires expires, so set it if hasn't been already. 
if not expires: self.cookies[key]['expires'] = http_date(time.time() + max_age) if path is not None: self.cookies[key]['path'] = path if domain is not None: self.cookies[key]['domain'] = domain if secure: self.cookies[key]['secure'] = True if httponly: self.cookies[key]['httponly'] = True if samesite: if samesite.lower() not in ('lax', 'none', 'strict'): raise ValueError('samesite must be "lax", "none", or "strict".') self.cookies[key]['samesite'] = samesite def setdefault(self, key, value): """Set a header unless it has already been set.""" self.headers.setdefault(key, value) def set_signed_cookie(self, key, value, salt='', **kwargs): value = signing.get_cookie_signer(salt=key + salt).sign(value) return self.set_cookie(key, value, **kwargs) def delete_cookie(self, key, path='/', domain=None, samesite=None): # Browsers can ignore the Set-Cookie header if the cookie doesn't use # the secure flag and: # - the cookie name starts with "__Host-" or "__Secure-", or # - the samesite is "none". secure = ( key.startswith(('__Secure-', '__Host-')) or (samesite and samesite.lower() == 'none') ) self.set_cookie( key, max_age=0, path=path, domain=domain, secure=secure, expires='Thu, 01 Jan 1970 00:00:00 GMT', samesite=samesite, ) # Common methods used by subclasses def make_bytes(self, value): """Turn a value into a bytestring encoded in the output charset.""" # Per PEP 3333, this response body must be bytes. To avoid returning # an instance of a subclass, this function returns `bytes(value)`. # This doesn't make a copy when `value` already contains bytes. # Handle string types -- we can't rely on force_bytes here because: # - Python attempts str conversion first # - when self._charset != 'utf-8' it re-encodes the content if isinstance(value, (bytes, memoryview)): return bytes(value) if isinstance(value, str): return bytes(value.encode(self.charset)) # Handle non-string types. return str(value).encode(self.charset) # These methods partially implement the file-like object interface. # See https://docs.python.org/library/io.html#io.IOBase # The WSGI server must call this method upon completion of the request. # See http://blog.dscpl.com.au/2012/10/obligations-for-calling-close-on.html def close(self): for closer in self._resource_closers: try: closer() except Exception: pass # Free resources that were still referenced. self._resource_closers.clear() self.closed = True signals.request_finished.send(sender=self._handler_class) def write(self, content): raise OSError('This %s instance is not writable' % self.__class__.__name__) def flush(self): pass def tell(self): raise OSError('This %s instance cannot tell its position' % self.__class__.__name__) # These methods partially implement a stream-like object interface. # See https://docs.python.org/library/io.html#io.IOBase def readable(self): return False def seekable(self): return False def writable(self): return False def writelines(self, lines): raise OSError('This %s instance is not writable' % self.__class__.__name__) class HttpResponse(HttpResponseBase): """ An HTTP response class with a string as content. This content can be read, appended to, or replaced. """ streaming = False def __init__(self, content=b'', *args, **kwargs): super().__init__(*args, **kwargs) # Content is a bytestring. See the `content` property methods. 
self.content = content def __repr__(self): return '<%(cls)s status_code=%(status_code)d%(content_type)s>' % { 'cls': self.__class__.__name__, 'status_code': self.status_code, 'content_type': self._content_type_for_repr, } def serialize(self): """Full HTTP message, including headers, as a bytestring.""" return self.serialize_headers() + b'\r\n\r\n' + self.content __bytes__ = serialize @property def content(self): return b''.join(self._container) @content.setter def content(self, value): # Consume iterators upon assignment to allow repeated iteration. if ( hasattr(value, '__iter__') and not isinstance(value, (bytes, memoryview, str)) ): content = b''.join(self.make_bytes(chunk) for chunk in value) if hasattr(value, 'close'): try: value.close() except Exception: pass else: content = self.make_bytes(value) # Create a list of properly encoded bytestrings to support write(). self._container = [content] def __iter__(self): return iter(self._container) def write(self, content): self._container.append(self.make_bytes(content)) def tell(self): return len(self.content) def getvalue(self): return self.content def writable(self): return True def writelines(self, lines): for line in lines: self.write(line) class StreamingHttpResponse(HttpResponseBase): """ A streaming HTTP response class with an iterator as content. This should only be iterated once, when the response is streamed to the client. However, it can be appended to or replaced with a new iterator that wraps the original content (or yields entirely new content). """ streaming = True def __init__(self, streaming_content=(), *args, **kwargs): super().__init__(*args, **kwargs) # `streaming_content` should be an iterable of bytestrings. # See the `streaming_content` property methods. self.streaming_content = streaming_content @property def content(self): raise AttributeError( "This %s instance has no `content` attribute. Use " "`streaming_content` instead." % self.__class__.__name__ ) @property def streaming_content(self): return map(self.make_bytes, self._iterator) @streaming_content.setter def streaming_content(self, value): self._set_streaming_content(value) def _set_streaming_content(self, value): # Ensure we can never iterate on "value" more than once. self._iterator = iter(value) if hasattr(value, 'close'): self._resource_closers.append(value.close) def __iter__(self): return self.streaming_content def getvalue(self): return b''.join(self.streaming_content) class FileResponse(StreamingHttpResponse): """ A streaming HTTP response class optimized for files. """ block_size = 4096 def __init__(self, *args, as_attachment=False, filename='', **kwargs): self.as_attachment = as_attachment self.filename = filename super().__init__(*args, **kwargs) def _set_streaming_content(self, value): if not hasattr(value, 'read'): self.file_to_stream = None return super()._set_streaming_content(value) self.file_to_stream = filelike = value if hasattr(filelike, 'close'): self._resource_closers.append(filelike.close) value = iter(lambda: filelike.read(self.block_size), b'') self.set_headers(filelike) super()._set_streaming_content(value) def set_headers(self, filelike): """ Set some common response headers (Content-Length, Content-Type, and Content-Disposition) based on the `filelike` response content. 
""" encoding_map = { 'bzip2': 'application/x-bzip', 'gzip': 'application/gzip', 'xz': 'application/x-xz', } filename = getattr(filelike, 'name', None) filename = filename if (isinstance(filename, str) and filename) else self.filename if os.path.isabs(filename): self.headers['Content-Length'] = os.path.getsize(filelike.name) elif hasattr(filelike, 'getbuffer'): self.headers['Content-Length'] = filelike.getbuffer().nbytes if self.headers.get('Content-Type', '').startswith('text/html'): if filename: content_type, encoding = mimetypes.guess_type(filename) # Encoding isn't set to prevent browsers from automatically # uncompressing files. content_type = encoding_map.get(encoding, content_type) self.headers['Content-Type'] = content_type or 'application/octet-stream' else: self.headers['Content-Type'] = 'application/octet-stream' filename = self.filename or os.path.basename(filename) if filename: disposition = 'attachment' if self.as_attachment else 'inline' try: filename.encode('ascii') file_expr = 'filename="{}"'.format(filename) except UnicodeEncodeError: file_expr = "filename*=utf-8''{}".format(quote(filename)) self.headers['Content-Disposition'] = '{}; {}'.format(disposition, file_expr) elif self.as_attachment: self.headers['Content-Disposition'] = 'attachment' class HttpResponseRedirectBase(HttpResponse): allowed_schemes = ['http', 'https', 'ftp'] def __init__(self, redirect_to, *args, **kwargs): super().__init__(*args, **kwargs) self['Location'] = iri_to_uri(redirect_to) parsed = urlparse(str(redirect_to)) if parsed.scheme and parsed.scheme not in self.allowed_schemes: raise DisallowedRedirect("Unsafe redirect to URL with protocol '%s'" % parsed.scheme) url = property(lambda self: self['Location']) def __repr__(self): return '<%(cls)s status_code=%(status_code)d%(content_type)s, url="%(url)s">' % { 'cls': self.__class__.__name__, 'status_code': self.status_code, 'content_type': self._content_type_for_repr, 'url': self.url, } class HttpResponseRedirect(HttpResponseRedirectBase): status_code = 302 class HttpResponsePermanentRedirect(HttpResponseRedirectBase): status_code = 301 class HttpResponseNotModified(HttpResponse): status_code = 304 def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) del self['content-type'] @HttpResponse.content.setter def content(self, value): if value: raise AttributeError("You cannot set content to a 304 (Not Modified) response") self._container = [] class HttpResponseBadRequest(HttpResponse): status_code = 400 class HttpResponseNotFound(HttpResponse): status_code = 404 class HttpResponseForbidden(HttpResponse): status_code = 403 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, permitted_methods, *args, **kwargs): super().__init__(*args, **kwargs) self['Allow'] = ', '.join(permitted_methods) def __repr__(self): return '<%(cls)s [%(methods)s] status_code=%(status_code)d%(content_type)s>' % { 'cls': self.__class__.__name__, 'status_code': self.status_code, 'content_type': self._content_type_for_repr, 'methods': self['Allow'], } class HttpResponseGone(HttpResponse): status_code = 410 class HttpResponseServerError(HttpResponse): status_code = 500 class Http404(Exception): pass class JsonResponse(HttpResponse): """ An HTTP response class that consumes data to be serialized to JSON. :param data: Data to be dumped into json. By default only ``dict`` objects are allowed to be passed due to a security flaw before ECMAScript 5. See the ``safe`` parameter for more information. :param encoder: Should be a json encoder class. 
Defaults to ``django.core.serializers.json.DjangoJSONEncoder``. :param safe: Controls if only ``dict`` objects may be serialized. Defaults to ``True``. :param json_dumps_params: A dictionary of kwargs passed to json.dumps(). """ def __init__(self, data, encoder=DjangoJSONEncoder, safe=True, json_dumps_params=None, **kwargs): if safe and not isinstance(data, dict): raise TypeError( 'In order to allow non-dict objects to be serialized set the ' 'safe parameter to False.' ) if json_dumps_params is None: json_dumps_params = {} kwargs.setdefault('content_type', 'application/json') data = json.dumps(data, cls=encoder, **json_dumps_params) super().__init__(content=data, **kwargs)
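

# Illustrative usage sketch (not part of Django's source): minimal views
# exercising the response classes above. The view names and the 'report.pdf'
# path are hypothetical; the response APIs are the ones defined above.
from django.http import FileResponse, HttpResponseRedirect, JsonResponse


def articles_json(request):
    # Non-dict payloads (e.g. lists) require safe=False.
    return JsonResponse([{'id': 1, 'title': 'Hello'}], safe=False)


def report_pdf(request):
    # FileResponse streams the file in block_size chunks and fills in
    # Content-Length, Content-Type, and Content-Disposition from the file.
    return FileResponse(open('report.pdf', 'rb'), as_attachment=True)


def go_home(request):
    # Redirect responses validate the target scheme against allowed_schemes.
    return HttpResponseRedirect('/')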
import copy import datetime import functools import inspect from decimal import Decimal from uuid import UUID from django.core.exceptions import EmptyResultSet, FieldError from django.db import DatabaseError, NotSupportedError, connection from django.db.models import fields from django.db.models.constants import LOOKUP_SEP from django.db.models.query_utils import Q from django.utils.deconstruct import deconstructible from django.utils.functional import cached_property from django.utils.hashable import make_hashable class SQLiteNumericMixin: """ Some expressions with output_field=DecimalField() must be cast to numeric to be properly filtered. """ def as_sqlite(self, compiler, connection, **extra_context): sql, params = self.as_sql(compiler, connection, **extra_context) try: if self.output_field.get_internal_type() == 'DecimalField': sql = 'CAST(%s AS NUMERIC)' % sql except FieldError: pass return sql, params class Combinable: """ Provide the ability to combine one or two objects with some connector. For example F('foo') + F('bar'). """ # Arithmetic connectors ADD = '+' SUB = '-' MUL = '*' DIV = '/' POW = '^' # The following is a quoted % operator - it is quoted because it can be # used in strings that also have parameter substitution. MOD = '%%' # Bitwise operators - note that these are generated by .bitand() # and .bitor(), the '&' and '|' are reserved for boolean operator # usage. BITAND = '&' BITOR = '|' BITLEFTSHIFT = '<<' BITRIGHTSHIFT = '>>' BITXOR = '#' def _combine(self, other, connector, reversed): if not hasattr(other, 'resolve_expression'): # everything must be resolvable to an expression other = Value(other) if reversed: return CombinedExpression(other, connector, self) return CombinedExpression(self, connector, other) ############# # OPERATORS # ############# def __neg__(self): return self._combine(-1, self.MUL, False) def __add__(self, other): return self._combine(other, self.ADD, False) def __sub__(self, other): return self._combine(other, self.SUB, False) def __mul__(self, other): return self._combine(other, self.MUL, False) def __truediv__(self, other): return self._combine(other, self.DIV, False) def __mod__(self, other): return self._combine(other, self.MOD, False) def __pow__(self, other): return self._combine(other, self.POW, False) def __and__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) & Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) def bitand(self, other): return self._combine(other, self.BITAND, False) def bitleftshift(self, other): return self._combine(other, self.BITLEFTSHIFT, False) def bitrightshift(self, other): return self._combine(other, self.BITRIGHTSHIFT, False) def bitxor(self, other): return self._combine(other, self.BITXOR, False) def __or__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) | Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." 
) def bitor(self, other): return self._combine(other, self.BITOR, False) def __radd__(self, other): return self._combine(other, self.ADD, True) def __rsub__(self, other): return self._combine(other, self.SUB, True) def __rmul__(self, other): return self._combine(other, self.MUL, True) def __rtruediv__(self, other): return self._combine(other, self.DIV, True) def __rmod__(self, other): return self._combine(other, self.MOD, True) def __rpow__(self, other): return self._combine(other, self.POW, True) def __rand__(self, other): raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) def __ror__(self, other): raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) class BaseExpression: """Base class for all query expressions.""" # aggregate specific fields is_summary = False _output_field_resolved_to_none = False # Can the expression be used in a WHERE clause? filterable = True # Can the expression can be used as a source expression in Window? window_compatible = False def __init__(self, output_field=None): if output_field is not None: self.output_field = output_field def __getstate__(self): state = self.__dict__.copy() state.pop('convert_value', None) return state def get_db_converters(self, connection): return ( [] if self.convert_value is self._convert_value_noop else [self.convert_value] ) + self.output_field.get_db_converters(connection) def get_source_expressions(self): return [] def set_source_expressions(self, exprs): assert not exprs def _parse_expressions(self, *expressions): return [ arg if hasattr(arg, 'resolve_expression') else ( F(arg) if isinstance(arg, str) else Value(arg) ) for arg in expressions ] def as_sql(self, compiler, connection): """ Responsible for returning a (sql, [params]) tuple to be included in the current query. Different backends can provide their own implementation, by providing an `as_{vendor}` method and patching the Expression: ``` def override_as_sql(self, compiler, connection): # custom logic return super().as_sql(compiler, connection) setattr(Expression, 'as_' + connection.vendor, override_as_sql) ``` Arguments: * compiler: the query compiler responsible for generating the query. Must have a compile method, returning a (sql, [params]) tuple. Calling compiler(value) will return a quoted `value`. * connection: the database connection used for the current query. Return: (sql, params) Where `sql` is a string containing ordered sql parameters to be replaced with the elements of the list `params`. """ raise NotImplementedError("Subclasses must implement as_sql()") @cached_property def contains_aggregate(self): return any(expr and expr.contains_aggregate for expr in self.get_source_expressions()) @cached_property def contains_over_clause(self): return any(expr and expr.contains_over_clause for expr in self.get_source_expressions()) @cached_property def contains_column_references(self): return any(expr and expr.contains_column_references for expr in self.get_source_expressions()) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): """ Provide the chance to do any preprocessing or validation before being added to the query. 
Arguments: * query: the backend query implementation * allow_joins: boolean allowing or denying use of joins in this query * reuse: a set of reusable joins for multijoins * summarize: a terminal aggregate clause * for_save: whether this expression about to be used in a save or update Return: an Expression to be added to the query. """ c = self.copy() c.is_summary = summarize c.set_source_expressions([ expr.resolve_expression(query, allow_joins, reuse, summarize) if expr else None for expr in c.get_source_expressions() ]) return c @property def conditional(self): return isinstance(self.output_field, fields.BooleanField) @property def field(self): return self.output_field @cached_property def output_field(self): """Return the output type of this expressions.""" output_field = self._resolve_output_field() if output_field is None: self._output_field_resolved_to_none = True raise FieldError('Cannot resolve expression type, unknown output_field') return output_field @cached_property def _output_field_or_none(self): """ Return the output field of this expression, or None if _resolve_output_field() didn't return an output type. """ try: return self.output_field except FieldError: if not self._output_field_resolved_to_none: raise def _resolve_output_field(self): """ Attempt to infer the output type of the expression. If the output fields of all source fields match then, simply infer the same type here. This isn't always correct, but it makes sense most of the time. Consider the difference between `2 + 2` and `2 / 3`. Inferring the type here is a convenience for the common case. The user should supply their own output_field with more complex computations. If a source's output field resolves to None, exclude it from this check. If all sources are None, then an error is raised higher up the stack in the output_field property. """ sources_iter = (source for source in self.get_source_fields() if source is not None) for output_field in sources_iter: for source in sources_iter: if not isinstance(output_field, source.__class__): raise FieldError( 'Expression contains mixed types: %s, %s. You must ' 'set output_field.' % ( output_field.__class__.__name__, source.__class__.__name__, ) ) return output_field @staticmethod def _convert_value_noop(value, expression, connection): return value @cached_property def convert_value(self): """ Expressions provide their own converters because users have the option of manually specifying the output_field which may be a different type from the one the database returns. 
""" field = self.output_field internal_type = field.get_internal_type() if internal_type == 'FloatField': return lambda value, expression, connection: None if value is None else float(value) elif internal_type.endswith('IntegerField'): return lambda value, expression, connection: None if value is None else int(value) elif internal_type == 'DecimalField': return lambda value, expression, connection: None if value is None else Decimal(value) return self._convert_value_noop def get_lookup(self, lookup): return self.output_field.get_lookup(lookup) def get_transform(self, name): return self.output_field.get_transform(name) def relabeled_clone(self, change_map): clone = self.copy() clone.set_source_expressions([ e.relabeled_clone(change_map) if e is not None else None for e in self.get_source_expressions() ]) return clone def copy(self): return copy.copy(self) def get_group_by_cols(self, alias=None): if not self.contains_aggregate: return [self] cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def get_source_fields(self): """Return the underlying field types used by this aggregate.""" return [e._output_field_or_none for e in self.get_source_expressions()] def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def reverse_ordering(self): return self def flatten(self): """ Recursively yield this expression and all subexpressions, in depth-first order. """ yield self for expr in self.get_source_expressions(): if expr: if hasattr(expr, 'flatten'): yield from expr.flatten() else: yield expr def select_format(self, compiler, sql, params): """ Custom format for select clauses. For example, EXISTS expressions need to be wrapped in CASE WHEN on Oracle. 
""" if hasattr(self.output_field, 'select_format'): return self.output_field.select_format(compiler, sql, params) return sql, params @deconstructible class Expression(BaseExpression, Combinable): """An expression that can be combined with other expressions.""" @cached_property def identity(self): constructor_signature = inspect.signature(self.__init__) args, kwargs = self._constructor_args signature = constructor_signature.bind_partial(*args, **kwargs) signature.apply_defaults() arguments = signature.arguments.items() identity = [self.__class__] for arg, value in arguments: if isinstance(value, fields.Field): if value.name and value.model: value = (value.model._meta.label, value.name) else: value = type(value) else: value = make_hashable(value) identity.append((arg, value)) return tuple(identity) def __eq__(self, other): if not isinstance(other, Expression): return NotImplemented return other.identity == self.identity def __hash__(self): return hash(self.identity) _connector_combinators = { connector: [ (fields.IntegerField, fields.IntegerField, fields.IntegerField), (fields.IntegerField, fields.DecimalField, fields.DecimalField), (fields.DecimalField, fields.IntegerField, fields.DecimalField), (fields.IntegerField, fields.FloatField, fields.FloatField), (fields.FloatField, fields.IntegerField, fields.FloatField), ] for connector in (Combinable.ADD, Combinable.SUB, Combinable.MUL, Combinable.DIV) } @functools.lru_cache(maxsize=128) def _resolve_combined_type(connector, lhs_type, rhs_type): combinators = _connector_combinators.get(connector, ()) for combinator_lhs_type, combinator_rhs_type, combined_type in combinators: if issubclass(lhs_type, combinator_lhs_type) and issubclass(rhs_type, combinator_rhs_type): return combined_type class CombinedExpression(SQLiteNumericMixin, Expression): def __init__(self, lhs, connector, rhs, output_field=None): super().__init__(output_field=output_field) self.connector = connector self.lhs = lhs self.rhs = rhs def __repr__(self): return "<{}: {}>".format(self.__class__.__name__, self) def __str__(self): return "{} {} {}".format(self.lhs, self.connector, self.rhs) def get_source_expressions(self): return [self.lhs, self.rhs] def set_source_expressions(self, exprs): self.lhs, self.rhs = exprs def _resolve_output_field(self): try: return super()._resolve_output_field() except FieldError: combined_type = _resolve_combined_type( self.connector, type(self.lhs.output_field), type(self.rhs.output_field), ) if combined_type is None: raise return combined_type() def as_sql(self, compiler, connection): expressions = [] expression_params = [] sql, params = compiler.compile(self.lhs) expressions.append(sql) expression_params.extend(params) sql, params = compiler.compile(self.rhs) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_expression(self.connector, expressions) return expression_wrapper % sql, expression_params def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): lhs = self.lhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) rhs = self.rhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) if not isinstance(self, (DurationExpression, TemporalSubtraction)): try: lhs_type = lhs.output_field.get_internal_type() except (AttributeError, FieldError): lhs_type = None try: rhs_type = rhs.output_field.get_internal_type() except (AttributeError, FieldError): rhs_type = None if 'DurationField' in 
{lhs_type, rhs_type} and lhs_type != rhs_type: return DurationExpression(self.lhs, self.connector, self.rhs).resolve_expression( query, allow_joins, reuse, summarize, for_save, ) datetime_fields = {'DateField', 'DateTimeField', 'TimeField'} if self.connector == self.SUB and lhs_type in datetime_fields and lhs_type == rhs_type: return TemporalSubtraction(self.lhs, self.rhs).resolve_expression( query, allow_joins, reuse, summarize, for_save, ) c = self.copy() c.is_summary = summarize c.lhs = lhs c.rhs = rhs return c class DurationExpression(CombinedExpression): def compile(self, side, compiler, connection): try: output = side.output_field except FieldError: pass else: if output.get_internal_type() == 'DurationField': sql, params = compiler.compile(side) return connection.ops.format_for_duration_arithmetic(sql), params return compiler.compile(side) def as_sql(self, compiler, connection): if connection.features.has_native_duration_field: return super().as_sql(compiler, connection) connection.ops.check_expression_support(self) expressions = [] expression_params = [] sql, params = self.compile(self.lhs, compiler, connection) expressions.append(sql) expression_params.extend(params) sql, params = self.compile(self.rhs, compiler, connection) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_duration_expression(self.connector, expressions) return expression_wrapper % sql, expression_params def as_sqlite(self, compiler, connection, **extra_context): sql, params = self.as_sql(compiler, connection, **extra_context) if self.connector in {Combinable.MUL, Combinable.DIV}: try: lhs_type = self.lhs.output_field.get_internal_type() rhs_type = self.rhs.output_field.get_internal_type() except (AttributeError, FieldError): pass else: allowed_fields = { 'DecimalField', 'DurationField', 'FloatField', 'IntegerField', } if lhs_type not in allowed_fields or rhs_type not in allowed_fields: raise DatabaseError( f'Invalid arguments for operator {self.connector}.' ) return sql, params class TemporalSubtraction(CombinedExpression): output_field = fields.DurationField() def __init__(self, lhs, rhs): super().__init__(lhs, self.SUB, rhs) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) lhs = compiler.compile(self.lhs) rhs = compiler.compile(self.rhs) return connection.ops.subtract_temporals(self.lhs.output_field.get_internal_type(), lhs, rhs) @deconstructible class F(Combinable): """An object capable of resolving references to existing query objects.""" def __init__(self, name): """ Arguments: * name: the name of the field this expression references """ self.name = name def __repr__(self): return "{}({})".format(self.__class__.__name__, self.name) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): return query.resolve_ref(self.name, allow_joins, reuse, summarize) def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def __eq__(self, other): return self.__class__ == other.__class__ and self.name == other.name def __hash__(self): return hash(self.name) class ResolvedOuterRef(F): """ An object that contains a reference to an outer query. In this case, the reference to the outer query has been resolved because the inner query has been used as a subquery. 
""" contains_aggregate = False def as_sql(self, *args, **kwargs): raise ValueError( 'This queryset contains a reference to an outer query and may ' 'only be used in a subquery.' ) def resolve_expression(self, *args, **kwargs): col = super().resolve_expression(*args, **kwargs) # FIXME: Rename possibly_multivalued to multivalued and fix detection # for non-multivalued JOINs (e.g. foreign key fields). This should take # into account only many-to-many and one-to-many relationships. col.possibly_multivalued = LOOKUP_SEP in self.name return col def relabeled_clone(self, relabels): return self def get_group_by_cols(self, alias=None): return [] class OuterRef(F): contains_aggregate = False def resolve_expression(self, *args, **kwargs): if isinstance(self.name, self.__class__): return self.name return ResolvedOuterRef(self.name) def relabeled_clone(self, relabels): return self class Func(SQLiteNumericMixin, Expression): """An SQL function call.""" function = None template = '%(function)s(%(expressions)s)' arg_joiner = ', ' arity = None # The number of arguments the function accepts. def __init__(self, *expressions, output_field=None, **extra): if self.arity is not None and len(expressions) != self.arity: raise TypeError( "'%s' takes exactly %s %s (%s given)" % ( self.__class__.__name__, self.arity, "argument" if self.arity == 1 else "arguments", len(expressions), ) ) super().__init__(output_field=output_field) self.source_expressions = self._parse_expressions(*expressions) self.extra = extra def __repr__(self): args = self.arg_joiner.join(str(arg) for arg in self.source_expressions) extra = {**self.extra, **self._get_repr_options()} if extra: extra = ', '.join(str(key) + '=' + str(val) for key, val in sorted(extra.items())) return "{}({}, {})".format(self.__class__.__name__, args, extra) return "{}({})".format(self.__class__.__name__, args) def _get_repr_options(self): """Return a dict of extra __init__() options to include in the repr.""" return {} def get_source_expressions(self): return self.source_expressions def set_source_expressions(self, exprs): self.source_expressions = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, arg in enumerate(c.source_expressions): c.source_expressions[pos] = arg.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, function=None, template=None, arg_joiner=None, **extra_context): connection.ops.check_expression_support(self) sql_parts = [] params = [] for arg in self.source_expressions: arg_sql, arg_params = compiler.compile(arg) sql_parts.append(arg_sql) params.extend(arg_params) data = {**self.extra, **extra_context} # Use the first supplied value in this order: the parameter to this # method, a value supplied in __init__()'s **extra (the value in # `data`), or the value defined on the class. 
if function is not None: data['function'] = function else: data.setdefault('function', self.function) template = template or data.get('template', self.template) arg_joiner = arg_joiner or data.get('arg_joiner', self.arg_joiner) data['expressions'] = data['field'] = arg_joiner.join(sql_parts) return template % data, params def copy(self): copy = super().copy() copy.source_expressions = self.source_expressions[:] copy.extra = self.extra.copy() return copy class Value(SQLiteNumericMixin, Expression): """Represent a wrapped value as a node within an expression.""" # Provide a default value for `for_save` in order to allow unresolved # instances to be compiled until a decision is taken in #25425. for_save = False def __init__(self, value, output_field=None): """ Arguments: * value: the value this expression represents. The value will be added into the sql parameter list and properly quoted. * output_field: an instance of the model field type that this expression will return, such as IntegerField() or CharField(). """ super().__init__(output_field=output_field) self.value = value def __repr__(self): return "{}({})".format(self.__class__.__name__, self.value) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) val = self.value output_field = self._output_field_or_none if output_field is not None: if self.for_save: val = output_field.get_db_prep_save(val, connection=connection) else: val = output_field.get_db_prep_value(val, connection=connection) if hasattr(output_field, 'get_placeholder'): return output_field.get_placeholder(val, compiler, connection), [val] if val is None: # cx_Oracle does not always convert None to the appropriate # NULL type (like in case expressions using numbers), so we # use a literal SQL NULL return 'NULL', [] return '%s', [val] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = super().resolve_expression(query, allow_joins, reuse, summarize, for_save) c.for_save = for_save return c def get_group_by_cols(self, alias=None): return [] def _resolve_output_field(self): if isinstance(self.value, str): return fields.CharField() if isinstance(self.value, bool): return fields.BooleanField() if isinstance(self.value, int): return fields.IntegerField() if isinstance(self.value, float): return fields.FloatField() if isinstance(self.value, datetime.datetime): return fields.DateTimeField() if isinstance(self.value, datetime.date): return fields.DateField() if isinstance(self.value, datetime.time): return fields.TimeField() if isinstance(self.value, datetime.timedelta): return fields.DurationField() if isinstance(self.value, Decimal): return fields.DecimalField() if isinstance(self.value, bytes): return fields.BinaryField() if isinstance(self.value, UUID): return fields.UUIDField() class RawSQL(Expression): def __init__(self, sql, params, output_field=None): if output_field is None: output_field = fields.Field() self.sql, self.params = sql, params super().__init__(output_field=output_field) def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.sql, self.params) def as_sql(self, compiler, connection): return '(%s)' % self.sql, self.params def get_group_by_cols(self, alias=None): return [self] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # Resolve parents fields used in raw SQL. 
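        # Illustrative example (not from the source): with multi-table
        # inheritance, raw SQL that names a parent table's column needs the
        # JOIN to that parent table; resolving the matching field here adds
        # that join to the query.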
for parent in query.model._meta.get_parent_list(): for parent_field in parent._meta.local_fields: _, column_name = parent_field.get_attname_column() if column_name.lower() in self.sql.lower(): query.resolve_ref(parent_field.name, allow_joins, reuse, summarize) break return super().resolve_expression(query, allow_joins, reuse, summarize, for_save) class Star(Expression): def __repr__(self): return "'*'" def as_sql(self, compiler, connection): return '*', [] class Col(Expression): contains_column_references = True possibly_multivalued = False def __init__(self, alias, target, output_field=None): if output_field is None: output_field = target super().__init__(output_field=output_field) self.alias, self.target = alias, target def __repr__(self): alias, target = self.alias, self.target identifiers = (alias, str(target)) if alias else (str(target),) return '{}({})'.format(self.__class__.__name__, ', '.join(identifiers)) def as_sql(self, compiler, connection): alias, column = self.alias, self.target.column identifiers = (alias, column) if alias else (column,) sql = '.'.join(map(compiler.quote_name_unless_alias, identifiers)) return sql, [] def relabeled_clone(self, relabels): if self.alias is None: return self return self.__class__(relabels.get(self.alias, self.alias), self.target, self.output_field) def get_group_by_cols(self, alias=None): return [self] def get_db_converters(self, connection): if self.target == self.output_field: return self.output_field.get_db_converters(connection) return (self.output_field.get_db_converters(connection) + self.target.get_db_converters(connection)) class Ref(Expression): """ Reference to column alias of the query. For example, Ref('sum_cost') in qs.annotate(sum_cost=Sum('cost')) query. """ def __init__(self, refs, source): super().__init__() self.refs, self.source = refs, source def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.refs, self.source) def get_source_expressions(self): return [self.source] def set_source_expressions(self, exprs): self.source, = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # The sub-expression `source` has already been resolved, as this is # just a reference to the name of `source`. return self def relabeled_clone(self, relabels): return self def as_sql(self, compiler, connection): return connection.ops.quote_name(self.refs), [] def get_group_by_cols(self, alias=None): return [self] class ExpressionList(Func): """ An expression containing multiple expressions. Can be used to provide a list of expressions as an argument to another expression, like an ordering clause. """ template = '%(expressions)s' def __init__(self, *expressions, **extra): if not expressions: raise ValueError('%s requires at least one expression.' % self.__class__.__name__) super().__init__(*expressions, **extra) def __str__(self): return self.arg_joiner.join(str(arg) for arg in self.source_expressions) def as_sqlite(self, compiler, connection, **extra_context): # Casting to numeric is unnecessary. return self.as_sql(compiler, connection, **extra_context) class ExpressionWrapper(Expression): """ An expression that can wrap another expression so that it can provide extra context to the inner expression, such as the output_field. 
""" def __init__(self, expression, output_field): super().__init__(output_field=output_field) self.expression = expression def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def get_group_by_cols(self, alias=None): if isinstance(self.expression, Expression): expression = self.expression.copy() expression.output_field = self.output_field return expression.get_group_by_cols(alias=alias) # For non-expressions e.g. an SQL WHERE clause, the entire # `expression` must be included in the GROUP BY clause. return super().get_group_by_cols() def as_sql(self, compiler, connection): return compiler.compile(self.expression) def __repr__(self): return "{}({})".format(self.__class__.__name__, self.expression) class When(Expression): template = 'WHEN %(condition)s THEN %(result)s' # This isn't a complete conditional expression, must be used in Case(). conditional = False def __init__(self, condition=None, then=None, **lookups): if lookups: if condition is None: condition, lookups = Q(**lookups), None elif getattr(condition, 'conditional', False): condition, lookups = Q(condition, **lookups), None if condition is None or not getattr(condition, 'conditional', False) or lookups: raise TypeError( 'When() supports a Q object, a boolean expression, or lookups ' 'as a condition.' ) if isinstance(condition, Q) and not condition: raise ValueError("An empty Q() can't be used as a When() condition.") super().__init__(output_field=None) self.condition = condition self.result = self._parse_expressions(then)[0] def __str__(self): return "WHEN %r THEN %r" % (self.condition, self.result) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return [self.condition, self.result] def set_source_expressions(self, exprs): self.condition, self.result = exprs def get_source_fields(self): # We're only interested in the fields of the result expressions. return [self.result._output_field_or_none] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize if hasattr(c.condition, 'resolve_expression'): c.condition = c.condition.resolve_expression(query, allow_joins, reuse, summarize, False) c.result = c.result.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, template=None, **extra_context): connection.ops.check_expression_support(self) template_params = extra_context sql_params = [] condition_sql, condition_params = compiler.compile(self.condition) template_params['condition'] = condition_sql sql_params.extend(condition_params) result_sql, result_params = compiler.compile(self.result) template_params['result'] = result_sql sql_params.extend(result_params) template = template or self.template return template % template_params, sql_params def get_group_by_cols(self, alias=None): # This is not a complete expression and cannot be used in GROUP BY. 
cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols class Case(Expression): """ An SQL searched CASE expression: CASE WHEN n > 0 THEN 'positive' WHEN n < 0 THEN 'negative' ELSE 'zero' END """ template = 'CASE %(cases)s ELSE %(default)s END' case_joiner = ' ' def __init__(self, *cases, default=None, output_field=None, **extra): if not all(isinstance(case, When) for case in cases): raise TypeError("Positional arguments must all be When objects.") super().__init__(output_field) self.cases = list(cases) self.default = self._parse_expressions(default)[0] self.extra = extra def __str__(self): return "CASE %s, ELSE %r" % (', '.join(str(c) for c in self.cases), self.default) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return self.cases + [self.default] def set_source_expressions(self, exprs): *self.cases, self.default = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, case in enumerate(c.cases): c.cases[pos] = case.resolve_expression(query, allow_joins, reuse, summarize, for_save) c.default = c.default.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def copy(self): c = super().copy() c.cases = c.cases[:] return c def as_sql(self, compiler, connection, template=None, case_joiner=None, **extra_context): connection.ops.check_expression_support(self) if not self.cases: return compiler.compile(self.default) template_params = {**self.extra, **extra_context} case_parts = [] sql_params = [] for case in self.cases: try: case_sql, case_params = compiler.compile(case) except EmptyResultSet: continue case_parts.append(case_sql) sql_params.extend(case_params) default_sql, default_params = compiler.compile(self.default) if not case_parts: return default_sql, default_params case_joiner = case_joiner or self.case_joiner template_params['cases'] = case_joiner.join(case_parts) template_params['default'] = default_sql sql_params.extend(default_params) template = template or template_params.get('template', self.template) sql = template % template_params if self._output_field_or_none is not None: sql = connection.ops.unification_cast_sql(self.output_field) % sql return sql, sql_params def get_group_by_cols(self, alias=None): if not self.cases: return self.default.get_group_by_cols(alias) return super().get_group_by_cols(alias) class Subquery(BaseExpression, Combinable): """ An explicit subquery. It may contain OuterRef() references to the outer query which will be resolved when it is applied to that query. """ template = '(%(subquery)s)' contains_aggregate = False def __init__(self, queryset, output_field=None, **extra): # Allow the usage of both QuerySet and sql.Query objects. 
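        # Illustrative example (not from the source):
        #   newest = Comment.objects.filter(post=OuterRef('pk')).values('email')[:1]
        #   Post.objects.annotate(newest_email=Subquery(newest))
        # Passing a QuerySet extracts its underlying sql.Query just below.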
self.query = getattr(queryset, 'query', queryset) self.extra = extra super().__init__(output_field) def get_source_expressions(self): return [self.query] def set_source_expressions(self, exprs): self.query = exprs[0] def _resolve_output_field(self): return self.query.output_field def copy(self): clone = super().copy() clone.query = clone.query.clone() return clone @property def external_aliases(self): return self.query.external_aliases def get_external_cols(self): return self.query.get_external_cols() def as_sql(self, compiler, connection, template=None, query=None, **extra_context): connection.ops.check_expression_support(self) template_params = {**self.extra, **extra_context} query = query or self.query subquery_sql, sql_params = query.as_sql(compiler, connection) template_params['subquery'] = subquery_sql[1:-1] template = template or template_params.get('template', self.template) sql = template % template_params return sql, sql_params def get_group_by_cols(self, alias=None): if alias: return [Ref(alias, self)] external_cols = self.get_external_cols() if any(col.possibly_multivalued for col in external_cols): return [self] return external_cols class Exists(Subquery): template = 'EXISTS(%(subquery)s)' output_field = fields.BooleanField() def __init__(self, queryset, negated=False, **kwargs): self.negated = negated super().__init__(queryset, **kwargs) def __invert__(self): clone = self.copy() clone.negated = not self.negated return clone def as_sql(self, compiler, connection, template=None, **extra_context): query = self.query.exists(using=connection.alias) sql, params = super().as_sql( compiler, connection, template=template, query=query, **extra_context, ) if self.negated: sql = 'NOT {}'.format(sql) return sql, params def select_format(self, compiler, sql, params): # Wrap EXISTS() with a CASE WHEN expression if a database backend # (e.g. Oracle) doesn't support boolean expression in SELECT or GROUP # BY list. 
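        # e.g. on Oracle the compiled SQL becomes
        # CASE WHEN EXISTS(SELECT ...) THEN 1 ELSE 0 END (illustrative).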
if not compiler.connection.features.supports_boolean_expr_in_select_clause: sql = 'CASE WHEN {} THEN 1 ELSE 0 END'.format(sql) return sql, params class OrderBy(Expression): template = '%(expression)s %(ordering)s' conditional = False def __init__(self, expression, descending=False, nulls_first=False, nulls_last=False): if nulls_first and nulls_last: raise ValueError('nulls_first and nulls_last are mutually exclusive') self.nulls_first = nulls_first self.nulls_last = nulls_last self.descending = descending if not hasattr(expression, 'resolve_expression'): raise ValueError('expression must be an expression type') self.expression = expression def __repr__(self): return "{}({}, descending={})".format( self.__class__.__name__, self.expression, self.descending) def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def as_sql(self, compiler, connection, template=None, **extra_context): template = template or self.template if connection.features.supports_order_by_nulls_modifier: if self.nulls_last: template = '%s NULLS LAST' % template elif self.nulls_first: template = '%s NULLS FIRST' % template else: if self.nulls_last and not ( self.descending and connection.features.order_by_nulls_first ): template = '%%(expression)s IS NULL, %s' % template elif self.nulls_first and not ( not self.descending and connection.features.order_by_nulls_first ): template = '%%(expression)s IS NOT NULL, %s' % template connection.ops.check_expression_support(self) expression_sql, params = compiler.compile(self.expression) placeholders = { 'expression': expression_sql, 'ordering': 'DESC' if self.descending else 'ASC', **extra_context, } params *= template.count('%(expression)s') return (template % placeholders).rstrip(), params def as_oracle(self, compiler, connection): # Oracle doesn't allow ORDER BY EXISTS() unless it's wrapped in # a CASE WHEN. if isinstance(self.expression, Exists): copy = self.copy() copy.expression = Case( When(self.expression, then=True), default=False, ) return copy.as_sql(compiler, connection) return self.as_sql(compiler, connection) def get_group_by_cols(self, alias=None): cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def reverse_ordering(self): self.descending = not self.descending if self.nulls_first or self.nulls_last: self.nulls_first = not self.nulls_first self.nulls_last = not self.nulls_last return self def asc(self): self.descending = False def desc(self): self.descending = True class Window(SQLiteNumericMixin, Expression): template = '%(expression)s OVER (%(window)s)' # Although the main expression may either be an aggregate or an # expression with an aggregate function, the GROUP BY that will # be introduced in the query as a result is not desired. contains_aggregate = False contains_over_clause = True filterable = False def __init__(self, expression, partition_by=None, order_by=None, frame=None, output_field=None): self.partition_by = partition_by self.order_by = order_by self.frame = frame if not getattr(expression, 'window_compatible', False): raise ValueError( "Expression '%s' isn't compatible with OVER clauses." 
% expression.__class__.__name__ ) if self.partition_by is not None: if not isinstance(self.partition_by, (tuple, list)): self.partition_by = (self.partition_by,) self.partition_by = ExpressionList(*self.partition_by) if self.order_by is not None: if isinstance(self.order_by, (list, tuple)): self.order_by = ExpressionList(*self.order_by) elif not isinstance(self.order_by, BaseExpression): raise ValueError( 'order_by must be either an Expression or a sequence of ' 'expressions.' ) super().__init__(output_field=output_field) self.source_expression = self._parse_expressions(expression)[0] def _resolve_output_field(self): return self.source_expression.output_field def get_source_expressions(self): return [self.source_expression, self.partition_by, self.order_by, self.frame] def set_source_expressions(self, exprs): self.source_expression, self.partition_by, self.order_by, self.frame = exprs def as_sql(self, compiler, connection, template=None): connection.ops.check_expression_support(self) if not connection.features.supports_over_clause: raise NotSupportedError('This backend does not support window expressions.') expr_sql, params = compiler.compile(self.source_expression) window_sql, window_params = [], [] if self.partition_by is not None: sql_expr, sql_params = self.partition_by.as_sql( compiler=compiler, connection=connection, template='PARTITION BY %(expressions)s', ) window_sql.extend(sql_expr) window_params.extend(sql_params) if self.order_by is not None: window_sql.append(' ORDER BY ') order_sql, order_params = compiler.compile(self.order_by) window_sql.extend(order_sql) window_params.extend(order_params) if self.frame: frame_sql, frame_params = compiler.compile(self.frame) window_sql.append(' ' + frame_sql) window_params.extend(frame_params) params.extend(window_params) template = template or self.template return template % { 'expression': expr_sql, 'window': ''.join(window_sql).strip() }, params def as_sqlite(self, compiler, connection): if isinstance(self.output_field, fields.DecimalField): # Casting to numeric must be outside of the window expression. copy = self.copy() source_expressions = copy.get_source_expressions() source_expressions[0].output_field = fields.FloatField() copy.set_source_expressions(source_expressions) return super(Window, copy).as_sqlite(compiler, connection) return self.as_sql(compiler, connection) def __str__(self): return '{} OVER ({}{}{})'.format( str(self.source_expression), 'PARTITION BY ' + str(self.partition_by) if self.partition_by else '', 'ORDER BY ' + str(self.order_by) if self.order_by else '', str(self.frame or ''), ) def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] class WindowFrame(Expression): """ Model the frame clause in window expressions. There are two types of frame clauses which are subclasses, however, all processing and validation (by no means intended to be complete) is done here. Thus, providing an end for a frame is optional (the default is UNBOUNDED FOLLOWING, which is the last row in the frame). 
""" template = '%(frame_type)s BETWEEN %(start)s AND %(end)s' def __init__(self, start=None, end=None): self.start = Value(start) self.end = Value(end) def set_source_expressions(self, exprs): self.start, self.end = exprs def get_source_expressions(self): return [self.start, self.end] def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) start, end = self.window_frame_start_end(connection, self.start.value, self.end.value) return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, }, [] def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] def __str__(self): if self.start.value is not None and self.start.value < 0: start = '%d %s' % (abs(self.start.value), connection.ops.PRECEDING) elif self.start.value is not None and self.start.value == 0: start = connection.ops.CURRENT_ROW else: start = connection.ops.UNBOUNDED_PRECEDING if self.end.value is not None and self.end.value > 0: end = '%d %s' % (self.end.value, connection.ops.FOLLOWING) elif self.end.value is not None and self.end.value == 0: end = connection.ops.CURRENT_ROW else: end = connection.ops.UNBOUNDED_FOLLOWING return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, } def window_frame_start_end(self, connection, start, end): raise NotImplementedError('Subclasses must implement window_frame_start_end().') class RowRange(WindowFrame): frame_type = 'ROWS' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_rows_start_end(start, end) class ValueRange(WindowFrame): frame_type = 'RANGE' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_range_start_end(start, end)
import itertools import math from copy import copy from django.core.exceptions import EmptyResultSet from django.db.models.expressions import Case, Func, Value, When from django.db.models.fields import ( CharField, DateTimeField, Field, IntegerField, UUIDField, ) from django.db.models.query_utils import RegisterLookupMixin from django.utils.datastructures import OrderedSet from django.utils.functional import cached_property from django.utils.hashable import make_hashable class Lookup: lookup_name = None prepare_rhs = True can_use_none_as_rhs = False def __init__(self, lhs, rhs): self.lhs, self.rhs = lhs, rhs self.rhs = self.get_prep_lookup() if hasattr(self.lhs, 'get_bilateral_transforms'): bilateral_transforms = self.lhs.get_bilateral_transforms() else: bilateral_transforms = [] if bilateral_transforms: # Warn the user as soon as possible if they are trying to apply # a bilateral transformation on a nested QuerySet: that won't work. from django.db.models.sql.query import ( # avoid circular import Query, ) if isinstance(rhs, Query): raise NotImplementedError("Bilateral transformations on nested querysets are not implemented.") self.bilateral_transforms = bilateral_transforms def apply_bilateral_transforms(self, value): for transform in self.bilateral_transforms: value = transform(value) return value def batch_process_rhs(self, compiler, connection, rhs=None): if rhs is None: rhs = self.rhs if self.bilateral_transforms: sqls, sqls_params = [], [] for p in rhs: value = Value(p, output_field=self.lhs.output_field) value = self.apply_bilateral_transforms(value) value = value.resolve_expression(compiler.query) sql, sql_params = compiler.compile(value) sqls.append(sql) sqls_params.extend(sql_params) else: _, params = self.get_db_prep_lookup(rhs, connection) sqls, sqls_params = ['%s'] * len(params), params return sqls, sqls_params def get_source_expressions(self): if self.rhs_is_direct_value(): return [self.lhs] return [self.lhs, self.rhs] def set_source_expressions(self, new_exprs): if len(new_exprs) == 1: self.lhs = new_exprs[0] else: self.lhs, self.rhs = new_exprs def get_prep_lookup(self): if hasattr(self.rhs, 'resolve_expression'): return self.rhs if self.prepare_rhs and hasattr(self.lhs.output_field, 'get_prep_value'): return self.lhs.output_field.get_prep_value(self.rhs) return self.rhs def get_db_prep_lookup(self, value, connection): return ('%s', [value]) def process_lhs(self, compiler, connection, lhs=None): lhs = lhs or self.lhs if hasattr(lhs, 'resolve_expression'): lhs = lhs.resolve_expression(compiler.query) return compiler.compile(lhs) def process_rhs(self, compiler, connection): value = self.rhs if self.bilateral_transforms: if self.rhs_is_direct_value(): # Do not call get_db_prep_lookup here as the value will be # transformed before being used for lookup value = Value(value, output_field=self.lhs.output_field) value = self.apply_bilateral_transforms(value) value = value.resolve_expression(compiler.query) if hasattr(value, 'as_sql'): sql, params = compiler.compile(value) # Ensure expression is wrapped in parentheses to respect operator # precedence but avoid double wrapping as it can be misinterpreted # on some backends (e.g. subqueries on SQLite). 
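            # Illustrative example (not from the source): an F('a') + F('b')
            # rhs already compiles to ("a" + "b") and is left alone, while a
            # bare column reference "table"."col" gets wrapped once.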
if sql and sql[0] != '(': sql = '(%s)' % sql return sql, params else: return self.get_db_prep_lookup(value, connection) def rhs_is_direct_value(self): return not hasattr(self.rhs, 'as_sql') def relabeled_clone(self, relabels): new = copy(self) new.lhs = new.lhs.relabeled_clone(relabels) if hasattr(new.rhs, 'relabeled_clone'): new.rhs = new.rhs.relabeled_clone(relabels) return new def get_group_by_cols(self, alias=None): cols = self.lhs.get_group_by_cols() if hasattr(self.rhs, 'get_group_by_cols'): cols.extend(self.rhs.get_group_by_cols()) return cols def as_sql(self, compiler, connection): raise NotImplementedError def as_oracle(self, compiler, connection): # Oracle doesn't allow EXISTS() and filters to be compared to another # expression unless they're wrapped in a CASE WHEN. wrapped = False exprs = [] for expr in (self.lhs, self.rhs): if connection.ops.conditional_expression_supported_in_where_clause(expr): expr = Case(When(expr, then=True), default=False) wrapped = True exprs.append(expr) lookup = type(self)(*exprs) if wrapped else self return lookup.as_sql(compiler, connection) @cached_property def contains_aggregate(self): return self.lhs.contains_aggregate or getattr(self.rhs, 'contains_aggregate', False) @cached_property def contains_over_clause(self): return self.lhs.contains_over_clause or getattr(self.rhs, 'contains_over_clause', False) @property def is_summary(self): return self.lhs.is_summary or getattr(self.rhs, 'is_summary', False) @property def identity(self): return self.__class__, self.lhs, self.rhs def __eq__(self, other): if not isinstance(other, Lookup): return NotImplemented return self.identity == other.identity def __hash__(self): return hash(make_hashable(self.identity)) class Transform(RegisterLookupMixin, Func): """ RegisterLookupMixin() is first so that get_lookup() and get_transform() first examine self and then check output_field. """ bilateral = False arity = 1 @property def lhs(self): return self.get_source_expressions()[0] def get_bilateral_transforms(self): if hasattr(self.lhs, 'get_bilateral_transforms'): bilateral_transforms = self.lhs.get_bilateral_transforms() else: bilateral_transforms = [] if self.bilateral: bilateral_transforms.append(self.__class__) return bilateral_transforms class BuiltinLookup(Lookup): def process_lhs(self, compiler, connection, lhs=None): lhs_sql, params = super().process_lhs(compiler, connection, lhs) field_internal_type = self.lhs.output_field.get_internal_type() db_type = self.lhs.output_field.db_type(connection=connection) lhs_sql = connection.ops.field_cast_sql( db_type, field_internal_type) % lhs_sql lhs_sql = connection.ops.lookup_cast(self.lookup_name, field_internal_type) % lhs_sql return lhs_sql, list(params) def as_sql(self, compiler, connection): lhs_sql, params = self.process_lhs(compiler, connection) rhs_sql, rhs_params = self.process_rhs(compiler, connection) params.extend(rhs_params) rhs_sql = self.get_rhs_op(connection, rhs_sql) return '%s %s' % (lhs_sql, rhs_sql), params def get_rhs_op(self, connection, rhs): return connection.operators[self.lookup_name] % rhs class FieldGetDbPrepValueMixin: """ Some lookups require Field.get_db_prep_value() to be called on their inputs. """ get_db_prep_lookup_value_is_iterable = False def get_db_prep_lookup(self, value, connection): # For relational fields, use the 'target_field' attribute of the # output_field. 
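        # e.g. for a ForeignKey lhs, target_field is the referenced primary
        # key, so values are prepared using that field's type (illustrative).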
field = getattr(self.lhs.output_field, 'target_field', None) get_db_prep_value = getattr(field, 'get_db_prep_value', None) or self.lhs.output_field.get_db_prep_value return ( '%s', [get_db_prep_value(v, connection, prepared=True) for v in value] if self.get_db_prep_lookup_value_is_iterable else [get_db_prep_value(value, connection, prepared=True)] ) class FieldGetDbPrepValueIterableMixin(FieldGetDbPrepValueMixin): """ Some lookups require Field.get_db_prep_value() to be called on each value in an iterable. """ get_db_prep_lookup_value_is_iterable = True def get_prep_lookup(self): if hasattr(self.rhs, 'resolve_expression'): return self.rhs prepared_values = [] for rhs_value in self.rhs: if hasattr(rhs_value, 'resolve_expression'): # An expression will be handled by the database but can coexist # alongside real values. pass elif self.prepare_rhs and hasattr(self.lhs.output_field, 'get_prep_value'): rhs_value = self.lhs.output_field.get_prep_value(rhs_value) prepared_values.append(rhs_value) return prepared_values def process_rhs(self, compiler, connection): if self.rhs_is_direct_value(): # rhs should be an iterable of values. Use batch_process_rhs() # to prepare/transform those values. return self.batch_process_rhs(compiler, connection) else: return super().process_rhs(compiler, connection) def resolve_expression_parameter(self, compiler, connection, sql, param): params = [param] if hasattr(param, 'resolve_expression'): param = param.resolve_expression(compiler.query) if hasattr(param, 'as_sql'): sql, params = compiler.compile(param) return sql, params def batch_process_rhs(self, compiler, connection, rhs=None): pre_processed = super().batch_process_rhs(compiler, connection, rhs) # The params list may contain expressions which compile to a # sql/param pair. Zip them to get sql and param pairs that refer to the # same argument and attempt to replace them with the result of # compiling the param step. sql, params = zip(*( self.resolve_expression_parameter(compiler, connection, sql, param) for sql, param in zip(*pre_processed) )) params = itertools.chain.from_iterable(params) return sql, tuple(params) class PostgresOperatorLookup(FieldGetDbPrepValueMixin, Lookup): """Lookup defined by operators on PostgreSQL.""" postgres_operator = None def as_postgresql(self, compiler, connection): lhs, lhs_params = self.process_lhs(compiler, connection) rhs, rhs_params = self.process_rhs(compiler, connection) params = tuple(lhs_params) + tuple(rhs_params) return '%s %s %s' % (lhs, self.postgres_operator, rhs), params @Field.register_lookup class Exact(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'exact' def process_rhs(self, compiler, connection): from django.db.models.sql.query import Query if isinstance(self.rhs, Query): if self.rhs.has_limit_one(): if not self.rhs.has_select_fields: self.rhs.clear_select_clause() self.rhs.add_fields(['pk']) else: raise ValueError( 'The QuerySet value for an exact lookup must be limited to ' 'one result using slicing.' ) return super().process_rhs(compiler, connection) def as_sql(self, compiler, connection): # Avoid comparison against direct rhs if lhs is a boolean value. That # turns "boolfield__exact=True" into "WHERE boolean_field" instead of # "WHERE boolean_field = True" when allowed. 
if ( isinstance(self.rhs, bool) and getattr(self.lhs, 'conditional', False) and connection.ops.conditional_expression_supported_in_where_clause(self.lhs) ): lhs_sql, params = self.process_lhs(compiler, connection) template = '%s' if self.rhs else 'NOT %s' return template % lhs_sql, params return super().as_sql(compiler, connection) @Field.register_lookup class IExact(BuiltinLookup): lookup_name = 'iexact' prepare_rhs = False def process_rhs(self, qn, connection): rhs, params = super().process_rhs(qn, connection) if params: params[0] = connection.ops.prep_for_iexact_query(params[0]) return rhs, params @Field.register_lookup class GreaterThan(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'gt' @Field.register_lookup class GreaterThanOrEqual(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'gte' @Field.register_lookup class LessThan(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'lt' @Field.register_lookup class LessThanOrEqual(FieldGetDbPrepValueMixin, BuiltinLookup): lookup_name = 'lte' class IntegerFieldFloatRounding: """ Allow floats to work as query values for IntegerField. Without this, the decimal portion of the float would always be discarded. """ def get_prep_lookup(self): if isinstance(self.rhs, float): self.rhs = math.ceil(self.rhs) return super().get_prep_lookup() @IntegerField.register_lookup class IntegerGreaterThanOrEqual(IntegerFieldFloatRounding, GreaterThanOrEqual): pass @IntegerField.register_lookup class IntegerLessThan(IntegerFieldFloatRounding, LessThan): pass @Field.register_lookup class In(FieldGetDbPrepValueIterableMixin, BuiltinLookup): lookup_name = 'in' def process_rhs(self, compiler, connection): db_rhs = getattr(self.rhs, '_db', None) if db_rhs is not None and db_rhs != connection.alias: raise ValueError( "Subqueries aren't allowed across different databases. Force " "the inner query to be evaluated using `list(inner_query)`." ) if self.rhs_is_direct_value(): # Remove None from the list as NULL is never equal to anything. try: rhs = OrderedSet(self.rhs) rhs.discard(None) except TypeError: # Unhashable items in self.rhs rhs = [r for r in self.rhs if r is not None] if not rhs: raise EmptyResultSet # rhs should be an iterable; use batch_process_rhs() to # prepare/transform those values. sqls, sqls_params = self.batch_process_rhs(compiler, connection, rhs) placeholder = '(' + ', '.join(sqls) + ')' return (placeholder, sqls_params) else: if not getattr(self.rhs, 'has_select_fields', True): self.rhs.clear_select_clause() self.rhs.add_fields(['pk']) return super().process_rhs(compiler, connection) def get_group_by_cols(self, alias=None): cols = self.lhs.get_group_by_cols() if hasattr(self.rhs, 'get_group_by_cols'): if not getattr(self.rhs, 'has_select_fields', True): self.rhs.clear_select_clause() self.rhs.add_fields(['pk']) cols.extend(self.rhs.get_group_by_cols()) return cols def get_rhs_op(self, connection, rhs): return 'IN %s' % rhs def as_sql(self, compiler, connection): max_in_list_size = connection.ops.max_in_list_size() if self.rhs_is_direct_value() and max_in_list_size and len(self.rhs) > max_in_list_size: return self.split_parameter_list_as_sql(compiler, connection) return super().as_sql(compiler, connection) def split_parameter_list_as_sql(self, compiler, connection): # This is a special case for databases which limit the number of # elements which can appear in an 'IN' clause. 
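        # e.g. Oracle limits IN lists to 1000 items, so a longer lookup is
        # emitted as ("x" IN (...) OR "x" IN (...) OR ...) — illustrative of
        # the SQL shape produced below.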
max_in_list_size = connection.ops.max_in_list_size() lhs, lhs_params = self.process_lhs(compiler, connection) rhs, rhs_params = self.batch_process_rhs(compiler, connection) in_clause_elements = ['('] params = [] for offset in range(0, len(rhs_params), max_in_list_size): if offset > 0: in_clause_elements.append(' OR ') in_clause_elements.append('%s IN (' % lhs) params.extend(lhs_params) sqls = rhs[offset: offset + max_in_list_size] sqls_params = rhs_params[offset: offset + max_in_list_size] param_group = ', '.join(sqls) in_clause_elements.append(param_group) in_clause_elements.append(')') params.extend(sqls_params) in_clause_elements.append(')') return ''.join(in_clause_elements), params class PatternLookup(BuiltinLookup): param_pattern = '%%%s%%' prepare_rhs = False def get_rhs_op(self, connection, rhs): # Assume we are in startswith. We need to produce SQL like: # col LIKE %s, ['thevalue%'] # For python values we can (and should) do that directly in Python, # but if the value is for example reference to other column, then # we need to add the % pattern match to the lookup by something like # col LIKE othercol || '%%' # So, for Python values we don't need any special pattern, but for # SQL reference values or SQL transformations we need the correct # pattern added. if hasattr(self.rhs, 'as_sql') or self.bilateral_transforms: pattern = connection.pattern_ops[self.lookup_name].format(connection.pattern_esc) return pattern.format(rhs) else: return super().get_rhs_op(connection, rhs) def process_rhs(self, qn, connection): rhs, params = super().process_rhs(qn, connection) if self.rhs_is_direct_value() and params and not self.bilateral_transforms: params[0] = self.param_pattern % connection.ops.prep_for_like_query(params[0]) return rhs, params @Field.register_lookup class Contains(PatternLookup): lookup_name = 'contains' @Field.register_lookup class IContains(Contains): lookup_name = 'icontains' @Field.register_lookup class StartsWith(PatternLookup): lookup_name = 'startswith' param_pattern = '%s%%' @Field.register_lookup class IStartsWith(StartsWith): lookup_name = 'istartswith' @Field.register_lookup class EndsWith(PatternLookup): lookup_name = 'endswith' param_pattern = '%%%s' @Field.register_lookup class IEndsWith(EndsWith): lookup_name = 'iendswith' @Field.register_lookup class Range(FieldGetDbPrepValueIterableMixin, BuiltinLookup): lookup_name = 'range' def get_rhs_op(self, connection, rhs): return "BETWEEN %s AND %s" % (rhs[0], rhs[1]) @Field.register_lookup class IsNull(BuiltinLookup): lookup_name = 'isnull' prepare_rhs = False def as_sql(self, compiler, connection): if not isinstance(self.rhs, bool): raise ValueError( 'The QuerySet value for an isnull lookup must be True or ' 'False.' 
) sql, params = compiler.compile(self.lhs) if self.rhs: return "%s IS NULL" % sql, params else: return "%s IS NOT NULL" % sql, params @Field.register_lookup class Regex(BuiltinLookup): lookup_name = 'regex' prepare_rhs = False def as_sql(self, compiler, connection): if self.lookup_name in connection.operators: return super().as_sql(compiler, connection) else: lhs, lhs_params = self.process_lhs(compiler, connection) rhs, rhs_params = self.process_rhs(compiler, connection) sql_template = connection.ops.regex_lookup(self.lookup_name) return sql_template % (lhs, rhs), lhs_params + rhs_params @Field.register_lookup class IRegex(Regex): lookup_name = 'iregex' class YearLookup(Lookup): def year_lookup_bounds(self, connection, year): from django.db.models.functions import ExtractIsoYear iso_year = isinstance(self.lhs, ExtractIsoYear) output_field = self.lhs.lhs.output_field if isinstance(output_field, DateTimeField): bounds = connection.ops.year_lookup_bounds_for_datetime_field( year, iso_year=iso_year, ) else: bounds = connection.ops.year_lookup_bounds_for_date_field( year, iso_year=iso_year, ) return bounds def as_sql(self, compiler, connection): # Avoid the extract operation if the rhs is a direct value to allow # indexes to be used. if self.rhs_is_direct_value(): # Skip the extract part by directly using the originating field, # that is self.lhs.lhs. lhs_sql, params = self.process_lhs(compiler, connection, self.lhs.lhs) rhs_sql, _ = self.process_rhs(compiler, connection) rhs_sql = self.get_direct_rhs_sql(connection, rhs_sql) start, finish = self.year_lookup_bounds(connection, self.rhs) params.extend(self.get_bound_params(start, finish)) return '%s %s' % (lhs_sql, rhs_sql), params return super().as_sql(compiler, connection) def get_direct_rhs_sql(self, connection, rhs): return connection.operators[self.lookup_name] % rhs def get_bound_params(self, start, finish): raise NotImplementedError( 'subclasses of YearLookup must provide a get_bound_params() method' ) class YearExact(YearLookup, Exact): def get_direct_rhs_sql(self, connection, rhs): return 'BETWEEN %s AND %s' def get_bound_params(self, start, finish): return (start, finish) class YearGt(YearLookup, GreaterThan): def get_bound_params(self, start, finish): return (finish,) class YearGte(YearLookup, GreaterThanOrEqual): def get_bound_params(self, start, finish): return (start,) class YearLt(YearLookup, LessThan): def get_bound_params(self, start, finish): return (start,) class YearLte(YearLookup, LessThanOrEqual): def get_bound_params(self, start, finish): return (finish,) class UUIDTextMixin: """ Strip hyphens from a value when filtering a UUIDField on backends without a native datatype for UUID. 
""" def process_rhs(self, qn, connection): if not connection.features.has_native_uuid_field: from django.db.models.functions import Replace if self.rhs_is_direct_value(): self.rhs = Value(self.rhs) self.rhs = Replace(self.rhs, Value('-'), Value(''), output_field=CharField()) rhs, params = super().process_rhs(qn, connection) return rhs, params @UUIDField.register_lookup class UUIDIExact(UUIDTextMixin, IExact): pass @UUIDField.register_lookup class UUIDContains(UUIDTextMixin, Contains): pass @UUIDField.register_lookup class UUIDIContains(UUIDTextMixin, IContains): pass @UUIDField.register_lookup class UUIDStartsWith(UUIDTextMixin, StartsWith): pass @UUIDField.register_lookup class UUIDIStartsWith(UUIDTextMixin, IStartsWith): pass @UUIDField.register_lookup class UUIDEndsWith(UUIDTextMixin, EndsWith): pass @UUIDField.register_lookup class UUIDIEndsWith(UUIDTextMixin, IEndsWith): pass
ecd9fa7ff99cd6b795176244b201a37fe12b0c4cea562a42c11212f36c1fb07d
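# --- Usage sketch (added for illustration, not part of Django) ---------------
# The lookup classes before this separator are all registered with
# @Field.register_lookup; the same pattern works for third-party lookups.
# A minimal sketch, assuming a hypothetical 'ne' lookup -- the class name, the
# lookup_name, and the example query in the closing comment are illustrative
# only, not Django's own API.
from django.db.models import Field, Lookup


@Field.register_lookup
class NotEqual(Lookup):
    """Hypothetical lookup rendering `lhs <> rhs` (illustration only)."""
    lookup_name = 'ne'

    def as_sql(self, compiler, connection):
        # process_lhs()/process_rhs() come from the Lookup base class used above.
        lhs, lhs_params = self.process_lhs(compiler, connection)
        rhs, rhs_params = self.process_rhs(compiler, connection)
        return '%s <> %s' % (lhs, rhs), lhs_params + rhs_params

# Once registered, a queryset such as Author.objects.filter(name__ne='Jack')
# would compile to "... WHERE name <> %s" with ['Jack'] as the parameter.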
import datetime
import posixpath

from django import forms
from django.core import checks
from django.core.files.base import File
from django.core.files.images import ImageFile
from django.core.files.storage import Storage, default_storage
from django.core.files.utils import validate_file_name
from django.db.models import signals
from django.db.models.fields import Field
from django.db.models.query_utils import DeferredAttribute
from django.utils.translation import gettext_lazy as _


class FieldFile(File):
    def __init__(self, instance, field, name):
        super().__init__(None, name)
        self.instance = instance
        self.field = field
        self.storage = field.storage
        self._committed = True

    def __eq__(self, other):
        # Older code may be expecting FileField values to be simple strings.
        # By overriding the == operator, it can remain backwards compatible.
        if hasattr(other, 'name'):
            return self.name == other.name
        return self.name == other

    def __hash__(self):
        return hash(self.name)

    # The standard File contains most of the necessary properties, but
    # FieldFiles can be instantiated without a name, so that needs to
    # be checked for here.

    def _require_file(self):
        if not self:
            raise ValueError("The '%s' attribute has no file associated with it." % self.field.name)

    def _get_file(self):
        self._require_file()
        if getattr(self, '_file', None) is None:
            self._file = self.storage.open(self.name, 'rb')
        return self._file

    def _set_file(self, file):
        self._file = file

    def _del_file(self):
        del self._file

    file = property(_get_file, _set_file, _del_file)

    @property
    def path(self):
        self._require_file()
        return self.storage.path(self.name)

    @property
    def url(self):
        self._require_file()
        return self.storage.url(self.name)

    @property
    def size(self):
        self._require_file()
        if not self._committed:
            return self.file.size
        return self.storage.size(self.name)

    def open(self, mode='rb'):
        self._require_file()
        if getattr(self, '_file', None) is None:
            self.file = self.storage.open(self.name, mode)
        else:
            self.file.open(mode)
        return self
    # open() doesn't alter the file's contents, but it does reset the pointer
    open.alters_data = True

    # In addition to the standard File API, FieldFiles have extra methods
    # to further manipulate the underlying file, as well as update the
    # associated model instance.

    def save(self, name, content, save=True):
        name = self.field.generate_filename(self.instance, name)
        self.name = self.storage.save(name, content, max_length=self.field.max_length)
        setattr(self.instance, self.field.attname, self.name)
        self._committed = True

        # Save the object because it has changed, unless save is False
        if save:
            self.instance.save()
    save.alters_data = True

    def delete(self, save=True):
        if not self:
            return
        # Only close the file if it's already open, which we know by the
        # presence of self._file
        if hasattr(self, '_file'):
            self.close()
            del self.file

        self.storage.delete(self.name)

        self.name = None
        setattr(self.instance, self.field.attname, self.name)
        self._committed = False

        if save:
            self.instance.save()
    delete.alters_data = True

    @property
    def closed(self):
        file = getattr(self, '_file', None)
        return file is None or file.closed

    def close(self):
        file = getattr(self, '_file', None)
        if file is not None:
            file.close()

    def __getstate__(self):
        # FieldFile needs access to its associated model field, an instance and
        # the file's name. Everything else will be restored later, by
        # FileDescriptor below.
return { 'name': self.name, 'closed': False, '_committed': True, '_file': None, 'instance': self.instance, 'field': self.field, } def __setstate__(self, state): self.__dict__.update(state) self.storage = self.field.storage class FileDescriptor(DeferredAttribute): """ The descriptor for the file attribute on the model instance. Return a FieldFile when accessed so you can write code like:: >>> from myapp.models import MyModel >>> instance = MyModel.objects.get(pk=1) >>> instance.file.size Assign a file object on assignment so you can do:: >>> with open('/path/to/hello.world') as f: ... instance.file = File(f) """ def __get__(self, instance, cls=None): if instance is None: return self # This is slightly complicated, so worth an explanation. # instance.file`needs to ultimately return some instance of `File`, # probably a subclass. Additionally, this returned object needs to have # the FieldFile API so that users can easily do things like # instance.file.path and have that delegated to the file storage engine. # Easy enough if we're strict about assignment in __set__, but if you # peek below you can see that we're not. So depending on the current # value of the field we have to dynamically construct some sort of # "thing" to return. # The instance dict contains whatever was originally assigned # in __set__. file = super().__get__(instance, cls) # If this value is a string (instance.file = "path/to/file") or None # then we simply wrap it with the appropriate attribute class according # to the file field. [This is FieldFile for FileFields and # ImageFieldFile for ImageFields; it's also conceivable that user # subclasses might also want to subclass the attribute class]. This # object understands how to convert a path to a file, and also how to # handle None. if isinstance(file, str) or file is None: attr = self.field.attr_class(instance, self.field, file) instance.__dict__[self.field.attname] = attr # Other types of files may be assigned as well, but they need to have # the FieldFile interface added to them. Thus, we wrap any other type of # File inside a FieldFile (well, the field's attr_class, which is # usually FieldFile). elif isinstance(file, File) and not isinstance(file, FieldFile): file_copy = self.field.attr_class(instance, self.field, file.name) file_copy.file = file file_copy._committed = False instance.__dict__[self.field.attname] = file_copy # Finally, because of the (some would say boneheaded) way pickle works, # the underlying FieldFile might not actually itself have an associated # file. So we need to reset the details of the FieldFile in those cases. elif isinstance(file, FieldFile) and not hasattr(file, 'field'): file.instance = instance file.field = self.field file.storage = self.field.storage # Make sure that the instance is correct. elif isinstance(file, FieldFile) and instance is not file.instance: file.instance = instance # That was fun, wasn't it? return instance.__dict__[self.field.attname] def __set__(self, instance, value): instance.__dict__[self.field.attname] = value class FileField(Field): # The class to wrap instance attributes in. Accessing the file object off # the instance will always return an instance of attr_class. attr_class = FieldFile # The descriptor to use for accessing the attribute off of the class. 
descriptor_class = FileDescriptor description = _("File") def __init__(self, verbose_name=None, name=None, upload_to='', storage=None, **kwargs): self._primary_key_set_explicitly = 'primary_key' in kwargs self.storage = storage or default_storage if callable(self.storage): # Hold a reference to the callable for deconstruct(). self._storage_callable = self.storage self.storage = self.storage() if not isinstance(self.storage, Storage): raise TypeError( "%s.storage must be a subclass/instance of %s.%s" % (self.__class__.__qualname__, Storage.__module__, Storage.__qualname__) ) self.upload_to = upload_to kwargs.setdefault('max_length', 100) super().__init__(verbose_name, name, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_primary_key(), *self._check_upload_to(), ] def _check_primary_key(self): if self._primary_key_set_explicitly: return [ checks.Error( "'primary_key' is not a valid argument for a %s." % self.__class__.__name__, obj=self, id='fields.E201', ) ] else: return [] def _check_upload_to(self): if isinstance(self.upload_to, str) and self.upload_to.startswith('/'): return [ checks.Error( "%s's 'upload_to' argument must be a relative path, not an " "absolute path." % self.__class__.__name__, obj=self, id='fields.E202', hint='Remove the leading slash.', ) ] else: return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if kwargs.get("max_length") == 100: del kwargs["max_length"] kwargs['upload_to'] = self.upload_to if self.storage is not default_storage: kwargs['storage'] = getattr(self, '_storage_callable', self.storage) return name, path, args, kwargs def get_internal_type(self): return "FileField" def get_prep_value(self, value): value = super().get_prep_value(value) # Need to convert File objects provided via a form to string for database insertion if value is None: return None return str(value) def pre_save(self, model_instance, add): file = super().pre_save(model_instance, add) if file and not file._committed: # Commit the file to storage prior to saving the model file.save(file.name, file.file, save=False) return file def contribute_to_class(self, cls, name, **kwargs): super().contribute_to_class(cls, name, **kwargs) setattr(cls, self.attname, self.descriptor_class(self)) def generate_filename(self, instance, filename): """ Apply (if callable) or prepend (if a string) upload_to to the filename, then delegate further processing of the name to the storage backend. Until the storage layer, all file paths are expected to be Unix style (with forward slashes). """ filename = validate_file_name(filename) if callable(self.upload_to): filename = self.upload_to(instance, filename) else: dirname = datetime.datetime.now().strftime(str(self.upload_to)) filename = posixpath.join(dirname, filename) return self.storage.generate_filename(filename) def save_form_data(self, instance, data): # Important: None means "no change", other false value means "clear" # This subtle distinction (rather than a more explicit marker) is # needed because we need to consume values that are also sane for a # regular (non Model-) Form to find in its cleaned_data dictionary. if data is not None: # This value will be converted to str and stored in the # database, so leaving False as-is is not acceptable. 
setattr(instance, self.name, data or '') def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.FileField, 'max_length': self.max_length, **kwargs, }) class ImageFileDescriptor(FileDescriptor): """ Just like the FileDescriptor, but for ImageFields. The only difference is assigning the width/height to the width_field/height_field, if appropriate. """ def __set__(self, instance, value): previous_file = instance.__dict__.get(self.field.attname) super().__set__(instance, value) # To prevent recalculating image dimensions when we are instantiating # an object from the database (bug #11084), only update dimensions if # the field had a value before this assignment. Since the default # value for FileField subclasses is an instance of field.attr_class, # previous_file will only be None when we are called from # Model.__init__(). The ImageField.update_dimension_fields method # hooked up to the post_init signal handles the Model.__init__() cases. # Assignment happening outside of Model.__init__() will trigger the # update right here. if previous_file is not None: self.field.update_dimension_fields(instance, force=True) class ImageFieldFile(ImageFile, FieldFile): def delete(self, save=True): # Clear the image dimensions cache if hasattr(self, '_dimensions_cache'): del self._dimensions_cache super().delete(save) class ImageField(FileField): attr_class = ImageFieldFile descriptor_class = ImageFileDescriptor description = _("Image") def __init__(self, verbose_name=None, name=None, width_field=None, height_field=None, **kwargs): self.width_field, self.height_field = width_field, height_field super().__init__(verbose_name, name, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_image_library_installed(), ] def _check_image_library_installed(self): try: from PIL import Image # NOQA except ImportError: return [ checks.Error( 'Cannot use ImageField because Pillow is not installed.', hint=('Get Pillow at https://pypi.org/project/Pillow/ ' 'or run command "python -m pip install Pillow".'), obj=self, id='fields.E210', ) ] else: return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.width_field: kwargs['width_field'] = self.width_field if self.height_field: kwargs['height_field'] = self.height_field return name, path, args, kwargs def contribute_to_class(self, cls, name, **kwargs): super().contribute_to_class(cls, name, **kwargs) # Attach update_dimension_fields so that dimension fields declared # after their corresponding image field don't stay cleared by # Model.__init__, see bug #11196. # Only run post-initialization dimension update on non-abstract models if not cls._meta.abstract: signals.post_init.connect(self.update_dimension_fields, sender=cls) def update_dimension_fields(self, instance, force=False, *args, **kwargs): """ Update field's width and height fields, if defined. This method is hooked up to model's post_init signal to update dimensions after instantiating a model instance. However, dimensions won't be updated if the dimensions fields are already populated. This avoids unnecessary recalculation when loading an object from the database. Dimensions can be forced to update with force=True, which is how ImageFileDescriptor.__set__ calls this method. """ # Nothing to update if the field doesn't have dimension fields or if # the field is deferred. 
has_dimension_fields = self.width_field or self.height_field if not has_dimension_fields or self.attname not in instance.__dict__: return # getattr will call the ImageFileDescriptor's __get__ method, which # coerces the assigned value into an instance of self.attr_class # (ImageFieldFile in this case). file = getattr(instance, self.attname) # Nothing to update if we have no file and not being forced to update. if not file and not force: return dimension_fields_filled = not( (self.width_field and not getattr(instance, self.width_field)) or (self.height_field and not getattr(instance, self.height_field)) ) # When both dimension fields have values, we are most likely loading # data from the database or updating an image field that already had # an image stored. In the first case, we don't want to update the # dimension fields because we are already getting their values from the # database. In the second case, we do want to update the dimensions # fields and will skip this return because force will be True since we # were called from ImageFileDescriptor.__set__. if dimension_fields_filled and not force: return # file should be an instance of ImageFieldFile or should be None. if file: width = file.width height = file.height else: # No file, so clear dimensions fields. width = None height = None # Update the width and height fields. if self.width_field: setattr(instance, self.width_field, width) if self.height_field: setattr(instance, self.height_field, height) def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.ImageField, **kwargs, })
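# --- Usage sketch (added for illustration, not part of Django) ---------------
# A minimal sketch of the FieldFile/FileDescriptor behaviour implemented above.
# The Document model, its app_label, and the file contents are hypothetical and
# would normally live in an installed app's models.py; only FieldFile.save()
# and the descriptor access come from the code above.
from django.core.files.base import ContentFile
from django.db import models


class Document(models.Model):
    attachment = models.FileField(upload_to='documents/')

    class Meta:
        app_label = 'example'  # hypothetical app label for this sketch


def attach_report(document):
    # FieldFile.save() stores the content through the field's storage backend
    # and updates the instance attribute to the stored name; save=False skips
    # the model save so no database is needed for this sketch.
    document.attachment.save('report.txt', ContentFile(b'hello'), save=False)
    return document.attachment.name  # e.g. 'documents/report.txt'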
9041fe3da2d9f42ddd159ebe5fede61bd8c61ae8b487d6c38208b0fe8520e8ab
import datetime import uuid from functools import lru_cache from django.conf import settings from django.db import DatabaseError, NotSupportedError from django.db.backends.base.operations import BaseDatabaseOperations from django.db.backends.utils import strip_quotes, truncate_name from django.db.models import AutoField, Exists, ExpressionWrapper from django.db.models.expressions import RawSQL from django.db.models.sql.where import WhereNode from django.utils import timezone from django.utils.encoding import force_bytes, force_str from django.utils.functional import cached_property from django.utils.regex_helper import _lazy_re_compile from .base import Database from .utils import BulkInsertMapper, InsertVar, Oracle_datetime class DatabaseOperations(BaseDatabaseOperations): # Oracle uses NUMBER(5), NUMBER(11), and NUMBER(19) for integer fields. # SmallIntegerField uses NUMBER(11) instead of NUMBER(5), which is used by # SmallAutoField, to preserve backward compatibility. integer_field_ranges = { 'SmallIntegerField': (-99999999999, 99999999999), 'IntegerField': (-99999999999, 99999999999), 'BigIntegerField': (-9999999999999999999, 9999999999999999999), 'PositiveBigIntegerField': (0, 9999999999999999999), 'PositiveSmallIntegerField': (0, 99999999999), 'PositiveIntegerField': (0, 99999999999), 'SmallAutoField': (-99999, 99999), 'AutoField': (-99999999999, 99999999999), 'BigAutoField': (-9999999999999999999, 9999999999999999999), } set_operators = {**BaseDatabaseOperations.set_operators, 'difference': 'MINUS'} # TODO: colorize this SQL code with style.SQL_KEYWORD(), etc. _sequence_reset_sql = """ DECLARE table_value integer; seq_value integer; seq_name user_tab_identity_cols.sequence_name%%TYPE; BEGIN BEGIN SELECT sequence_name INTO seq_name FROM user_tab_identity_cols WHERE table_name = '%(table_name)s' AND column_name = '%(column_name)s'; EXCEPTION WHEN NO_DATA_FOUND THEN seq_name := '%(no_autofield_sequence_name)s'; END; SELECT NVL(MAX(%(column)s), 0) INTO table_value FROM %(table)s; SELECT NVL(last_number - cache_size, 0) INTO seq_value FROM user_sequences WHERE sequence_name = seq_name; WHILE table_value > seq_value LOOP EXECUTE IMMEDIATE 'SELECT "'||seq_name||'".nextval FROM DUAL' INTO seq_value; END LOOP; END; /""" # Oracle doesn't support string without precision; use the max string size. cast_char_field_without_max_length = 'NVARCHAR2(2000)' cast_data_types = { 'AutoField': 'NUMBER(11)', 'BigAutoField': 'NUMBER(19)', 'SmallAutoField': 'NUMBER(5)', 'TextField': cast_char_field_without_max_length, } def cache_key_culling_sql(self): return 'SELECT cache_key FROM %s ORDER BY cache_key OFFSET %%s ROWS FETCH FIRST 1 ROWS ONLY' def date_extract_sql(self, lookup_type, field_name): if lookup_type == 'week_day': # TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday. 
return "TO_CHAR(%s, 'D')" % field_name elif lookup_type == 'iso_week_day': return "TO_CHAR(%s - 1, 'D')" % field_name elif lookup_type == 'week': # IW = ISO week number return "TO_CHAR(%s, 'IW')" % field_name elif lookup_type == 'quarter': return "TO_CHAR(%s, 'Q')" % field_name elif lookup_type == 'iso_year': return "TO_CHAR(%s, 'IYYY')" % field_name else: # https://docs.oracle.com/en/database/oracle/oracle-database/18/sqlrf/EXTRACT-datetime.html return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name) def date_trunc_sql(self, lookup_type, field_name, tzname=None): field_name = self._convert_field_to_tz(field_name, tzname) # https://docs.oracle.com/en/database/oracle/oracle-database/18/sqlrf/ROUND-and-TRUNC-Date-Functions.html if lookup_type in ('year', 'month'): return "TRUNC(%s, '%s')" % (field_name, lookup_type.upper()) elif lookup_type == 'quarter': return "TRUNC(%s, 'Q')" % field_name elif lookup_type == 'week': return "TRUNC(%s, 'IW')" % field_name else: return "TRUNC(%s)" % field_name # Oracle crashes with "ORA-03113: end-of-file on communication channel" # if the time zone name is passed in parameter. Use interpolation instead. # https://groups.google.com/forum/#!msg/django-developers/zwQju7hbG78/9l934yelwfsJ # This regexp matches all time zone names from the zoneinfo database. _tzname_re = _lazy_re_compile(r'^[\w/:+-]+$') def _prepare_tzname_delta(self, tzname): if '+' in tzname: return tzname[tzname.find('+'):] elif '-' in tzname: return tzname[tzname.find('-'):] return tzname def _convert_field_to_tz(self, field_name, tzname): if not (settings.USE_TZ and tzname): return field_name if not self._tzname_re.match(tzname): raise ValueError("Invalid time zone name: %s" % tzname) # Convert from connection timezone to the local time, returning # TIMESTAMP WITH TIME ZONE and cast it back to TIMESTAMP to strip the # TIME ZONE details. if self.connection.timezone_name != tzname: return "CAST((FROM_TZ(%s, '%s') AT TIME ZONE '%s') AS TIMESTAMP)" % ( field_name, self.connection.timezone_name, self._prepare_tzname_delta(tzname), ) return field_name def datetime_cast_date_sql(self, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) return 'TRUNC(%s)' % field_name def datetime_cast_time_sql(self, field_name, tzname): # Since `TimeField` values are stored as TIMESTAMP where only the date # part is ignored, convert the field to the specified timezone. return self._convert_field_to_tz(field_name, tzname) def datetime_extract_sql(self, lookup_type, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) return self.date_extract_sql(lookup_type, field_name) def datetime_trunc_sql(self, lookup_type, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) # https://docs.oracle.com/en/database/oracle/oracle-database/18/sqlrf/ROUND-and-TRUNC-Date-Functions.html if lookup_type in ('year', 'month'): sql = "TRUNC(%s, '%s')" % (field_name, lookup_type.upper()) elif lookup_type == 'quarter': sql = "TRUNC(%s, 'Q')" % field_name elif lookup_type == 'week': sql = "TRUNC(%s, 'IW')" % field_name elif lookup_type == 'day': sql = "TRUNC(%s)" % field_name elif lookup_type == 'hour': sql = "TRUNC(%s, 'HH24')" % field_name elif lookup_type == 'minute': sql = "TRUNC(%s, 'MI')" % field_name else: sql = "CAST(%s AS DATE)" % field_name # Cast to DATE removes sub-second precision. 
return sql def time_trunc_sql(self, lookup_type, field_name, tzname=None): # The implementation is similar to `datetime_trunc_sql` as both # `DateTimeField` and `TimeField` are stored as TIMESTAMP where # the date part of the later is ignored. field_name = self._convert_field_to_tz(field_name, tzname) if lookup_type == 'hour': sql = "TRUNC(%s, 'HH24')" % field_name elif lookup_type == 'minute': sql = "TRUNC(%s, 'MI')" % field_name elif lookup_type == 'second': sql = "CAST(%s AS DATE)" % field_name # Cast to DATE removes sub-second precision. return sql def get_db_converters(self, expression): converters = super().get_db_converters(expression) internal_type = expression.output_field.get_internal_type() if internal_type in ['JSONField', 'TextField']: converters.append(self.convert_textfield_value) elif internal_type == 'BinaryField': converters.append(self.convert_binaryfield_value) elif internal_type == 'BooleanField': converters.append(self.convert_booleanfield_value) elif internal_type == 'DateTimeField': if settings.USE_TZ: converters.append(self.convert_datetimefield_value) elif internal_type == 'DateField': converters.append(self.convert_datefield_value) elif internal_type == 'TimeField': converters.append(self.convert_timefield_value) elif internal_type == 'UUIDField': converters.append(self.convert_uuidfield_value) # Oracle stores empty strings as null. If the field accepts the empty # string, undo this to adhere to the Django convention of using # the empty string instead of null. if expression.field.empty_strings_allowed: converters.append( self.convert_empty_bytes if internal_type == 'BinaryField' else self.convert_empty_string ) return converters def convert_textfield_value(self, value, expression, connection): if isinstance(value, Database.LOB): value = value.read() return value def convert_binaryfield_value(self, value, expression, connection): if isinstance(value, Database.LOB): value = force_bytes(value.read()) return value def convert_booleanfield_value(self, value, expression, connection): if value in (0, 1): value = bool(value) return value # cx_Oracle always returns datetime.datetime objects for # DATE and TIMESTAMP columns, but Django wants to see a # python datetime.date, .time, or .datetime. def convert_datetimefield_value(self, value, expression, connection): if value is not None: value = timezone.make_aware(value, self.connection.timezone) return value def convert_datefield_value(self, value, expression, connection): if isinstance(value, Database.Timestamp): value = value.date() return value def convert_timefield_value(self, value, expression, connection): if isinstance(value, Database.Timestamp): value = value.time() return value def convert_uuidfield_value(self, value, expression, connection): if value is not None: value = uuid.UUID(value) return value @staticmethod def convert_empty_string(value, expression, connection): return '' if value is None else value @staticmethod def convert_empty_bytes(value, expression, connection): return b'' if value is None else value def deferrable_sql(self): return " DEFERRABLE INITIALLY DEFERRED" def fetch_returned_insert_columns(self, cursor, returning_params): columns = [] for param in returning_params: value = param.get_value() if value == []: raise DatabaseError( 'The database did not return a new row id. Probably ' '"ORA-1403: no data found" was raised internally but was ' 'hidden by the Oracle OCI library (see ' 'https://code.djangoproject.com/ticket/28859).' 
                )
            columns.append(value[0])
        return tuple(columns)

    def field_cast_sql(self, db_type, internal_type):
        if db_type and db_type.endswith('LOB') and internal_type != 'JSONField':
            return "DBMS_LOB.SUBSTR(%s)"
        else:
            return "%s"

    def no_limit_value(self):
        return None

    def limit_offset_sql(self, low_mark, high_mark):
        fetch, offset = self._get_limit_offset_params(low_mark, high_mark)
        return ' '.join(sql for sql in (
            ('OFFSET %d ROWS' % offset) if offset else None,
            ('FETCH FIRST %d ROWS ONLY' % fetch) if fetch else None,
        ) if sql)

    def last_executed_query(self, cursor, sql, params):
        # https://cx-oracle.readthedocs.io/en/latest/cursor.html#Cursor.statement
        # The DB API definition does not define this attribute.
        statement = cursor.statement
        # Unlike Psycopg's `query` and MySQLdb`'s `_executed`, cx_Oracle's
        # `statement` doesn't contain the query parameters. Substitute
        # parameters manually.
        if isinstance(params, (tuple, list)):
            for i, param in enumerate(params):
                statement = statement.replace(':arg%d' % i, force_str(param, errors='replace'))
        elif isinstance(params, dict):
            for key, param in params.items():
                statement = statement.replace(':%s' % key, force_str(param, errors='replace'))
        return statement

    def last_insert_id(self, cursor, table_name, pk_name):
        sq_name = self._get_sequence_name(cursor, strip_quotes(table_name), pk_name)
        cursor.execute('SELECT "%s".currval FROM dual' % sq_name)
        return cursor.fetchone()[0]

    def lookup_cast(self, lookup_type, internal_type=None):
        if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'):
            return "UPPER(%s)"
        if internal_type == 'JSONField' and lookup_type == 'exact':
            return 'DBMS_LOB.SUBSTR(%s)'
        return "%s"

    def max_in_list_size(self):
        return 1000

    def max_name_length(self):
        return 30

    def pk_default_value(self):
        return "NULL"

    def prep_for_iexact_query(self, x):
        return x

    def process_clob(self, value):
        if value is None:
            return ''
        return value.read()

    def quote_name(self, name):
        # SQL92 requires delimited (quoted) names to be case-sensitive. When
        # not quoted, Oracle has case-insensitive behavior for identifiers, but
        # always defaults to uppercase.
        # We simplify things by making Oracle identifiers always uppercase.
        if not name.startswith('"') and not name.endswith('"'):
            name = '"%s"' % truncate_name(name, self.max_name_length())
        # Oracle puts the query text into a (query % args) construct, so % signs
        # in names need to be escaped. The '%%' will be collapsed back to '%' at
        # that stage so we aren't really making the name longer here.
name = name.replace('%', '%%') return name.upper() def regex_lookup(self, lookup_type): if lookup_type == 'regex': match_option = "'c'" else: match_option = "'i'" return 'REGEXP_LIKE(%%s, %%s, %s)' % match_option def return_insert_columns(self, fields): if not fields: return '', () field_names = [] params = [] for field in fields: field_names.append('%s.%s' % ( self.quote_name(field.model._meta.db_table), self.quote_name(field.column), )) params.append(InsertVar(field)) return 'RETURNING %s INTO %s' % ( ', '.join(field_names), ', '.join(['%s'] * len(params)), ), tuple(params) def __foreign_key_constraints(self, table_name, recursive): with self.connection.cursor() as cursor: if recursive: cursor.execute(""" SELECT user_tables.table_name, rcons.constraint_name FROM user_tables JOIN user_constraints cons ON (user_tables.table_name = cons.table_name AND cons.constraint_type = ANY('P', 'U')) LEFT JOIN user_constraints rcons ON (user_tables.table_name = rcons.table_name AND rcons.constraint_type = 'R') START WITH user_tables.table_name = UPPER(%s) CONNECT BY NOCYCLE PRIOR cons.constraint_name = rcons.r_constraint_name GROUP BY user_tables.table_name, rcons.constraint_name HAVING user_tables.table_name != UPPER(%s) ORDER BY MAX(level) DESC """, (table_name, table_name)) else: cursor.execute(""" SELECT cons.table_name, cons.constraint_name FROM user_constraints cons WHERE cons.constraint_type = 'R' AND cons.table_name = UPPER(%s) """, (table_name,)) return cursor.fetchall() @cached_property def _foreign_key_constraints(self): # 512 is large enough to fit the ~330 tables (as of this writing) in # Django's test suite. return lru_cache(maxsize=512)(self.__foreign_key_constraints) def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False): if not tables: return [] truncated_tables = {table.upper() for table in tables} constraints = set() # Oracle's TRUNCATE CASCADE only works with ON DELETE CASCADE foreign # keys which Django doesn't define. Emulate the PostgreSQL behavior # which truncates all dependent tables by manually retrieving all # foreign key constraints and resolving dependencies. for table in tables: for foreign_table, constraint in self._foreign_key_constraints(table, recursive=allow_cascade): if allow_cascade: truncated_tables.add(foreign_table) constraints.add((foreign_table, constraint)) sql = [ '%s %s %s %s %s %s %s %s;' % ( style.SQL_KEYWORD('ALTER'), style.SQL_KEYWORD('TABLE'), style.SQL_FIELD(self.quote_name(table)), style.SQL_KEYWORD('DISABLE'), style.SQL_KEYWORD('CONSTRAINT'), style.SQL_FIELD(self.quote_name(constraint)), style.SQL_KEYWORD('KEEP'), style.SQL_KEYWORD('INDEX'), ) for table, constraint in constraints ] + [ '%s %s %s;' % ( style.SQL_KEYWORD('TRUNCATE'), style.SQL_KEYWORD('TABLE'), style.SQL_FIELD(self.quote_name(table)), ) for table in truncated_tables ] + [ '%s %s %s %s %s %s;' % ( style.SQL_KEYWORD('ALTER'), style.SQL_KEYWORD('TABLE'), style.SQL_FIELD(self.quote_name(table)), style.SQL_KEYWORD('ENABLE'), style.SQL_KEYWORD('CONSTRAINT'), style.SQL_FIELD(self.quote_name(constraint)), ) for table, constraint in constraints ] if reset_sequences: sequences = [ sequence for sequence in self.connection.introspection.sequence_list() if sequence['table'].upper() in truncated_tables ] # Since we've just deleted all the rows, running our sequence ALTER # code will reset the sequence to 0. 
sql.extend(self.sequence_reset_by_name_sql(style, sequences)) return sql def sequence_reset_by_name_sql(self, style, sequences): sql = [] for sequence_info in sequences: no_autofield_sequence_name = self._get_no_autofield_sequence_name(sequence_info['table']) table = self.quote_name(sequence_info['table']) column = self.quote_name(sequence_info['column'] or 'id') query = self._sequence_reset_sql % { 'no_autofield_sequence_name': no_autofield_sequence_name, 'table': table, 'column': column, 'table_name': strip_quotes(table), 'column_name': strip_quotes(column), } sql.append(query) return sql def sequence_reset_sql(self, style, model_list): output = [] query = self._sequence_reset_sql for model in model_list: for f in model._meta.local_fields: if isinstance(f, AutoField): no_autofield_sequence_name = self._get_no_autofield_sequence_name(model._meta.db_table) table = self.quote_name(model._meta.db_table) column = self.quote_name(f.column) output.append(query % { 'no_autofield_sequence_name': no_autofield_sequence_name, 'table': table, 'column': column, 'table_name': strip_quotes(table), 'column_name': strip_quotes(column), }) # Only one AutoField is allowed per model, so don't # continue to loop break return output def start_transaction_sql(self): return '' def tablespace_sql(self, tablespace, inline=False): if inline: return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace) else: return "TABLESPACE %s" % self.quote_name(tablespace) def adapt_datefield_value(self, value): """ Transform a date value to an object compatible with what is expected by the backend driver for date columns. The default implementation transforms the date to text, but that is not necessary for Oracle. """ return value def adapt_datetimefield_value(self, value): """ Transform a datetime value to an object compatible with what is expected by the backend driver for datetime columns. If naive datetime is passed assumes that is in UTC. Normally Django models.DateTimeField makes sure that if USE_TZ is True passed datetime is timezone aware. """ if value is None: return None # Expression values are adapted by the database. if hasattr(value, 'resolve_expression'): return value # cx_Oracle doesn't support tz-aware datetimes if timezone.is_aware(value): if settings.USE_TZ: value = timezone.make_naive(value, self.connection.timezone) else: raise ValueError("Oracle backend does not support timezone-aware datetimes when USE_TZ is False.") return Oracle_datetime.from_datetime(value) def adapt_timefield_value(self, value): if value is None: return None # Expression values are adapted by the database. 
if hasattr(value, 'resolve_expression'): return value if isinstance(value, str): return datetime.datetime.strptime(value, '%H:%M:%S') # Oracle doesn't support tz-aware times if timezone.is_aware(value): raise ValueError("Oracle backend does not support timezone-aware times.") return Oracle_datetime(1900, 1, 1, value.hour, value.minute, value.second, value.microsecond) def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None): return value def combine_expression(self, connector, sub_expressions): lhs, rhs = sub_expressions if connector == '%%': return 'MOD(%s)' % ','.join(sub_expressions) elif connector == '&': return 'BITAND(%s)' % ','.join(sub_expressions) elif connector == '|': return 'BITAND(-%(lhs)s-1,%(rhs)s)+%(lhs)s' % {'lhs': lhs, 'rhs': rhs} elif connector == '<<': return '(%(lhs)s * POWER(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs} elif connector == '>>': return 'FLOOR(%(lhs)s / POWER(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs} elif connector == '^': return 'POWER(%s)' % ','.join(sub_expressions) elif connector == '#': raise NotSupportedError('Bitwise XOR is not supported in Oracle.') return super().combine_expression(connector, sub_expressions) def _get_no_autofield_sequence_name(self, table): """ Manually created sequence name to keep backward compatibility for AutoFields that aren't Oracle identity columns. """ name_length = self.max_name_length() - 3 return '%s_SQ' % truncate_name(strip_quotes(table), name_length).upper() def _get_sequence_name(self, cursor, table, pk_name): cursor.execute(""" SELECT sequence_name FROM user_tab_identity_cols WHERE table_name = UPPER(%s) AND column_name = UPPER(%s)""", [table, pk_name]) row = cursor.fetchone() return self._get_no_autofield_sequence_name(table) if row is None else row[0] def bulk_insert_sql(self, fields, placeholder_rows): query = [] for row in placeholder_rows: select = [] for i, placeholder in enumerate(row): # A model without any fields has fields=[None]. if fields[i]: internal_type = getattr(fields[i], 'target_field', fields[i]).get_internal_type() placeholder = BulkInsertMapper.types.get(internal_type, '%s') % placeholder # Add columns aliases to the first select to avoid "ORA-00918: # column ambiguously defined" when two or more columns in the # first select have the same value. if not query: placeholder = '%s col_%s' % (placeholder, i) select.append(placeholder) query.append('SELECT %s FROM DUAL' % ', '.join(select)) # Bulk insert to tables with Oracle identity columns causes Oracle to # add sequence.nextval to it. Sequence.nextval cannot be used with the # UNION operator. To prevent incorrect SQL, move UNION to a subquery. return 'SELECT * FROM (%s)' % ' UNION ALL '.join(query) def subtract_temporals(self, internal_type, lhs, rhs): if internal_type == 'DateField': lhs_sql, lhs_params = lhs rhs_sql, rhs_params = rhs params = (*lhs_params, *rhs_params) return "NUMTODSINTERVAL(TO_NUMBER(%s - %s), 'DAY')" % (lhs_sql, rhs_sql), params return super().subtract_temporals(internal_type, lhs, rhs) def bulk_batch_size(self, fields, objs): """Oracle restricts the number of parameters in a query.""" if fields: return self.connection.features.max_query_params // len(fields) return len(objs) def conditional_expression_supported_in_where_clause(self, expression): """ Oracle supports only EXISTS(...) or filters in the WHERE clause, others must be compared with True. 
""" if isinstance(expression, (Exists, WhereNode)): return True if isinstance(expression, ExpressionWrapper) and expression.conditional: return self.conditional_expression_supported_in_where_clause(expression.expression) if isinstance(expression, RawSQL) and expression.conditional: return True return False
a674796a46ed147ac31f44e32ac344ebcc34e7c924d1befdaa15556df24a2429
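# --- Usage sketch (added for illustration, not part of Django) ---------------
# A small sketch of the SQL fragments produced by the DatabaseOperations
# methods before this separator, assuming the default connection is configured
# to use this Oracle backend; the quoted column names are hypothetical.
from django.db import connection


def show_oracle_sql_fragments():
    ops = connection.ops
    # date_trunc_sql() builds TRUNC() expressions for date truncation.
    month_trunc = ops.date_trunc_sql('month', '"ORDERS"."CREATED"')
    # combine_expression() emulates bitwise AND with BITAND().
    bit_and = ops.combine_expression('&', ['"A"', '"B"'])
    # max_in_list_size() is the 1000-element IN list limit that the In lookup's
    # split_parameter_list_as_sql() chunking works around.
    return month_trunc, bit_and, ops.max_in_list_size()
    # -> ('TRUNC("ORDERS"."CREATED", \'MONTH\')', 'BITAND("A","B")', 1000)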
import os

from django.core.exceptions import SuspiciousFileOperation


def validate_file_name(name):
    if name != os.path.basename(name):
        raise SuspiciousFileOperation("File name '%s' includes path elements" % name)

    # Remove potentially dangerous names
    if name in {'', '.', '..'}:
        raise SuspiciousFileOperation("Could not derive file name from '%s'" % name)

    return name


class FileProxyMixin:
    """
    A mixin class used to forward file methods to an underlying file
    object.

    The internal file object has to be called "file"::

        class FileProxy(FileProxyMixin):
            def __init__(self, file):
                self.file = file
    """

    encoding = property(lambda self: self.file.encoding)
    fileno = property(lambda self: self.file.fileno)
    flush = property(lambda self: self.file.flush)
    isatty = property(lambda self: self.file.isatty)
    newlines = property(lambda self: self.file.newlines)
    read = property(lambda self: self.file.read)
    readinto = property(lambda self: self.file.readinto)
    readline = property(lambda self: self.file.readline)
    readlines = property(lambda self: self.file.readlines)
    seek = property(lambda self: self.file.seek)
    tell = property(lambda self: self.file.tell)
    truncate = property(lambda self: self.file.truncate)
    write = property(lambda self: self.file.write)
    writelines = property(lambda self: self.file.writelines)

    @property
    def closed(self):
        return not self.file or self.file.closed

    def readable(self):
        if self.closed:
            return False
        if hasattr(self.file, 'readable'):
            return self.file.readable()
        return True

    def writable(self):
        if self.closed:
            return False
        if hasattr(self.file, 'writable'):
            return self.file.writable()
        return 'w' in getattr(self.file, 'mode', '')

    def seekable(self):
        if self.closed:
            return False
        if hasattr(self.file, 'seekable'):
            return self.file.seekable()
        return True

    def __iter__(self):
        return iter(self.file)
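# --- Usage sketch (added for illustration, not part of Django) ---------------
# A minimal sketch of the FileProxy pattern described in the docstring above,
# wrapping an in-memory buffer, plus the validate_file_name() behaviour. The
# FileProxy class mirrors the docstring example and is not itself a Django class.
import io


class FileProxy(FileProxyMixin):
    def __init__(self, file):
        self.file = file


def proxy_demo():
    proxy = FileProxy(io.BytesIO(b'hello world'))
    assert proxy.readable() and proxy.seekable()
    data = proxy.read()                     # forwarded to the underlying BytesIO
    validate_file_name('notes.txt')         # bare names pass through unchanged
    try:
        validate_file_name('../notes.txt')  # path elements are rejected
    except SuspiciousFileOperation:
        pass
    return data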
abde1bd0b160dfae6b0522b56825cc448875955685f97605a8b11fe9f31b4745
import os import pathlib from datetime import datetime from urllib.parse import urljoin from django.conf import settings from django.core.exceptions import SuspiciousFileOperation from django.core.files import File, locks from django.core.files.move import file_move_safe from django.core.files.utils import validate_file_name from django.core.signals import setting_changed from django.utils import timezone from django.utils._os import safe_join from django.utils.crypto import get_random_string from django.utils.deconstruct import deconstructible from django.utils.encoding import filepath_to_uri from django.utils.functional import LazyObject, cached_property from django.utils.module_loading import import_string from django.utils.text import get_valid_filename __all__ = ( 'Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage', 'get_storage_class', ) class Storage: """ A base storage class, providing some default behaviors that all other storage systems can inherit or override, as necessary. """ # The following methods represent a public interface to private methods. # These shouldn't be overridden by subclasses unless absolutely necessary. def open(self, name, mode='rb'): """Retrieve the specified file from storage.""" return self._open(name, mode) def save(self, name, content, max_length=None): """ Save new content to the file specified by name. The content should be a proper File object or any Python file-like object, ready to be read from the beginning. """ # Get the proper name for the file, as it will actually be saved. if name is None: name = content.name if not hasattr(content, 'chunks'): content = File(content, name) name = self.get_available_name(name, max_length=max_length) return self._save(name, content) # These methods are part of the public API, with default implementations. def get_valid_name(self, name): """ Return a filename, based on the provided filename, that's suitable for use in the target storage system. """ return get_valid_filename(name) def get_alternative_name(self, file_root, file_ext): """ Return an alternative filename, by adding an underscore and a random 7 character alphanumeric string (before the file extension, if one exists) to the filename. """ return '%s_%s%s' % (file_root, get_random_string(7), file_ext) def get_available_name(self, name, max_length=None): """ Return a filename that's free on the target storage system and available for new content to be written to. """ dir_name, file_name = os.path.split(name) if '..' in pathlib.PurePath(dir_name).parts: raise SuspiciousFileOperation("Detected path traversal attempt in '%s'" % dir_name) validate_file_name(file_name) file_root, file_ext = os.path.splitext(file_name) # If the filename already exists, generate an alternative filename # until it doesn't exist. # Truncate original name if required, so the new filename does not # exceed the max_length. while self.exists(name) or (max_length and len(name) > max_length): # file_ext includes the dot. name = os.path.join(dir_name, self.get_alternative_name(file_root, file_ext)) if max_length is None: continue # Truncate file_root if max_length exceeded. truncation = len(name) - max_length if truncation > 0: file_root = file_root[:-truncation] # Entire file_root was truncated in attempt to find an available filename. if not file_root: raise SuspiciousFileOperation( 'Storage can not find an available filename for "%s". ' 'Please make sure that the corresponding file field ' 'allows sufficient "max_length".' 
% name ) name = os.path.join(dir_name, self.get_alternative_name(file_root, file_ext)) return name def generate_filename(self, filename): """ Validate the filename by calling get_valid_name() and return a filename to be passed to the save() method. """ # `filename` may include a path as returned by FileField.upload_to. dirname, filename = os.path.split(filename) if '..' in pathlib.PurePath(dirname).parts: raise SuspiciousFileOperation("Detected path traversal attempt in '%s'" % dirname) return os.path.normpath(os.path.join(dirname, self.get_valid_name(filename))) def path(self, name): """ Return a local filesystem path where the file can be retrieved using Python's built-in open() function. Storage systems that can't be accessed using open() should *not* implement this method. """ raise NotImplementedError("This backend doesn't support absolute paths.") # The following methods form the public API for storage systems, but with # no default implementations. Subclasses must implement *all* of these. def delete(self, name): """ Delete the specified file from the storage system. """ raise NotImplementedError('subclasses of Storage must provide a delete() method') def exists(self, name): """ Return True if a file referenced by the given name already exists in the storage system, or False if the name is available for a new file. """ raise NotImplementedError('subclasses of Storage must provide an exists() method') def listdir(self, path): """ List the contents of the specified path. Return a 2-tuple of lists: the first item being directories, the second item being files. """ raise NotImplementedError('subclasses of Storage must provide a listdir() method') def size(self, name): """ Return the total size, in bytes, of the file specified by name. """ raise NotImplementedError('subclasses of Storage must provide a size() method') def url(self, name): """ Return an absolute URL where the file's contents can be accessed directly by a Web browser. """ raise NotImplementedError('subclasses of Storage must provide a url() method') def get_accessed_time(self, name): """ Return the last accessed time (as a datetime) of the file specified by name. The datetime will be timezone-aware if USE_TZ=True. """ raise NotImplementedError('subclasses of Storage must provide a get_accessed_time() method') def get_created_time(self, name): """ Return the creation time (as a datetime) of the file specified by name. The datetime will be timezone-aware if USE_TZ=True. """ raise NotImplementedError('subclasses of Storage must provide a get_created_time() method') def get_modified_time(self, name): """ Return the last modified time (as a datetime) of the file specified by name. The datetime will be timezone-aware if USE_TZ=True. """ raise NotImplementedError('subclasses of Storage must provide a get_modified_time() method') @deconstructible class FileSystemStorage(Storage): """ Standard filesystem storage """ # The combination of O_CREAT and O_EXCL makes os.open() raise OSError if # the file already exists before it's opened. 
OS_OPEN_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, 'O_BINARY', 0) def __init__(self, location=None, base_url=None, file_permissions_mode=None, directory_permissions_mode=None): self._location = location self._base_url = base_url self._file_permissions_mode = file_permissions_mode self._directory_permissions_mode = directory_permissions_mode setting_changed.connect(self._clear_cached_properties) def _clear_cached_properties(self, setting, **kwargs): """Reset setting based property values.""" if setting == 'MEDIA_ROOT': self.__dict__.pop('base_location', None) self.__dict__.pop('location', None) elif setting == 'MEDIA_URL': self.__dict__.pop('base_url', None) elif setting == 'FILE_UPLOAD_PERMISSIONS': self.__dict__.pop('file_permissions_mode', None) elif setting == 'FILE_UPLOAD_DIRECTORY_PERMISSIONS': self.__dict__.pop('directory_permissions_mode', None) def _value_or_setting(self, value, setting): return setting if value is None else value @cached_property def base_location(self): return self._value_or_setting(self._location, settings.MEDIA_ROOT) @cached_property def location(self): return os.path.abspath(self.base_location) @cached_property def base_url(self): if self._base_url is not None and not self._base_url.endswith('/'): self._base_url += '/' return self._value_or_setting(self._base_url, settings.MEDIA_URL) @cached_property def file_permissions_mode(self): return self._value_or_setting(self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS) @cached_property def directory_permissions_mode(self): return self._value_or_setting(self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS) def _open(self, name, mode='rb'): return File(open(self.path(name), mode)) def _save(self, name, content): full_path = self.path(name) # Create any intermediate directories that do not exist. directory = os.path.dirname(full_path) try: if self.directory_permissions_mode is not None: # Set the umask because os.makedirs() doesn't apply the "mode" # argument to intermediate-level directories. old_umask = os.umask(0o777 & ~self.directory_permissions_mode) try: os.makedirs(directory, self.directory_permissions_mode, exist_ok=True) finally: os.umask(old_umask) else: os.makedirs(directory, exist_ok=True) except FileExistsError: raise FileExistsError('%s exists and is not a directory.' % directory) # There's a potential race condition between get_available_name and # saving the file; it's possible that two threads might return the # same name, at which point all sorts of fun happens. So we need to # try to create the file, but if it already exists we have to go back # to get_available_name() and try again. while True: try: # This file has a file path that we can move. if hasattr(content, 'temporary_file_path'): file_move_safe(content.temporary_file_path(), full_path) # This is a normal uploadedfile that we can stream. else: # The current umask value is masked out by os.open! fd = os.open(full_path, self.OS_OPEN_FLAGS, 0o666) _file = None try: locks.lock(fd, locks.LOCK_EX) for chunk in content.chunks(): if _file is None: mode = 'wb' if isinstance(chunk, bytes) else 'wt' _file = os.fdopen(fd, mode) _file.write(chunk) finally: locks.unlock(fd) if _file is not None: _file.close() else: os.close(fd) except FileExistsError: # A new name is needed if the file exists. name = self.get_available_name(name) full_path = self.path(name) else: # OK, the file save worked. Break out of the loop. 
break if self.file_permissions_mode is not None: os.chmod(full_path, self.file_permissions_mode) # Store filenames with forward slashes, even on Windows. return str(name).replace('\\', '/') def delete(self, name): if not name: raise ValueError('The name must be given to delete().') name = self.path(name) # If the file or directory exists, delete it from the filesystem. try: if os.path.isdir(name): os.rmdir(name) else: os.remove(name) except FileNotFoundError: # FileNotFoundError is raised if the file or directory was removed # concurrently. pass def exists(self, name): return os.path.exists(self.path(name)) def listdir(self, path): path = self.path(path) directories, files = [], [] for entry in os.scandir(path): if entry.is_dir(): directories.append(entry.name) else: files.append(entry.name) return directories, files def path(self, name): return safe_join(self.location, name) def size(self, name): return os.path.getsize(self.path(name)) def url(self, name): if self.base_url is None: raise ValueError("This file is not accessible via a URL.") url = filepath_to_uri(name) if url is not None: url = url.lstrip('/') return urljoin(self.base_url, url) def _datetime_from_timestamp(self, ts): """ If timezone support is enabled, make an aware datetime object in UTC; otherwise make a naive one in the local timezone. """ if settings.USE_TZ: # Safe to use .replace() because UTC doesn't have DST return datetime.utcfromtimestamp(ts).replace(tzinfo=timezone.utc) else: return datetime.fromtimestamp(ts) def get_accessed_time(self, name): return self._datetime_from_timestamp(os.path.getatime(self.path(name))) def get_created_time(self, name): return self._datetime_from_timestamp(os.path.getctime(self.path(name))) def get_modified_time(self, name): return self._datetime_from_timestamp(os.path.getmtime(self.path(name))) def get_storage_class(import_path=None): return import_string(import_path or settings.DEFAULT_FILE_STORAGE) class DefaultStorage(LazyObject): def _setup(self): self._wrapped = get_storage_class()() default_storage = DefaultStorage()
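# --- Usage sketch (added for illustration, not part of Django) ---------------
# A minimal sketch of the FileSystemStorage API defined above, writing to a
# temporary directory. Explicit location/base_url values avoid depending on
# MEDIA_ROOT/MEDIA_URL, but minimal settings are still configured so the
# upload-permission lookups in _save() can resolve; names and contents are
# arbitrary examples.
import tempfile

from django.conf import settings
from django.core.files.base import ContentFile


def storage_demo():
    if not settings.configured:
        settings.configure()  # defaults are enough for the FILE_UPLOAD_* settings
    storage = FileSystemStorage(location=tempfile.mkdtemp(), base_url='/media/')
    # save() routes through get_available_name(), so a second save under the
    # same name gets an '_<random>' suffix instead of overwriting the first.
    first = storage.save('notes/hello.txt', ContentFile(b'first'))
    second = storage.save('notes/hello.txt', ContentFile(b'second'))
    assert first != second
    return storage.url(first), storage.size(first)  # ('/media/notes/hello.txt', 5)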
"Database cache backend." import base64 import pickle from datetime import datetime from django.conf import settings from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache from django.db import DatabaseError, connections, models, router, transaction from django.utils import timezone class Options: """A class that will quack like a Django model _meta class. This allows cache operations to be controlled by the router """ def __init__(self, table): self.db_table = table self.app_label = 'django_cache' self.model_name = 'cacheentry' self.verbose_name = 'cache entry' self.verbose_name_plural = 'cache entries' self.object_name = 'CacheEntry' self.abstract = False self.managed = True self.proxy = False self.swapped = False class BaseDatabaseCache(BaseCache): def __init__(self, table, params): super().__init__(params) self._table = table class CacheEntry: _meta = Options(table) self.cache_model_class = CacheEntry class DatabaseCache(BaseDatabaseCache): # This class uses cursors provided by the database connection. This means # it reads expiration values as aware or naive datetimes, depending on the # value of USE_TZ and whether the database supports time zones. The ORM's # conversion and adaptation infrastructure is then used to avoid comparing # aware and naive datetimes accidentally. pickle_protocol = pickle.HIGHEST_PROTOCOL def get(self, key, default=None, version=None): return self.get_many([key], version).get(key, default) def get_many(self, keys, version=None): if not keys: return {} key_map = {} for key in keys: self.validate_key(key) key_map[self.make_key(key, version)] = key db = router.db_for_read(self.cache_model_class) connection = connections[db] quote_name = connection.ops.quote_name table = quote_name(self._table) with connection.cursor() as cursor: cursor.execute( 'SELECT %s, %s, %s FROM %s WHERE %s IN (%s)' % ( quote_name('cache_key'), quote_name('value'), quote_name('expires'), table, quote_name('cache_key'), ', '.join(['%s'] * len(key_map)), ), list(key_map), ) rows = cursor.fetchall() result = {} expired_keys = [] expression = models.Expression(output_field=models.DateTimeField()) converters = (connection.ops.get_db_converters(expression) + expression.get_db_converters(connection)) for key, value, expires in rows: for converter in converters: expires = converter(expires, expression, connection) if expires < timezone.now(): expired_keys.append(key) else: value = connection.ops.process_clob(value) value = pickle.loads(base64.b64decode(value.encode())) result[key_map.get(key)] = value self._base_delete_many(expired_keys) return result def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) self._base_set('set', key, value, timeout) def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) return self._base_set('add', key, value, timeout) def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) return self._base_set('touch', key, None, timeout) def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT): timeout = self.get_backend_timeout(timeout) db = router.db_for_write(self.cache_model_class) connection = connections[db] quote_name = connection.ops.quote_name table = quote_name(self._table) with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM %s" % table) num = cursor.fetchone()[0] now = timezone.now() now = 
now.replace(microsecond=0) if timeout is None: exp = datetime.max elif settings.USE_TZ: exp = datetime.utcfromtimestamp(timeout) else: exp = datetime.fromtimestamp(timeout) exp = exp.replace(microsecond=0) if num > self._max_entries: self._cull(db, cursor, now) pickled = pickle.dumps(value, self.pickle_protocol) # The DB column is expecting a string, so make sure the value is a # string, not bytes. Refs #19274. b64encoded = base64.b64encode(pickled).decode('latin1') try: # Note: typecasting for datetimes is needed by some 3rd party # database backends. All core backends work without typecasting, # so be careful about changes here - test suite will NOT pick # regressions. with transaction.atomic(using=db): cursor.execute( 'SELECT %s, %s FROM %s WHERE %s = %%s' % ( quote_name('cache_key'), quote_name('expires'), table, quote_name('cache_key'), ), [key] ) result = cursor.fetchone() if result: current_expires = result[1] expression = models.Expression(output_field=models.DateTimeField()) for converter in (connection.ops.get_db_converters(expression) + expression.get_db_converters(connection)): current_expires = converter(current_expires, expression, connection) exp = connection.ops.adapt_datetimefield_value(exp) if result and mode == 'touch': cursor.execute( 'UPDATE %s SET %s = %%s WHERE %s = %%s' % ( table, quote_name('expires'), quote_name('cache_key') ), [exp, key] ) elif result and (mode == 'set' or (mode == 'add' and current_expires < now)): cursor.execute( 'UPDATE %s SET %s = %%s, %s = %%s WHERE %s = %%s' % ( table, quote_name('value'), quote_name('expires'), quote_name('cache_key'), ), [b64encoded, exp, key] ) elif mode != 'touch': cursor.execute( 'INSERT INTO %s (%s, %s, %s) VALUES (%%s, %%s, %%s)' % ( table, quote_name('cache_key'), quote_name('value'), quote_name('expires'), ), [key, b64encoded, exp] ) else: return False # touch failed. 
except DatabaseError: # To be threadsafe, updates/inserts are allowed to fail silently return False else: return True def delete(self, key, version=None): self.validate_key(key) return self._base_delete_many([self.make_key(key, version)]) def delete_many(self, keys, version=None): key_list = [] for key in keys: self.validate_key(key) key_list.append(self.make_key(key, version)) self._base_delete_many(key_list) def _base_delete_many(self, keys): if not keys: return False db = router.db_for_write(self.cache_model_class) connection = connections[db] quote_name = connection.ops.quote_name table = quote_name(self._table) with connection.cursor() as cursor: cursor.execute( 'DELETE FROM %s WHERE %s IN (%s)' % ( table, quote_name('cache_key'), ', '.join(['%s'] * len(keys)), ), keys, ) return bool(cursor.rowcount) def has_key(self, key, version=None): key = self.make_key(key, version=version) self.validate_key(key) db = router.db_for_read(self.cache_model_class) connection = connections[db] quote_name = connection.ops.quote_name if settings.USE_TZ: now = datetime.utcnow() else: now = datetime.now() now = now.replace(microsecond=0) with connection.cursor() as cursor: cursor.execute( 'SELECT %s FROM %s WHERE %s = %%s and expires > %%s' % ( quote_name('cache_key'), quote_name(self._table), quote_name('cache_key'), ), [key, connection.ops.adapt_datetimefield_value(now)] ) return cursor.fetchone() is not None def _cull(self, db, cursor, now): if self._cull_frequency == 0: self.clear() else: connection = connections[db] table = connection.ops.quote_name(self._table) cursor.execute("DELETE FROM %s WHERE expires < %%s" % table, [connection.ops.adapt_datetimefield_value(now)]) cursor.execute("SELECT COUNT(*) FROM %s" % table) num = cursor.fetchone()[0] if num > self._max_entries: cull_num = num // self._cull_frequency cursor.execute( connection.ops.cache_key_culling_sql() % table, [cull_num]) last_cache_key = cursor.fetchone() if last_cache_key: cursor.execute( 'DELETE FROM %s WHERE cache_key < %%s' % table, [last_cache_key[0]], ) def clear(self): db = router.db_for_write(self.cache_model_class) connection = connections[db] table = connection.ops.quote_name(self._table) with connection.cursor() as cursor: cursor.execute('DELETE FROM %s' % table)
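# ---------------------------------------------------------------------------
# Editorial configuration sketch, not part of the module above. It shows how
# the database cache backend is usually wired up; 'my_cache_table' is a
# placeholder table name, and the table must first be created with the
# `manage.py createcachetable` command.
# ---------------------------------------------------------------------------
# In settings.py:
#
#     CACHES = {
#         'default': {
#             'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
#             'LOCATION': 'my_cache_table',
#         },
#     }
#
# Application code then goes through the generic cache API, which routes to
# the DatabaseCache.set()/get()/delete() methods defined above:
#
#     from django.core.cache import cache
#     cache.set('greeting', {'text': 'hello'}, timeout=300)
#     cache.get('greeting')          # -> {'text': 'hello'} until it expires
#     cache.delete('greeting')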
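# ---------------------------------------------------------------------------
# Editorial registration sketch for the admin options module that follows;
# it is not part of the original source. 'Book' and its fields are
# hypothetical, and the options shown map onto ModelAdmin hooks defined
# below (list_display, list_filter, search_fields, raw_id_fields,
# readonly_fields).
# ---------------------------------------------------------------------------
#     from django.contrib import admin
#     from myapp.models import Book
#
#     @admin.register(Book)
#     class BookAdmin(admin.ModelAdmin):
#         list_display = ('title', 'author', 'published')
#         list_filter = ('published',)
#         # '^' and '=' prefixes map to istartswith/iexact lookups; see
#         # construct_search() inside get_search_results() below.
#         search_fields = ('title', '^author__name')
#         raw_id_fields = ('author',)
#         readonly_fields = ('created_at',)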
import copy import json import operator import re from functools import partial, reduce, update_wrapper from urllib.parse import quote as urlquote from django import forms from django.conf import settings from django.contrib import messages from django.contrib.admin import helpers, widgets from django.contrib.admin.checks import ( BaseModelAdminChecks, InlineModelAdminChecks, ModelAdminChecks, ) from django.contrib.admin.decorators import display from django.contrib.admin.exceptions import DisallowedModelAdminToField from django.contrib.admin.templatetags.admin_urls import add_preserved_filters from django.contrib.admin.utils import ( NestedObjects, construct_change_message, flatten_fieldsets, get_deleted_objects, lookup_spawns_duplicates, model_format_dict, model_ngettext, quote, unquote, ) from django.contrib.admin.widgets import ( AutocompleteSelect, AutocompleteSelectMultiple, ) from django.contrib.auth import get_permission_codename from django.core.exceptions import ( FieldDoesNotExist, FieldError, PermissionDenied, ValidationError, ) from django.core.paginator import Paginator from django.db import models, router, transaction from django.db.models.constants import LOOKUP_SEP from django.forms.formsets import DELETION_FIELD_NAME, all_valid from django.forms.models import ( BaseInlineFormSet, inlineformset_factory, modelform_defines_fields, modelform_factory, modelformset_factory, ) from django.forms.widgets import CheckboxSelectMultiple, SelectMultiple from django.http import HttpResponseRedirect from django.http.response import HttpResponseBase from django.template.response import SimpleTemplateResponse, TemplateResponse from django.urls import reverse from django.utils.decorators import method_decorator from django.utils.html import format_html from django.utils.http import urlencode from django.utils.safestring import mark_safe from django.utils.text import ( capfirst, format_lazy, get_text_list, smart_split, unescape_string_literal, ) from django.utils.translation import gettext as _, ngettext from django.views.decorators.csrf import csrf_protect from django.views.generic import RedirectView IS_POPUP_VAR = '_popup' TO_FIELD_VAR = '_to_field' HORIZONTAL, VERTICAL = 1, 2 def get_content_type_for_model(obj): # Since this module gets imported in the application's root package, # it cannot import models from other applications at the module level. from django.contrib.contenttypes.models import ContentType return ContentType.objects.get_for_model(obj, for_concrete_model=False) def get_ul_class(radio_style): return 'radiolist' if radio_style == VERTICAL else 'radiolist inline' class IncorrectLookupParameters(Exception): pass # Defaults for formfield_overrides. ModelAdmin subclasses can change this # by adding to ModelAdmin.formfield_overrides. 
FORMFIELD_FOR_DBFIELD_DEFAULTS = { models.DateTimeField: { 'form_class': forms.SplitDateTimeField, 'widget': widgets.AdminSplitDateTime }, models.DateField: {'widget': widgets.AdminDateWidget}, models.TimeField: {'widget': widgets.AdminTimeWidget}, models.TextField: {'widget': widgets.AdminTextareaWidget}, models.URLField: {'widget': widgets.AdminURLFieldWidget}, models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget}, models.BigIntegerField: {'widget': widgets.AdminBigIntegerFieldWidget}, models.CharField: {'widget': widgets.AdminTextInputWidget}, models.ImageField: {'widget': widgets.AdminFileWidget}, models.FileField: {'widget': widgets.AdminFileWidget}, models.EmailField: {'widget': widgets.AdminEmailInputWidget}, models.UUIDField: {'widget': widgets.AdminUUIDInputWidget}, } csrf_protect_m = method_decorator(csrf_protect) class BaseModelAdmin(metaclass=forms.MediaDefiningClass): """Functionality common to both ModelAdmin and InlineAdmin.""" autocomplete_fields = () raw_id_fields = () fields = None exclude = None fieldsets = None form = forms.ModelForm filter_vertical = () filter_horizontal = () radio_fields = {} prepopulated_fields = {} formfield_overrides = {} readonly_fields = () ordering = None sortable_by = None view_on_site = True show_full_result_count = True checks_class = BaseModelAdminChecks def check(self, **kwargs): return self.checks_class().check(self, **kwargs) def __init__(self): # Merge FORMFIELD_FOR_DBFIELD_DEFAULTS with the formfield_overrides # rather than simply overwriting. overrides = copy.deepcopy(FORMFIELD_FOR_DBFIELD_DEFAULTS) for k, v in self.formfield_overrides.items(): overrides.setdefault(k, {}).update(v) self.formfield_overrides = overrides def formfield_for_dbfield(self, db_field, request, **kwargs): """ Hook for specifying the form Field instance for a given database Field instance. If kwargs are given, they're passed to the form Field's constructor. """ # If the field specifies choices, we don't need to look for special # admin widgets - we just need to use a select widget of some kind. if db_field.choices: return self.formfield_for_choice_field(db_field, request, **kwargs) # ForeignKey or ManyToManyFields if isinstance(db_field, (models.ForeignKey, models.ManyToManyField)): # Combine the field kwargs with any options for formfield_overrides. # Make sure the passed in **kwargs override anything in # formfield_overrides because **kwargs is more specific, and should # always win. if db_field.__class__ in self.formfield_overrides: kwargs = {**self.formfield_overrides[db_field.__class__], **kwargs} # Get the correct formfield. if isinstance(db_field, models.ForeignKey): formfield = self.formfield_for_foreignkey(db_field, request, **kwargs) elif isinstance(db_field, models.ManyToManyField): formfield = self.formfield_for_manytomany(db_field, request, **kwargs) # For non-raw_id fields, wrap the widget with a wrapper that adds # extra HTML -- the "add other" interface -- to the end of the # rendered output. formfield can be None if it came from a # OneToOneField with parent_link=True or a M2M intermediary. 
if formfield and db_field.name not in self.raw_id_fields: related_modeladmin = self.admin_site._registry.get(db_field.remote_field.model) wrapper_kwargs = {} if related_modeladmin: wrapper_kwargs.update( can_add_related=related_modeladmin.has_add_permission(request), can_change_related=related_modeladmin.has_change_permission(request), can_delete_related=related_modeladmin.has_delete_permission(request), can_view_related=related_modeladmin.has_view_permission(request), ) formfield.widget = widgets.RelatedFieldWidgetWrapper( formfield.widget, db_field.remote_field, self.admin_site, **wrapper_kwargs ) return formfield # If we've got overrides for the formfield defined, use 'em. **kwargs # passed to formfield_for_dbfield override the defaults. for klass in db_field.__class__.mro(): if klass in self.formfield_overrides: kwargs = {**copy.deepcopy(self.formfield_overrides[klass]), **kwargs} return db_field.formfield(**kwargs) # For any other type of field, just call its formfield() method. return db_field.formfield(**kwargs) def formfield_for_choice_field(self, db_field, request, **kwargs): """ Get a form Field for a database Field that has declared choices. """ # If the field is named as a radio_field, use a RadioSelect if db_field.name in self.radio_fields: # Avoid stomping on custom widget/choices arguments. if 'widget' not in kwargs: kwargs['widget'] = widgets.AdminRadioSelect(attrs={ 'class': get_ul_class(self.radio_fields[db_field.name]), }) if 'choices' not in kwargs: kwargs['choices'] = db_field.get_choices( include_blank=db_field.blank, blank_choice=[('', _('None'))] ) return db_field.formfield(**kwargs) def get_field_queryset(self, db, db_field, request): """ If the ModelAdmin specifies ordering, the queryset should respect that ordering. Otherwise don't specify the queryset, let the field decide (return None in that case). """ related_admin = self.admin_site._registry.get(db_field.remote_field.model) if related_admin is not None: ordering = related_admin.get_ordering(request) if ordering is not None and ordering != (): return db_field.remote_field.model._default_manager.using(db).order_by(*ordering) return None def formfield_for_foreignkey(self, db_field, request, **kwargs): """ Get a form Field for a ForeignKey. """ db = kwargs.get('using') if 'widget' not in kwargs: if db_field.name in self.get_autocomplete_fields(request): kwargs['widget'] = AutocompleteSelect(db_field, self.admin_site, using=db) elif db_field.name in self.raw_id_fields: kwargs['widget'] = widgets.ForeignKeyRawIdWidget(db_field.remote_field, self.admin_site, using=db) elif db_field.name in self.radio_fields: kwargs['widget'] = widgets.AdminRadioSelect(attrs={ 'class': get_ul_class(self.radio_fields[db_field.name]), }) kwargs['empty_label'] = _('None') if db_field.blank else None if 'queryset' not in kwargs: queryset = self.get_field_queryset(db, db_field, request) if queryset is not None: kwargs['queryset'] = queryset return db_field.formfield(**kwargs) def formfield_for_manytomany(self, db_field, request, **kwargs): """ Get a form Field for a ManyToManyField. """ # If it uses an intermediary model that isn't auto created, don't show # a field in admin. 
if not db_field.remote_field.through._meta.auto_created: return None db = kwargs.get('using') if 'widget' not in kwargs: autocomplete_fields = self.get_autocomplete_fields(request) if db_field.name in autocomplete_fields: kwargs['widget'] = AutocompleteSelectMultiple( db_field, self.admin_site, using=db, ) elif db_field.name in self.raw_id_fields: kwargs['widget'] = widgets.ManyToManyRawIdWidget( db_field.remote_field, self.admin_site, using=db, ) elif db_field.name in [*self.filter_vertical, *self.filter_horizontal]: kwargs['widget'] = widgets.FilteredSelectMultiple( db_field.verbose_name, db_field.name in self.filter_vertical ) if 'queryset' not in kwargs: queryset = self.get_field_queryset(db, db_field, request) if queryset is not None: kwargs['queryset'] = queryset form_field = db_field.formfield(**kwargs) if (isinstance(form_field.widget, SelectMultiple) and not isinstance(form_field.widget, (CheckboxSelectMultiple, AutocompleteSelectMultiple))): msg = _('Hold down “Control”, or “Command” on a Mac, to select more than one.') help_text = form_field.help_text form_field.help_text = format_lazy('{} {}', help_text, msg) if help_text else msg return form_field def get_autocomplete_fields(self, request): """ Return a list of ForeignKey and/or ManyToMany fields which should use an autocomplete widget. """ return self.autocomplete_fields def get_view_on_site_url(self, obj=None): if obj is None or not self.view_on_site: return None if callable(self.view_on_site): return self.view_on_site(obj) elif hasattr(obj, 'get_absolute_url'): # use the ContentType lookup if view_on_site is True return reverse('admin:view_on_site', kwargs={ 'content_type_id': get_content_type_for_model(obj).pk, 'object_id': obj.pk }) def get_empty_value_display(self): """ Return the empty_value_display set on ModelAdmin or AdminSite. """ try: return mark_safe(self.empty_value_display) except AttributeError: return mark_safe(self.admin_site.empty_value_display) def get_exclude(self, request, obj=None): """ Hook for specifying exclude. """ return self.exclude def get_fields(self, request, obj=None): """ Hook for specifying fields. """ if self.fields: return self.fields # _get_form_for_get_fields() is implemented in subclasses. form = self._get_form_for_get_fields(request, obj) return [*form.base_fields, *self.get_readonly_fields(request, obj)] def get_fieldsets(self, request, obj=None): """ Hook for specifying fieldsets. """ if self.fieldsets: return self.fieldsets return [(None, {'fields': self.get_fields(request, obj)})] def get_inlines(self, request, obj): """Hook for specifying custom inlines.""" return self.inlines def get_ordering(self, request): """ Hook for specifying field ordering. """ return self.ordering or () # otherwise we might try to *None, which is bad ;) def get_readonly_fields(self, request, obj=None): """ Hook for specifying custom readonly fields. """ return self.readonly_fields def get_prepopulated_fields(self, request, obj=None): """ Hook for specifying custom prepopulated fields. """ return self.prepopulated_fields def get_queryset(self, request): """ Return a QuerySet of all model instances that can be edited by the admin site. This is used by changelist_view. """ qs = self.model._default_manager.get_queryset() # TODO: this should be handled by some parameter to the ChangeList. 
ordering = self.get_ordering(request) if ordering: qs = qs.order_by(*ordering) return qs def get_sortable_by(self, request): """Hook for specifying which fields can be sorted in the changelist.""" return self.sortable_by if self.sortable_by is not None else self.get_list_display(request) def lookup_allowed(self, lookup, value): from django.contrib.admin.filters import SimpleListFilter model = self.model # Check FKey lookups that are allowed, so that popups produced by # ForeignKeyRawIdWidget, on the basis of ForeignKey.limit_choices_to, # are allowed to work. for fk_lookup in model._meta.related_fkey_lookups: # As ``limit_choices_to`` can be a callable, invoke it here. if callable(fk_lookup): fk_lookup = fk_lookup() if (lookup, value) in widgets.url_params_from_lookup_dict(fk_lookup).items(): return True relation_parts = [] prev_field = None for part in lookup.split(LOOKUP_SEP): try: field = model._meta.get_field(part) except FieldDoesNotExist: # Lookups on nonexistent fields are ok, since they're ignored # later. break # It is allowed to filter on values that would be found from local # model anyways. For example, if you filter on employee__department__id, # then the id value would be found already from employee__department_id. if not prev_field or (prev_field.is_relation and field not in prev_field.get_path_info()[-1].target_fields): relation_parts.append(part) if not getattr(field, 'get_path_info', None): # This is not a relational field, so further parts # must be transforms. break prev_field = field model = field.get_path_info()[-1].to_opts.model if len(relation_parts) <= 1: # Either a local field filter, or no fields at all. return True valid_lookups = {self.date_hierarchy} for filter_item in self.list_filter: if isinstance(filter_item, type) and issubclass(filter_item, SimpleListFilter): valid_lookups.add(filter_item.parameter_name) elif isinstance(filter_item, (list, tuple)): valid_lookups.add(filter_item[0]) else: valid_lookups.add(filter_item) # Is it a valid relational lookup? return not { LOOKUP_SEP.join(relation_parts), LOOKUP_SEP.join(relation_parts + [part]) }.isdisjoint(valid_lookups) def to_field_allowed(self, request, to_field): """ Return True if the model associated with this admin should be allowed to be referenced by the specified field. """ opts = self.model._meta try: field = opts.get_field(to_field) except FieldDoesNotExist: return False # Always allow referencing the primary key since it's already possible # to get this information from the change view URL. if field.primary_key: return True # Allow reverse relationships to models defining m2m fields if they # target the specified field. for many_to_many in opts.many_to_many: if many_to_many.m2m_target_field_name() == to_field: return True # Make sure at least one of the models registered for this site # references this field through a FK or a M2M relationship. 
registered_models = set() for model, admin in self.admin_site._registry.items(): registered_models.add(model) for inline in admin.inlines: registered_models.add(inline.model) related_objects = ( f for f in opts.get_fields(include_hidden=True) if (f.auto_created and not f.concrete) ) for related_object in related_objects: related_model = related_object.related_model remote_field = related_object.field.remote_field if (any(issubclass(model, related_model) for model in registered_models) and hasattr(remote_field, 'get_related_field') and remote_field.get_related_field() == field): return True return False def has_add_permission(self, request): """ Return True if the given request has permission to add an object. Can be overridden by the user in subclasses. """ opts = self.opts codename = get_permission_codename('add', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_change_permission(self, request, obj=None): """ Return True if the given request has permission to change the given Django model instance, the default implementation doesn't examine the `obj` parameter. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to change the `obj` model instance. If `obj` is None, this should return True if the given request has permission to change *any* object of the given type. """ opts = self.opts codename = get_permission_codename('change', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_delete_permission(self, request, obj=None): """ Return True if the given request has permission to change the given Django model instance, the default implementation doesn't examine the `obj` parameter. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to delete the `obj` model instance. If `obj` is None, this should return True if the given request has permission to delete *any* object of the given type. """ opts = self.opts codename = get_permission_codename('delete', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_view_permission(self, request, obj=None): """ Return True if the given request has permission to view the given Django model instance. The default implementation doesn't examine the `obj` parameter. If overridden by the user in subclasses, it should return True if the given request has permission to view the `obj` model instance. If `obj` is None, it should return True if the request has permission to view any object of the given type. """ opts = self.opts codename_view = get_permission_codename('view', opts) codename_change = get_permission_codename('change', opts) return ( request.user.has_perm('%s.%s' % (opts.app_label, codename_view)) or request.user.has_perm('%s.%s' % (opts.app_label, codename_change)) ) def has_view_or_change_permission(self, request, obj=None): return self.has_view_permission(request, obj) or self.has_change_permission(request, obj) def has_module_permission(self, request): """ Return True if the given request has any permission in the given app label. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to view the module on the admin index page and access the module's index page. Overriding it does not restrict access to the add, change or delete views. Use `ModelAdmin.has_(add|change|delete)_permission` for that. 
""" return request.user.has_module_perms(self.opts.app_label) class ModelAdmin(BaseModelAdmin): """Encapsulate all admin options and functionality for a given model.""" list_display = ('__str__',) list_display_links = () list_filter = () list_select_related = False list_per_page = 100 list_max_show_all = 200 list_editable = () search_fields = () date_hierarchy = None save_as = False save_as_continue = True save_on_top = False paginator = Paginator preserve_filters = True inlines = [] # Custom templates (designed to be over-ridden in subclasses) add_form_template = None change_form_template = None change_list_template = None delete_confirmation_template = None delete_selected_confirmation_template = None object_history_template = None popup_response_template = None # Actions actions = [] action_form = helpers.ActionForm actions_on_top = True actions_on_bottom = False actions_selection_counter = True checks_class = ModelAdminChecks def __init__(self, model, admin_site): self.model = model self.opts = model._meta self.admin_site = admin_site super().__init__() def __str__(self): return "%s.%s" % (self.model._meta.app_label, self.__class__.__name__) def get_inline_instances(self, request, obj=None): inline_instances = [] for inline_class in self.get_inlines(request, obj): inline = inline_class(self.model, self.admin_site) if request: if not (inline.has_view_or_change_permission(request, obj) or inline.has_add_permission(request, obj) or inline.has_delete_permission(request, obj)): continue if not inline.has_add_permission(request, obj): inline.max_num = 0 inline_instances.append(inline) return inline_instances def get_urls(self): from django.urls import path def wrap(view): def wrapper(*args, **kwargs): return self.admin_site.admin_view(view)(*args, **kwargs) wrapper.model_admin = self return update_wrapper(wrapper, view) info = self.model._meta.app_label, self.model._meta.model_name return [ path('', wrap(self.changelist_view), name='%s_%s_changelist' % info), path('add/', wrap(self.add_view), name='%s_%s_add' % info), path('<path:object_id>/history/', wrap(self.history_view), name='%s_%s_history' % info), path('<path:object_id>/delete/', wrap(self.delete_view), name='%s_%s_delete' % info), path('<path:object_id>/change/', wrap(self.change_view), name='%s_%s_change' % info), # For backwards compatibility (was the change url before 1.9) path('<path:object_id>/', wrap(RedirectView.as_view( pattern_name='%s:%s_%s_change' % ((self.admin_site.name,) + info) ))), ] @property def urls(self): return self.get_urls() @property def media(self): extra = '' if settings.DEBUG else '.min' js = [ 'vendor/jquery/jquery%s.js' % extra, 'jquery.init.js', 'core.js', 'admin/RelatedObjectLookups.js', 'actions.js', 'urlify.js', 'prepopulate.js', 'vendor/xregexp/xregexp%s.js' % extra, ] return forms.Media(js=['admin/js/%s' % url for url in js]) def get_model_perms(self, request): """ Return a dict of all perms for this model. This dict has the keys ``add``, ``change``, ``delete``, and ``view`` mapping to the True/False for each of those actions. """ return { 'add': self.has_add_permission(request), 'change': self.has_change_permission(request), 'delete': self.has_delete_permission(request), 'view': self.has_view_permission(request), } def _get_form_for_get_fields(self, request, obj): return self.get_form(request, obj, fields=None) def get_form(self, request, obj=None, change=False, **kwargs): """ Return a Form class for use in the admin add view. This is used by add_view and change_view. 
""" if 'fields' in kwargs: fields = kwargs.pop('fields') else: fields = flatten_fieldsets(self.get_fieldsets(request, obj)) excluded = self.get_exclude(request, obj) exclude = [] if excluded is None else list(excluded) readonly_fields = self.get_readonly_fields(request, obj) exclude.extend(readonly_fields) # Exclude all fields if it's a change form and the user doesn't have # the change permission. if change and hasattr(request, 'user') and not self.has_change_permission(request, obj): exclude.extend(fields) if excluded is None and hasattr(self.form, '_meta') and self.form._meta.exclude: # Take the custom ModelForm's Meta.exclude into account only if the # ModelAdmin doesn't define its own. exclude.extend(self.form._meta.exclude) # if exclude is an empty list we pass None to be consistent with the # default on modelform_factory exclude = exclude or None # Remove declared form fields which are in readonly_fields. new_attrs = dict.fromkeys(f for f in readonly_fields if f in self.form.declared_fields) form = type(self.form.__name__, (self.form,), new_attrs) defaults = { 'form': form, 'fields': fields, 'exclude': exclude, 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } if defaults['fields'] is None and not modelform_defines_fields(defaults['form']): defaults['fields'] = forms.ALL_FIELDS try: return modelform_factory(self.model, **defaults) except FieldError as e: raise FieldError( '%s. Check fields/fieldsets/exclude attributes of class %s.' % (e, self.__class__.__name__) ) def get_changelist(self, request, **kwargs): """ Return the ChangeList class for use on the changelist page. """ from django.contrib.admin.views.main import ChangeList return ChangeList def get_changelist_instance(self, request): """ Return a `ChangeList` instance based on `request`. May raise `IncorrectLookupParameters`. """ list_display = self.get_list_display(request) list_display_links = self.get_list_display_links(request, list_display) # Add the action checkboxes if any actions are available. if self.get_actions(request): list_display = ['action_checkbox', *list_display] sortable_by = self.get_sortable_by(request) ChangeList = self.get_changelist(request) return ChangeList( request, self.model, list_display, list_display_links, self.get_list_filter(request), self.date_hierarchy, self.get_search_fields(request), self.get_list_select_related(request), self.list_per_page, self.list_max_show_all, self.list_editable, self, sortable_by, ) def get_object(self, request, object_id, from_field=None): """ Return an instance matching the field and value provided, the primary key is used if no field is provided. Return ``None`` if no match is found or the object_id fails validation. """ queryset = self.get_queryset(request) model = queryset.model field = model._meta.pk if from_field is None else model._meta.get_field(from_field) try: object_id = field.to_python(object_id) return queryset.get(**{field.name: object_id}) except (model.DoesNotExist, ValidationError, ValueError): return None def get_changelist_form(self, request, **kwargs): """ Return a Form class for use in the Formset on the changelist page. 
""" defaults = { 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } if defaults.get('fields') is None and not modelform_defines_fields(defaults.get('form')): defaults['fields'] = forms.ALL_FIELDS return modelform_factory(self.model, **defaults) def get_changelist_formset(self, request, **kwargs): """ Return a FormSet class for use on the changelist page if list_editable is used. """ defaults = { 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } return modelformset_factory( self.model, self.get_changelist_form(request), extra=0, fields=self.list_editable, **defaults ) def get_formsets_with_inlines(self, request, obj=None): """ Yield formsets and the corresponding inlines. """ for inline in self.get_inline_instances(request, obj): yield inline.get_formset(request, obj), inline def get_paginator(self, request, queryset, per_page, orphans=0, allow_empty_first_page=True): return self.paginator(queryset, per_page, orphans, allow_empty_first_page) def log_addition(self, request, object, message): """ Log that an object has been successfully added. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import ADDITION, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(object).pk, object_id=object.pk, object_repr=str(object), action_flag=ADDITION, change_message=message, ) def log_change(self, request, object, message): """ Log that an object has been successfully changed. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import CHANGE, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(object).pk, object_id=object.pk, object_repr=str(object), action_flag=CHANGE, change_message=message, ) def log_deletion(self, request, object, object_repr): """ Log that an object will be deleted. Note that this method must be called before the deletion. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import DELETION, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(object).pk, object_id=object.pk, object_repr=object_repr, action_flag=DELETION, ) @display(description=mark_safe('<input type="checkbox" id="action-toggle">')) def action_checkbox(self, obj): """ A list_display column containing a checkbox widget. """ return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME, str(obj.pk)) @staticmethod def _get_action_description(func, name): return getattr(func, 'short_description', capfirst(name.replace('_', ' '))) def _get_base_actions(self): """Return the list of actions, prior to any request-based filtering.""" actions = [] base_actions = (self.get_action(action) for action in self.actions or []) # get_action might have returned None, so filter any of those out. base_actions = [action for action in base_actions if action] base_action_names = {name for _, name, _ in base_actions} # Gather actions from the admin site first for (name, func) in self.admin_site.actions: if name in base_action_names: continue description = self._get_action_description(func, name) actions.append((func, name, description)) # Add actions from this ModelAdmin. 
actions.extend(base_actions) return actions def _filter_actions_by_permissions(self, request, actions): """Filter out any actions that the user doesn't have access to.""" filtered_actions = [] for action in actions: callable = action[0] if not hasattr(callable, 'allowed_permissions'): filtered_actions.append(action) continue permission_checks = ( getattr(self, 'has_%s_permission' % permission) for permission in callable.allowed_permissions ) if any(has_permission(request) for has_permission in permission_checks): filtered_actions.append(action) return filtered_actions def get_actions(self, request): """ Return a dictionary mapping the names of all actions for this ModelAdmin to a tuple of (callable, name, description) for each action. """ # If self.actions is set to None that means actions are disabled on # this page. if self.actions is None or IS_POPUP_VAR in request.GET: return {} actions = self._filter_actions_by_permissions(request, self._get_base_actions()) return {name: (func, name, desc) for func, name, desc in actions} def get_action_choices(self, request, default_choices=models.BLANK_CHOICE_DASH): """ Return a list of choices for use in a form object. Each choice is a tuple (name, description). """ choices = [] + default_choices for func, name, description in self.get_actions(request).values(): choice = (name, description % model_format_dict(self.opts)) choices.append(choice) return choices def get_action(self, action): """ Return a given action from a parameter, which can either be a callable, or the name of a method on the ModelAdmin. Return is a tuple of (callable, name, description). """ # If the action is a callable, just use it. if callable(action): func = action action = action.__name__ # Next, look for a method. Grab it off self.__class__ to get an unbound # method instead of a bound one; this ensures that the calling # conventions are the same for functions and methods. elif hasattr(self.__class__, action): func = getattr(self.__class__, action) # Finally, look for a named method on the admin site else: try: func = self.admin_site.get_action(action) except KeyError: return None description = self._get_action_description(func, action) return func, action, description def get_list_display(self, request): """ Return a sequence containing the fields to be displayed on the changelist. """ return self.list_display def get_list_display_links(self, request, list_display): """ Return a sequence containing the fields to be displayed as links on the changelist. The list_display parameter is the list of fields returned by get_list_display(). """ if self.list_display_links or self.list_display_links is None or not list_display: return self.list_display_links else: # Use only the first item in list_display as link return list(list_display)[:1] def get_list_filter(self, request): """ Return a sequence containing the fields to be displayed as filters in the right sidebar of the changelist page. """ return self.list_filter def get_list_select_related(self, request): """ Return a list of fields to add to the select_related() part of the changelist items query. """ return self.list_select_related def get_search_fields(self, request): """ Return a sequence containing the fields to be searched whenever somebody submits a search query. """ return self.search_fields def get_search_results(self, request, queryset, search_term): """ Return a tuple containing a queryset to implement the search and a boolean indicating if the results may contain duplicates. """ # Apply keyword searches. 
def construct_search(field_name): if field_name.startswith('^'): return "%s__istartswith" % field_name[1:] elif field_name.startswith('='): return "%s__iexact" % field_name[1:] elif field_name.startswith('@'): return "%s__search" % field_name[1:] # Use field_name if it includes a lookup. opts = queryset.model._meta lookup_fields = field_name.split(LOOKUP_SEP) # Go through the fields, following all relations. prev_field = None for path_part in lookup_fields: if path_part == 'pk': path_part = opts.pk.name try: field = opts.get_field(path_part) except FieldDoesNotExist: # Use valid query lookups. if prev_field and prev_field.get_lookup(path_part): return field_name else: prev_field = field if hasattr(field, 'get_path_info'): # Update opts to follow the relation. opts = field.get_path_info()[-1].to_opts # Otherwise, use the field with icontains. return "%s__icontains" % field_name may_have_duplicates = False search_fields = self.get_search_fields(request) if search_fields and search_term: orm_lookups = [construct_search(str(search_field)) for search_field in search_fields] for bit in smart_split(search_term): if bit.startswith(('"', "'")) and bit[0] == bit[-1]: bit = unescape_string_literal(bit) or_queries = [models.Q(**{orm_lookup: bit}) for orm_lookup in orm_lookups] queryset = queryset.filter(reduce(operator.or_, or_queries)) may_have_duplicates |= any( lookup_spawns_duplicates(self.opts, search_spec) for search_spec in orm_lookups ) return queryset, may_have_duplicates def get_preserved_filters(self, request): """ Return the preserved filters querystring. """ match = request.resolver_match if self.preserve_filters and match: opts = self.model._meta current_url = '%s:%s' % (match.app_name, match.url_name) changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name) if current_url == changelist_url: preserved_filters = request.GET.urlencode() else: preserved_filters = request.GET.get('_changelist_filters') if preserved_filters: return urlencode({'_changelist_filters': preserved_filters}) return '' def construct_change_message(self, request, form, formsets, add=False): """ Construct a JSON structure describing changes from a changed object. """ return construct_change_message(form, formsets, add) def message_user(self, request, message, level=messages.INFO, extra_tags='', fail_silently=False): """ Send a message to the user. The default implementation posts a message using the django.contrib.messages backend. Exposes almost the same API as messages.add_message(), but accepts the positional arguments in a different order to maintain backwards compatibility. For convenience, it accepts the `level` argument as a string rather than the usual level number. """ if not isinstance(level, int): # attempt to get the level if passed a string try: level = getattr(messages.constants, level.upper()) except AttributeError: levels = messages.constants.DEFAULT_TAGS.values() levels_repr = ', '.join('`%s`' % level for level in levels) raise ValueError( 'Bad message level string: `%s`. Possible values are: %s' % (level, levels_repr) ) messages.add_message(request, level, message, extra_tags=extra_tags, fail_silently=fail_silently) def save_form(self, request, form, change): """ Given a ModelForm return an unsaved instance. ``change`` is True if the object is being changed, and False if it's being added. """ return form.save(commit=False) def save_model(self, request, obj, form, change): """ Given a model instance save it to the database. 
""" obj.save() def delete_model(self, request, obj): """ Given a model instance delete it from the database. """ obj.delete() def delete_queryset(self, request, queryset): """Given a queryset, delete it from the database.""" queryset.delete() def save_formset(self, request, form, formset, change): """ Given an inline formset save it to the database. """ formset.save() def save_related(self, request, form, formsets, change): """ Given the ``HttpRequest``, the parent ``ModelForm`` instance, the list of inline formsets and a boolean value based on whether the parent is being added or changed, save the related objects to the database. Note that at this point save_form() and save_model() have already been called. """ form.save_m2m() for formset in formsets: self.save_formset(request, form, formset, change=change) def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None): opts = self.model._meta app_label = opts.app_label preserved_filters = self.get_preserved_filters(request) form_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, form_url) view_on_site_url = self.get_view_on_site_url(obj) has_editable_inline_admin_formsets = False for inline in context['inline_admin_formsets']: if inline.has_add_permission or inline.has_change_permission or inline.has_delete_permission: has_editable_inline_admin_formsets = True break context.update({ 'add': add, 'change': change, 'has_view_permission': self.has_view_permission(request, obj), 'has_add_permission': self.has_add_permission(request), 'has_change_permission': self.has_change_permission(request, obj), 'has_delete_permission': self.has_delete_permission(request, obj), 'has_editable_inline_admin_formsets': has_editable_inline_admin_formsets, 'has_file_field': context['adminform'].form.is_multipart() or any( admin_formset.formset.is_multipart() for admin_formset in context['inline_admin_formsets'] ), 'has_absolute_url': view_on_site_url is not None, 'absolute_url': view_on_site_url, 'form_url': form_url, 'opts': opts, 'content_type_id': get_content_type_for_model(self.model).pk, 'save_as': self.save_as, 'save_on_top': self.save_on_top, 'to_field_var': TO_FIELD_VAR, 'is_popup_var': IS_POPUP_VAR, 'app_label': app_label, }) if add and self.add_form_template is not None: form_template = self.add_form_template else: form_template = self.change_form_template request.current_app = self.admin_site.name return TemplateResponse(request, form_template or [ "admin/%s/%s/change_form.html" % (app_label, opts.model_name), "admin/%s/change_form.html" % app_label, "admin/change_form.html" ], context) def response_add(self, request, obj, post_url_continue=None): """ Determine the HttpResponse for the add_view stage. """ opts = obj._meta preserved_filters = self.get_preserved_filters(request) obj_url = reverse( 'admin:%s_%s_change' % (opts.app_label, opts.model_name), args=(quote(obj.pk),), current_app=self.admin_site.name, ) # Add a link to the object's change form if the user can edit the obj. if self.has_change_permission(request, obj): obj_repr = format_html('<a href="{}">{}</a>', urlquote(obj_url), obj) else: obj_repr = str(obj) msg_dict = { 'name': opts.verbose_name, 'obj': obj_repr, } # Here, we distinguish between different save types by checking for # the presence of keys in request.POST. 
if IS_POPUP_VAR in request.POST: to_field = request.POST.get(TO_FIELD_VAR) if to_field: attr = str(to_field) else: attr = obj._meta.pk.attname value = obj.serializable_value(attr) popup_response_data = json.dumps({ 'value': str(value), 'obj': str(obj), }) return TemplateResponse(request, self.popup_response_template or [ 'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name), 'admin/%s/popup_response.html' % opts.app_label, 'admin/popup_response.html', ], { 'popup_response_data': popup_response_data, }) elif "_continue" in request.POST or ( # Redirecting after "Save as new". "_saveasnew" in request.POST and self.save_as_continue and self.has_change_permission(request, obj) ): msg = _('The {name} “{obj}” was added successfully.') if self.has_change_permission(request, obj): msg += ' ' + _('You may edit it again below.') self.message_user(request, format_html(msg, **msg_dict), messages.SUCCESS) if post_url_continue is None: post_url_continue = obj_url post_url_continue = add_preserved_filters( {'preserved_filters': preserved_filters, 'opts': opts}, post_url_continue ) return HttpResponseRedirect(post_url_continue) elif "_addanother" in request.POST: msg = format_html( _('The {name} “{obj}” was added successfully. You may add another {name} below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = request.path redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) else: msg = format_html( _('The {name} “{obj}” was added successfully.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) return self.response_post_save_add(request, obj) def response_change(self, request, obj): """ Determine the HttpResponse for the change_view stage. """ if IS_POPUP_VAR in request.POST: opts = obj._meta to_field = request.POST.get(TO_FIELD_VAR) attr = str(to_field) if to_field else opts.pk.attname value = request.resolver_match.kwargs['object_id'] new_value = obj.serializable_value(attr) popup_response_data = json.dumps({ 'action': 'change', 'value': str(value), 'obj': str(obj), 'new_value': str(new_value), }) return TemplateResponse(request, self.popup_response_template or [ 'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name), 'admin/%s/popup_response.html' % opts.app_label, 'admin/popup_response.html', ], { 'popup_response_data': popup_response_data, }) opts = self.model._meta preserved_filters = self.get_preserved_filters(request) msg_dict = { 'name': opts.verbose_name, 'obj': format_html('<a href="{}">{}</a>', urlquote(request.path), obj), } if "_continue" in request.POST: msg = format_html( _('The {name} “{obj}” was changed successfully. You may edit it again below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = request.path redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) elif "_saveasnew" in request.POST: msg = format_html( _('The {name} “{obj}” was added successfully. 
You may edit it again below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = reverse('admin:%s_%s_change' % (opts.app_label, opts.model_name), args=(obj.pk,), current_app=self.admin_site.name) redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) elif "_addanother" in request.POST: msg = format_html( _('The {name} “{obj}” was changed successfully. You may add another {name} below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = reverse('admin:%s_%s_add' % (opts.app_label, opts.model_name), current_app=self.admin_site.name) redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) else: msg = format_html( _('The {name} “{obj}” was changed successfully.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) return self.response_post_save_change(request, obj) def _response_post_save(self, request, obj): opts = self.model._meta if self.has_view_or_change_permission(request): post_url = reverse('admin:%s_%s_changelist' % (opts.app_label, opts.model_name), current_app=self.admin_site.name) preserved_filters = self.get_preserved_filters(request) post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url) else: post_url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(post_url) def response_post_save_add(self, request, obj): """ Figure out where to redirect after the 'Save' button has been pressed when adding a new object. """ return self._response_post_save(request, obj) def response_post_save_change(self, request, obj): """ Figure out where to redirect after the 'Save' button has been pressed when editing an existing object. """ return self._response_post_save(request, obj) def response_action(self, request, queryset): """ Handle an admin action. This is called if a request is POSTed to the changelist; it returns an HttpResponse if the action was handled, and None otherwise. """ # There can be multiple action forms on the page (at the top # and bottom of the change list, for example). Get the action # whose button was pushed. try: action_index = int(request.POST.get('index', 0)) except ValueError: action_index = 0 # Construct the action form. data = request.POST.copy() data.pop(helpers.ACTION_CHECKBOX_NAME, None) data.pop("index", None) # Use the action whose button was pushed try: data.update({'action': data.getlist('action')[action_index]}) except IndexError: # If we didn't get an action from the chosen form that's invalid # POST data, so by deleting action it'll fail the validation check # below. So no need to do anything here pass action_form = self.action_form(data, auto_id=None) action_form.fields['action'].choices = self.get_action_choices(request) # If the form's valid we can handle the action. if action_form.is_valid(): action = action_form.cleaned_data['action'] select_across = action_form.cleaned_data['select_across'] func = self.get_actions(request)[action][0] # Get the list of selected PKs. If nothing's selected, we can't # perform an action on it, so bail. Except we want to perform # the action explicitly on all objects. 
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME) if not selected and not select_across: # Reminder that something needs to be selected or nothing will happen msg = _("Items must be selected in order to perform " "actions on them. No items have been changed.") self.message_user(request, msg, messages.WARNING) return None if not select_across: # Perform the action only on the selected objects queryset = queryset.filter(pk__in=selected) response = func(self, request, queryset) # Actions may return an HttpResponse-like object, which will be # used as the response from the POST. If not, we'll be a good # little HTTP citizen and redirect back to the changelist page. if isinstance(response, HttpResponseBase): return response else: return HttpResponseRedirect(request.get_full_path()) else: msg = _("No action selected.") self.message_user(request, msg, messages.WARNING) return None def response_delete(self, request, obj_display, obj_id): """ Determine the HttpResponse for the delete_view stage. """ opts = self.model._meta if IS_POPUP_VAR in request.POST: popup_response_data = json.dumps({ 'action': 'delete', 'value': str(obj_id), }) return TemplateResponse(request, self.popup_response_template or [ 'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name), 'admin/%s/popup_response.html' % opts.app_label, 'admin/popup_response.html', ], { 'popup_response_data': popup_response_data, }) self.message_user( request, _('The %(name)s “%(obj)s” was deleted successfully.') % { 'name': opts.verbose_name, 'obj': obj_display, }, messages.SUCCESS, ) if self.has_change_permission(request, None): post_url = reverse( 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name), current_app=self.admin_site.name, ) preserved_filters = self.get_preserved_filters(request) post_url = add_preserved_filters( {'preserved_filters': preserved_filters, 'opts': opts}, post_url ) else: post_url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(post_url) def render_delete_form(self, request, context): opts = self.model._meta app_label = opts.app_label request.current_app = self.admin_site.name context.update( to_field_var=TO_FIELD_VAR, is_popup_var=IS_POPUP_VAR, media=self.media, ) return TemplateResponse( request, self.delete_confirmation_template or [ "admin/{}/{}/delete_confirmation.html".format(app_label, opts.model_name), "admin/{}/delete_confirmation.html".format(app_label), "admin/delete_confirmation.html", ], context, ) def get_inline_formsets(self, request, formsets, inline_instances, obj=None): # Edit permissions on parent model are required for editable inlines. can_edit_parent = self.has_change_permission(request, obj) if obj else self.has_add_permission(request) inline_admin_formsets = [] for inline, formset in zip(inline_instances, formsets): fieldsets = list(inline.get_fieldsets(request, obj)) readonly = list(inline.get_readonly_fields(request, obj)) if can_edit_parent: has_add_permission = inline.has_add_permission(request, obj) has_change_permission = inline.has_change_permission(request, obj) has_delete_permission = inline.has_delete_permission(request, obj) else: # Disable all edit-permissions, and overide formset settings. 
has_add_permission = has_change_permission = has_delete_permission = False formset.extra = formset.max_num = 0 has_view_permission = inline.has_view_permission(request, obj) prepopulated = dict(inline.get_prepopulated_fields(request, obj)) inline_admin_formset = helpers.InlineAdminFormSet( inline, formset, fieldsets, prepopulated, readonly, model_admin=self, has_add_permission=has_add_permission, has_change_permission=has_change_permission, has_delete_permission=has_delete_permission, has_view_permission=has_view_permission, ) inline_admin_formsets.append(inline_admin_formset) return inline_admin_formsets def get_changeform_initial_data(self, request): """ Get the initial form data from the request's GET params. """ initial = dict(request.GET.items()) for k in initial: try: f = self.model._meta.get_field(k) except FieldDoesNotExist: continue # We have to special-case M2Ms as a list of comma-separated PKs. if isinstance(f, models.ManyToManyField): initial[k] = initial[k].split(",") return initial def _get_obj_does_not_exist_redirect(self, request, opts, object_id): """ Create a message informing the user that the object doesn't exist and return a redirect to the admin index page. """ msg = _('%(name)s with ID “%(key)s” doesn’t exist. Perhaps it was deleted?') % { 'name': opts.verbose_name, 'key': unquote(object_id), } self.message_user(request, msg, messages.WARNING) url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(url) @csrf_protect_m def changeform_view(self, request, object_id=None, form_url='', extra_context=None): with transaction.atomic(using=router.db_for_write(self.model)): return self._changeform_view(request, object_id, form_url, extra_context) def _changeform_view(self, request, object_id, form_url, extra_context): to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR)) if to_field and not self.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." 
                                              % to_field)

        model = self.model
        opts = model._meta

        if request.method == 'POST' and '_saveasnew' in request.POST:
            object_id = None

        add = object_id is None

        if add:
            if not self.has_add_permission(request):
                raise PermissionDenied
            obj = None
        else:
            obj = self.get_object(request, unquote(object_id), to_field)

            if request.method == 'POST':
                if not self.has_change_permission(request, obj):
                    raise PermissionDenied
            else:
                if not self.has_view_or_change_permission(request, obj):
                    raise PermissionDenied

            if obj is None:
                return self._get_obj_does_not_exist_redirect(request, opts, object_id)

        fieldsets = self.get_fieldsets(request, obj)
        ModelForm = self.get_form(
            request, obj, change=not add, fields=flatten_fieldsets(fieldsets)
        )
        if request.method == 'POST':
            form = ModelForm(request.POST, request.FILES, instance=obj)
            form_validated = form.is_valid()
            if form_validated:
                new_object = self.save_form(request, form, change=not add)
            else:
                new_object = form.instance
            formsets, inline_instances = self._create_formsets(request, new_object, change=not add)
            if all_valid(formsets) and form_validated:
                self.save_model(request, new_object, form, not add)
                self.save_related(request, form, formsets, not add)
                change_message = self.construct_change_message(request, form, formsets, add)
                if add:
                    self.log_addition(request, new_object, change_message)
                    return self.response_add(request, new_object)
                else:
                    self.log_change(request, new_object, change_message)
                    return self.response_change(request, new_object)
            else:
                form_validated = False
        else:
            if add:
                initial = self.get_changeform_initial_data(request)
                form = ModelForm(initial=initial)
                formsets, inline_instances = self._create_formsets(request, form.instance, change=False)
            else:
                form = ModelForm(instance=obj)
                formsets, inline_instances = self._create_formsets(request, obj, change=True)

        if not add and not self.has_change_permission(request, obj):
            readonly_fields = flatten_fieldsets(fieldsets)
        else:
            readonly_fields = self.get_readonly_fields(request, obj)
        adminForm = helpers.AdminForm(
            form,
            list(fieldsets),
            # Clear prepopulated fields on a view-only form to avoid a crash.
            self.get_prepopulated_fields(request, obj) if add or self.has_change_permission(request, obj) else {},
            readonly_fields,
            model_admin=self)
        media = self.media + adminForm.media

        inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj)
        for inline_formset in inline_formsets:
            media = media + inline_formset.media

        if add:
            title = _('Add %s')
        elif self.has_change_permission(request, obj):
            title = _('Change %s')
        else:
            title = _('View %s')
        context = {
            **self.admin_site.each_context(request),
            'title': title % opts.verbose_name,
            'subtitle': str(obj) if obj else None,
            'adminform': adminForm,
            'object_id': object_id,
            'original': obj,
            'is_popup': IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET,
            'to_field': to_field,
            'media': media,
            'inline_admin_formsets': inline_formsets,
            'errors': helpers.AdminErrorList(form, formsets),
            'preserved_filters': self.get_preserved_filters(request),
        }

        # Hide the "Save" and "Save and continue" buttons if "Save as New" was
        # previously chosen to prevent the interface from getting confusing.
        if request.method == 'POST' and not form_validated and "_saveasnew" in request.POST:
            context['show_save'] = False
            context['show_save_and_continue'] = False
            # Use the change template instead of the add template.
            add = False

        context.update(extra_context or {})

        return self.render_change_form(request, context, add=add, change=not add, obj=obj, form_url=form_url)

    def add_view(self, request, form_url='', extra_context=None):
        return self.changeform_view(request, None, form_url, extra_context)

    def change_view(self, request, object_id, form_url='', extra_context=None):
        return self.changeform_view(request, object_id, form_url, extra_context)

    def _get_edited_object_pks(self, request, prefix):
        """Return POST data values of list_editable primary keys."""
        pk_pattern = re.compile(
            r'{}-\d+-{}$'.format(re.escape(prefix), self.model._meta.pk.name)
        )
        return [value for key, value in request.POST.items() if pk_pattern.match(key)]

    def _get_list_editable_queryset(self, request, prefix):
        """
        Based on POST data, return a queryset of the objects that were edited
        via list_editable.
        """
        object_pks = self._get_edited_object_pks(request, prefix)
        queryset = self.get_queryset(request)
        validate = queryset.model._meta.pk.to_python
        try:
            for pk in object_pks:
                validate(pk)
        except ValidationError:
            # Disable the optimization if the POST data was tampered with.
            return queryset
        return queryset.filter(pk__in=object_pks)

    @csrf_protect_m
    def changelist_view(self, request, extra_context=None):
        """
        The 'change list' admin view for this model.
        """
        from django.contrib.admin.views.main import ERROR_FLAG
        opts = self.model._meta
        app_label = opts.app_label
        if not self.has_view_or_change_permission(request):
            raise PermissionDenied

        try:
            cl = self.get_changelist_instance(request)
        except IncorrectLookupParameters:
            # Wacky lookup parameters were given, so redirect to the main
            # changelist page, without parameters, and pass an 'invalid=1'
            # parameter via the query string. If wacky parameters were given
            # and the 'invalid=1' parameter was already in the query string,
            # something is screwed up with the database, so display an error
            # page.
            if ERROR_FLAG in request.GET:
                return SimpleTemplateResponse('admin/invalid_setup.html', {
                    'title': _('Database error'),
                })
            return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1')

        # If the request was POSTed, this might be a bulk action or a bulk
        # edit. Try to look up an action or confirmation first, but if this
        # isn't an action the POST will fall through to the bulk edit check,
        # below.
        action_failed = False
        selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)

        actions = self.get_actions(request)
        # Actions with no confirmation
        if (actions and request.method == 'POST' and
                'index' in request.POST and '_save' not in request.POST):
            if selected:
                response = self.response_action(request, queryset=cl.get_queryset(request))
                if response:
                    return response
                else:
                    action_failed = True
            else:
                msg = _("Items must be selected in order to perform "
                        "actions on them. No items have been changed.")
                self.message_user(request, msg, messages.WARNING)
                action_failed = True

        # Actions with confirmation
        if (actions and request.method == 'POST' and
                helpers.ACTION_CHECKBOX_NAME in request.POST and
                'index' not in request.POST and '_save' not in request.POST):
            if selected:
                response = self.response_action(request, queryset=cl.get_queryset(request))
                if response:
                    return response
                else:
                    action_failed = True

        if action_failed:
            # Redirect back to the changelist page to avoid resubmitting the
            # form if the user refreshes the browser or uses the "No, take
            # me back" button on the action confirmation page.
            return HttpResponseRedirect(request.get_full_path())

        # If we're allowing changelist editing, we need to construct a formset
        # for the changelist given all the fields to be edited. Then we'll
        # use the formset to validate/process POSTed data.
        formset = cl.formset = None

        # Handle POSTed bulk-edit data.
        if request.method == 'POST' and cl.list_editable and '_save' in request.POST:
            if not self.has_change_permission(request):
                raise PermissionDenied
            FormSet = self.get_changelist_formset(request)
            modified_objects = self._get_list_editable_queryset(request, FormSet.get_default_prefix())
            formset = cl.formset = FormSet(request.POST, request.FILES, queryset=modified_objects)
            if formset.is_valid():
                changecount = 0
                for form in formset.forms:
                    if form.has_changed():
                        obj = self.save_form(request, form, change=True)
                        self.save_model(request, obj, form, change=True)
                        self.save_related(request, form, formsets=[], change=True)
                        change_msg = self.construct_change_message(request, form, None)
                        self.log_change(request, obj, change_msg)
                        changecount += 1

                if changecount:
                    msg = ngettext(
                        "%(count)s %(name)s was changed successfully.",
                        "%(count)s %(name)s were changed successfully.",
                        changecount
                    ) % {
                        'count': changecount,
                        'name': model_ngettext(opts, changecount),
                    }
                    self.message_user(request, msg, messages.SUCCESS)

                return HttpResponseRedirect(request.get_full_path())

        # Handle GET -- construct a formset for display.
        elif cl.list_editable and self.has_change_permission(request):
            FormSet = self.get_changelist_formset(request)
            formset = cl.formset = FormSet(queryset=cl.result_list)

        # Build the list of media to be used by the formset.
        if formset:
            media = self.media + formset.media
        else:
            media = self.media

        # Build the action form and populate it with available actions.
        if actions:
            action_form = self.action_form(auto_id=None)
            action_form.fields['action'].choices = self.get_action_choices(request)
            media += action_form.media
        else:
            action_form = None

        selection_note_all = ngettext(
            '%(total_count)s selected',
            'All %(total_count)s selected',
            cl.result_count
        )

        context = {
            **self.admin_site.each_context(request),
            'module_name': str(opts.verbose_name_plural),
            'selection_note': _('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)},
            'selection_note_all': selection_note_all % {'total_count': cl.result_count},
            'title': cl.title,
            'subtitle': None,
            'is_popup': cl.is_popup,
            'to_field': cl.to_field,
            'cl': cl,
            'media': media,
            'has_add_permission': self.has_add_permission(request),
            'opts': cl.opts,
            'action_form': action_form,
            'actions_on_top': self.actions_on_top,
            'actions_on_bottom': self.actions_on_bottom,
            'actions_selection_counter': self.actions_selection_counter,
            'preserved_filters': self.get_preserved_filters(request),
            **(extra_context or {}),
        }

        request.current_app = self.admin_site.name

        return TemplateResponse(request, self.change_list_template or [
            'admin/%s/%s/change_list.html' % (app_label, opts.model_name),
            'admin/%s/change_list.html' % app_label,
            'admin/change_list.html'
        ], context)

    def get_deleted_objects(self, objs, request):
        """
        Hook for customizing the delete process for the delete view and the
        "delete selected" action.
        """
        return get_deleted_objects(objs, request, self.admin_site)

    @csrf_protect_m
    def delete_view(self, request, object_id, extra_context=None):
        with transaction.atomic(using=router.db_for_write(self.model)):
            return self._delete_view(request, object_id, extra_context)

    def _delete_view(self, request, object_id, extra_context):
        "The 'delete' admin view for this model."
        opts = self.model._meta
        app_label = opts.app_label

        to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR))
        if to_field and not self.to_field_allowed(request, to_field):
            raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)

        obj = self.get_object(request, unquote(object_id), to_field)

        if not self.has_delete_permission(request, obj):
            raise PermissionDenied

        if obj is None:
            return self._get_obj_does_not_exist_redirect(request, opts, object_id)

        # Populate deleted_objects, a data structure of all related objects that
        # will also be deleted.
        deleted_objects, model_count, perms_needed, protected = self.get_deleted_objects([obj], request)

        if request.POST and not protected:  # The user has confirmed the deletion.
            if perms_needed:
                raise PermissionDenied
            obj_display = str(obj)
            attr = str(to_field) if to_field else opts.pk.attname
            obj_id = obj.serializable_value(attr)
            self.log_deletion(request, obj, obj_display)
            self.delete_model(request, obj)

            return self.response_delete(request, obj_display, obj_id)

        object_name = str(opts.verbose_name)

        if perms_needed or protected:
            title = _("Cannot delete %(name)s") % {"name": object_name}
        else:
            title = _("Are you sure?")

        context = {
            **self.admin_site.each_context(request),
            'title': title,
            'subtitle': None,
            'object_name': object_name,
            'object': obj,
            'deleted_objects': deleted_objects,
            'model_count': dict(model_count).items(),
            'perms_lacking': perms_needed,
            'protected': protected,
            'opts': opts,
            'app_label': app_label,
            'preserved_filters': self.get_preserved_filters(request),
            'is_popup': IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET,
            'to_field': to_field,
            **(extra_context or {}),
        }

        return self.render_delete_form(request, context)

    def history_view(self, request, object_id, extra_context=None):
        "The 'history' admin view for this model."
        from django.contrib.admin.models import LogEntry
        # First check if the user can see this history.
        model = self.model
        obj = self.get_object(request, unquote(object_id))
        if obj is None:
            return self._get_obj_does_not_exist_redirect(request, model._meta, object_id)

        if not self.has_view_or_change_permission(request, obj):
            raise PermissionDenied

        # Then get the history for this object.
        opts = model._meta
        app_label = opts.app_label
        action_list = LogEntry.objects.filter(
            object_id=unquote(object_id),
            content_type=get_content_type_for_model(model)
        ).select_related().order_by('action_time')

        context = {
            **self.admin_site.each_context(request),
            'title': _('Change history: %s') % obj,
            'subtitle': None,
            'action_list': action_list,
            'module_name': str(capfirst(opts.verbose_name_plural)),
            'object': obj,
            'opts': opts,
            'preserved_filters': self.get_preserved_filters(request),
            **(extra_context or {}),
        }

        request.current_app = self.admin_site.name

        return TemplateResponse(request, self.object_history_template or [
            "admin/%s/%s/object_history.html" % (app_label, opts.model_name),
            "admin/%s/object_history.html" % app_label,
            "admin/object_history.html"
        ], context)

    def get_formset_kwargs(self, request, obj, inline, prefix):
        formset_params = {
            'instance': obj,
            'prefix': prefix,
            'queryset': inline.get_queryset(request),
        }
        if request.method == 'POST':
            formset_params.update({
                'data': request.POST.copy(),
                'files': request.FILES,
                'save_as_new': '_saveasnew' in request.POST
            })
        return formset_params

    def _create_formsets(self, request, obj, change):
        "Helper function to generate formsets for add/change_view."
        formsets = []
        inline_instances = []
        prefixes = {}
        get_formsets_args = [request]
        if change:
            get_formsets_args.append(obj)
        for FormSet, inline in self.get_formsets_with_inlines(*get_formsets_args):
            prefix = FormSet.get_default_prefix()
            prefixes[prefix] = prefixes.get(prefix, 0) + 1
            if prefixes[prefix] != 1 or not prefix:
                prefix = "%s-%s" % (prefix, prefixes[prefix])
            formset_params = self.get_formset_kwargs(request, obj, inline, prefix)
            formset = FormSet(**formset_params)

            def user_deleted_form(request, obj, formset, index):
                """Return whether or not the user deleted the form."""
                return (
                    inline.has_delete_permission(request, obj) and
                    '{}-{}-DELETE'.format(formset.prefix, index) in request.POST
                )

            # Bypass validation of each view-only inline form (since the form's
            # data won't be in request.POST), unless the form was deleted.
            if not inline.has_change_permission(request, obj if change else None):
                for index, form in enumerate(formset.initial_forms):
                    if user_deleted_form(request, obj, formset, index):
                        continue
                    form._errors = {}
                    form.cleaned_data = form.initial
            formsets.append(formset)
            inline_instances.append(inline)
        return formsets, inline_instances


class InlineModelAdmin(BaseModelAdmin):
    """
    Options for inline editing of ``model`` instances.

    Provide ``fk_name`` to specify the attribute name of the ``ForeignKey``
    from ``model`` to its parent. This is required if ``model`` has more than
    one ``ForeignKey`` to its parent.
    """
    model = None
    fk_name = None
    formset = BaseInlineFormSet
    extra = 3
    min_num = None
    max_num = None
    template = None
    verbose_name = None
    verbose_name_plural = None
    can_delete = True
    show_change_link = False
    checks_class = InlineModelAdminChecks
    classes = None

    def __init__(self, parent_model, admin_site):
        self.admin_site = admin_site
        self.parent_model = parent_model
        self.opts = self.model._meta
        self.has_registered_model = admin_site.is_registered(self.model)
        super().__init__()
        if self.verbose_name is None:
            self.verbose_name = self.model._meta.verbose_name
        if self.verbose_name_plural is None:
            self.verbose_name_plural = self.model._meta.verbose_name_plural

    @property
    def media(self):
        extra = '' if settings.DEBUG else '.min'
        js = ['vendor/jquery/jquery%s.js' % extra, 'jquery.init.js', 'inlines.js']
        if self.filter_vertical or self.filter_horizontal:
            js.extend(['SelectBox.js', 'SelectFilter2.js'])
        if self.classes and 'collapse' in self.classes:
            js.append('collapse.js')
        return forms.Media(js=['admin/js/%s' % url for url in js])

    def get_extra(self, request, obj=None, **kwargs):
        """Hook for customizing the number of extra inline forms."""
        return self.extra

    def get_min_num(self, request, obj=None, **kwargs):
        """Hook for customizing the min number of inline forms."""
        return self.min_num

    def get_max_num(self, request, obj=None, **kwargs):
        """Hook for customizing the max number of extra inline forms."""
        return self.max_num

    def get_formset(self, request, obj=None, **kwargs):
        """Return a BaseInlineFormSet class for use in admin add/change views."""
        if 'fields' in kwargs:
            fields = kwargs.pop('fields')
        else:
            fields = flatten_fieldsets(self.get_fieldsets(request, obj))
        excluded = self.get_exclude(request, obj)
        exclude = [] if excluded is None else list(excluded)
        exclude.extend(self.get_readonly_fields(request, obj))
        if excluded is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
            # Take the custom ModelForm's Meta.exclude into account only if the
            # InlineModelAdmin doesn't define its own.
            exclude.extend(self.form._meta.exclude)
        # If exclude is an empty list we use None, since that's the actual
        # default.
        exclude = exclude or None
        can_delete = self.can_delete and self.has_delete_permission(request, obj)
        defaults = {
            'form': self.form,
            'formset': self.formset,
            'fk_name': self.fk_name,
            'fields': fields,
            'exclude': exclude,
            'formfield_callback': partial(self.formfield_for_dbfield, request=request),
            'extra': self.get_extra(request, obj, **kwargs),
            'min_num': self.get_min_num(request, obj, **kwargs),
            'max_num': self.get_max_num(request, obj, **kwargs),
            'can_delete': can_delete,
            **kwargs,
        }

        base_model_form = defaults['form']
        can_change = self.has_change_permission(request, obj) if request else True
        can_add = self.has_add_permission(request, obj) if request else True

        class DeleteProtectedModelForm(base_model_form):

            def hand_clean_DELETE(self):
                """
                We don't validate the 'DELETE' field itself because on
                templates it's not rendered using the field information, but
                just using a generic "deletion_field" of the InlineModelAdmin.
                """
                if self.cleaned_data.get(DELETION_FIELD_NAME, False):
                    using = router.db_for_write(self._meta.model)
                    collector = NestedObjects(using=using)
                    if self.instance._state.adding:
                        return
                    collector.collect([self.instance])
                    if collector.protected:
                        objs = []
                        for p in collector.protected:
                            objs.append(
                                # Translators: Model verbose name and instance representation,
                                # suitable to be an item in a list.
                                _('%(class_name)s %(instance)s') % {
                                    'class_name': p._meta.verbose_name,
                                    'instance': p}
                            )
                        params = {
                            'class_name': self._meta.model._meta.verbose_name,
                            'instance': self.instance,
                            'related_objects': get_text_list(objs, _('and')),
                        }
                        msg = _("Deleting %(class_name)s %(instance)s would require "
                                "deleting the following protected related objects: "
                                "%(related_objects)s")
                        raise ValidationError(msg, code='deleting_protected', params=params)

            def is_valid(self):
                result = super().is_valid()
                self.hand_clean_DELETE()
                return result

            def has_changed(self):
                # Protect against unauthorized edits.
                if not can_change and not self.instance._state.adding:
                    return False
                if not can_add and self.instance._state.adding:
                    return False
                return super().has_changed()

        defaults['form'] = DeleteProtectedModelForm

        if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
            defaults['fields'] = forms.ALL_FIELDS

        return inlineformset_factory(self.parent_model, self.model, **defaults)

    def _get_form_for_get_fields(self, request, obj=None):
        return self.get_formset(request, obj, fields=None).form

    def get_queryset(self, request):
        queryset = super().get_queryset(request)
        if not self.has_view_or_change_permission(request):
            queryset = queryset.none()
        return queryset

    def _has_any_perms_for_target_model(self, request, perms):
        """
        This method is called only when the ModelAdmin's model is for a
        ManyToManyField's implicit through model (if self.opts.auto_created).
        Return True if the user has any of the given permissions ('add',
        'change', etc.) for the model that points to the through model.
        """
        opts = self.opts
        # Find the target model of an auto-created many-to-many relationship.
        for field in opts.fields:
            if field.remote_field and field.remote_field.model != self.parent_model:
                opts = field.remote_field.model._meta
                break
        return any(
            request.user.has_perm('%s.%s' % (opts.app_label, get_permission_codename(perm, opts)))
            for perm in perms
        )

    def has_add_permission(self, request, obj):
        if self.opts.auto_created:
            # Auto-created intermediate models don't have their own
            # permissions.
            # The user needs to have the change permission for the
            # related model in order to be able to do anything with the
            # intermediate model.
            return self._has_any_perms_for_target_model(request, ['change'])
        return super().has_add_permission(request)

    def has_change_permission(self, request, obj=None):
        if self.opts.auto_created:
            # Same comment as has_add_permission().
            return self._has_any_perms_for_target_model(request, ['change'])
        return super().has_change_permission(request)

    def has_delete_permission(self, request, obj=None):
        if self.opts.auto_created:
            # Same comment as has_add_permission().
            return self._has_any_perms_for_target_model(request, ['change'])
        return super().has_delete_permission(request, obj)

    def has_view_permission(self, request, obj=None):
        if self.opts.auto_created:
            # Same comment as has_add_permission(). The 'change' permission
            # also implies the 'view' permission.
            return self._has_any_perms_for_target_model(request, ['view', 'change'])
        return super().has_view_permission(request)


class StackedInline(InlineModelAdmin):
    template = 'admin/edit_inline/stacked.html'


class TabularInline(InlineModelAdmin):
    template = 'admin/edit_inline/tabular.html'
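

# Usage sketch (illustrative only, not part of the module above): StackedInline
# and TabularInline are normally subclassed in a project's admin.py and attached
# to a ModelAdmin through its ``inlines`` option. The ``Book``/``Chapter`` models
# and the ``myapp`` import below are hypothetical stand-ins, not names defined here.
#
#     from django.contrib import admin
#     from myapp.models import Book, Chapter
#
#     class ChapterInline(admin.TabularInline):
#         model = Chapter             # child model with a ForeignKey to Book
#         extra = 1                   # render one empty form instead of the default 3
#         show_change_link = True     # link each row to the Chapter change page
#
#     @admin.register(Book)
#     class BookAdmin(admin.ModelAdmin):
#         inlines = [ChapterInline]   # edit Chapters inline on the Book change form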