diff --git a/.github/workflows/devskim.yml b/.github/workflows/devskim.yml
deleted file mode 100644
index b08b9ef3..00000000
--- a/.github/workflows/devskim.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-# This workflow uses actions that are not certified by GitHub.
-# They are provided by a third-party and are governed by
-# separate terms of service, privacy policy, and support
-# documentation.
-
-name: DevSkim
-
-on:
-  push:
-    branches: [ dev, master ]
-  pull_request:
-    branches: [ dev ]
-  schedule:
-    - cron: '29 14 * * 3'
-
-jobs:
-  lint:
-    name: DevSkim
-    runs-on: ubuntu-20.04
-    permissions:
-      actions: read
-      contents: read
-      security-events: write
-    steps:
-    - name: Checkout code
-      uses: actions/checkout@v3
-
-    - name: Run DevSkim scanner
-      uses: microsoft/DevSkim-Action@v1
-
-    - name: Upload DevSkim scan results to GitHub Security tab
-      uses: github/codeql-action/upload-sarif@v2
-      with:
-        sarif_file: devskim-results.sarif
diff --git a/README.md b/README.md
index aa529cc7..1f9b4c0e 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,7 @@ We hope you enjoy using the MSSQL-Django 3rd party backend.
 
 ## Features
 
-- Supports Django 3.2 and 4.0
+- Supports Django 2.2, 3.0, 3.1, 3.2 and 4.0
 - Tested on Microsoft SQL Server 2016, 2017, 2019
 - Passes most of the tests of the Django test suite
 - Compatible with
@@ -67,13 +67,6 @@ in DATABASES control the behavior of the backend:
 
   String. Database user password.
 
-- TOKEN
-
-  String. Access token fetched as a user or service principal which
-  has access to the database. E.g. when using `azure.identity`, the
-  result of `DefaultAzureCredential().get_token('https://database.windows.net/.default')`
-  can be passed.
-
 - AUTOCOMMIT
 
   Boolean. Set this to `False` if you want to disable
diff --git a/SUPPORT.md b/SUPPORT.md
index cb574695..b896c569 100644
--- a/SUPPORT.md
+++ b/SUPPORT.md
@@ -6,6 +6,8 @@
 This project uses GitHub Issues to track bugs and feature requests. Please search the existing
 issues before filing new issues to avoid duplicates. For new issues, file your bug or
 feature request as a new Issue.
 
+For help and questions about using this project, please utilize the Django Developers forum at https://groups.google.com/g/django-developers. Please search for an existing discussion on your topic before adding a new conversation. For new conversations, include "MSSQL" in a descriptive subject.
+
 ## Microsoft Support Policy
 
 Support for this project is limited to the resources listed above.
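For context on the TOKEN setting removed from the README above: a minimal sketch of how such an access token is fetched with the `azure-identity` package. Only the `DefaultAzureCredential().get_token('https://database.windows.net/.default')` call comes from the removed text; the DATABASES values shown are hypothetical.

```python
# Sketch, assuming `pip install azure-identity`: producing a value for the
# (now removed) TOKEN setting. Names like 'mydb' are placeholders.
from azure.identity import DefaultAzureCredential

credential = DefaultAzureCredential()
# Scope quoted in the removed README section (the Azure SQL resource URI).
access_token = credential.get_token('https://database.windows.net/.default').token

DATABASES = {
    'default': {
        'ENGINE': 'mssql',
        'NAME': 'mydb',                           # hypothetical
        'HOST': 'myserver.database.windows.net',  # hypothetical
        'TOKEN': access_token,                    # setting removed by this diff
        'OPTIONS': {'driver': 'ODBC Driver 17 for SQL Server'},
    },
}
```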
diff --git a/mssql/base.py b/mssql/base.py
index fbdd979d..1f75d4f3 100644
--- a/mssql/base.py
+++ b/mssql/base.py
@@ -7,7 +7,6 @@
 import os
 import re
 import time
-import struct
 
 from django.core.exceptions import ImproperlyConfigured
 
@@ -54,22 +53,7 @@ def encode_connection_string(fields):
         '%s=%s' % (k, encode_value(v))
         for k, v in fields.items()
     )
 
-def prepare_token_for_odbc(token):
-    """
-    Will prepare token for passing it to the odbc driver, as it expects
-    bytes and not a string
-    :param token:
-    :return: packed binary byte representation of token string
-    """
-    if not isinstance(token, str):
-        raise TypeError("Invalid token format provided.")
-    tokenstr = token.encode()
-    exptoken = b""
-    for i in tokenstr:
-        exptoken += bytes({i})
-        exptoken += bytes(1)
-    return struct.pack("=i", len(exptoken)) + exptoken
 
 def encode_value(v):
     """If the value contains a semicolon, or starts with a left curly brace,
@@ -310,7 +294,7 @@ def get_new_connection(self, conn_params):
                 cstr_parts['UID'] = user
                 if 'Authentication=ActiveDirectoryInteractive' not in options_extra_params:
                     cstr_parts['PWD'] = password
-            elif 'TOKEN' not in conn_params:
+            else:
                 if ms_drivers.match(driver) and 'Authentication=ActiveDirectoryMsi' not in options_extra_params:
                     cstr_parts['Trusted_Connection'] = trusted_connection
                 else:
@@ -340,17 +324,11 @@ def get_new_connection(self, conn_params):
         conn = None
         retry_count = 0
         need_to_retry = False
-        args = {
-            'unicode_results': unicode_results,
-            'timeout': timeout,
-        }
-        if 'TOKEN' in conn_params:
-            args['attrs_before'] = {
-                1256: prepare_token_for_odbc(conn_params['TOKEN'])
-            }
         while conn is None:
             try:
-                conn = Database.connect(connstr, **args)
+                conn = Database.connect(connstr,
+                                        unicode_results=unicode_results,
+                                        timeout=timeout)
             except Exception as e:
                 for error_number in self._transient_error_numbers:
                     if error_number in e.args[1]:
diff --git a/mssql/compiler.py b/mssql/compiler.py
index 8a3da541..14c630fd 100644
--- a/mssql/compiler.py
+++ b/mssql/compiler.py
@@ -319,9 +319,7 @@ def as_sql(self, with_limits=True, with_col_aliases=False):
             # For subqueries with an ORDER BY clause, SQL Server also
             # requires a TOP or OFFSET clause which is not generated for
             # Django 2.x. See https://github.com/microsoft/mssql-django/issues/12
-            # Add OFFSET for all Django versions.
-            # https://github.com/microsoft/mssql-django/issues/109
-            if not (do_offset or do_limit):
+            if django.VERSION < (3, 0, 0) and not (do_offset or do_limit):
                 result.append("OFFSET 0 ROWS")
 
         # SQL Server requires the backend-specific emulation (2008 or earlier)
@@ -428,16 +426,6 @@ def get_returned_fields(self):
             return self.returning_fields
         return self.return_id
 
-    def can_return_columns_from_insert(self):
-        if django.VERSION >= (3, 0, 0):
-            return self.connection.features.can_return_columns_from_insert
-        return self.connection.features.can_return_id_from_insert
-
-    def can_return_rows_from_bulk_insert(self):
-        if django.VERSION >= (3, 0, 0):
-            return self.connection.features.can_return_rows_from_bulk_insert
-        return self.connection.features.can_return_ids_from_bulk_insert
-
     def fix_auto(self, sql, opts, fields, qn):
         if opts.auto_field is not None:
             # db_column is None if not explicitly specified by model field
@@ -453,39 +441,15 @@ def fix_auto(self, sql, opts, fields, qn):
 
         return sql
 
-    def bulk_insert_default_values_sql(self, table):
-        seed_rows_number = 8
-        cross_join_power = 4  # 8^4 = 4096 > maximum allowed batch size for the backend = 1000
-
-        def generate_seed_rows(n):
-            return " UNION ALL ".join("SELECT 1 AS x" for _ in range(n))
-
-        def cross_join(p):
-            return ", ".join("SEED_ROWS AS _%s" % i for i in range(p))
-
-        return """
-            WITH SEED_ROWS AS (%s)
-            MERGE INTO %s
-            USING (
-                SELECT TOP %s * FROM (SELECT 1 as x FROM %s) FAKE_ROWS
-            ) FAKE_DATA
-            ON 1 = 0
-            WHEN NOT MATCHED THEN
-            INSERT DEFAULT VALUES
-        """ % (generate_seed_rows(seed_rows_number),
-               table,
-               len(self.query.objs),
-               cross_join(cross_join_power))
-
     def as_sql(self):
         # We don't need quote_name_unless_alias() here, since these are all
         # going to be column names (so we can avoid the extra overhead).
         qn = self.connection.ops.quote_name
         opts = self.query.get_meta()
         result = ['INSERT INTO %s' % qn(opts.db_table)]
+        fields = self.query.fields or [opts.pk]
 
         if self.query.fields:
-            fields = self.query.fields
             result.append('(%s)' % ', '.join(qn(f.column) for f in fields))
             values_format = 'VALUES (%s)'
             value_rows = [
@@ -506,31 +470,11 @@ def as_sql(self):
 
         placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows)
 
-        if self.get_returned_fields() and self.can_return_columns_from_insert():
-            if self.can_return_rows_from_bulk_insert():
-                if not(self.query.fields):
-                    # There isn't really a single statement to bulk multiple DEFAULT VALUES insertions,
-                    # so we have to use a workaround:
-                    # https://dba.stackexchange.com/questions/254771/insert-multiple-rows-into-a-table-with-only-an-identity-column
-                    result = [self.bulk_insert_default_values_sql(qn(opts.db_table))]
-                    r_sql, self.returning_params = self.connection.ops.return_insert_columns(self.get_returned_fields())
-                    if r_sql:
-                        result.append(r_sql)
-                    sql = " ".join(result) + ";"
-                    return [(sql, None)]
-                # Regular bulk insert
-                params = []
-                r_sql, self.returning_params = self.connection.ops.return_insert_columns(self.get_returned_fields())
-                if r_sql:
-                    result.append(r_sql)
-                params += [self.returning_params]
-                params += param_rows
-                result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
-            else:
-                result.insert(0, 'SET NOCOUNT ON')
-                result.append((values_format + ';') % ', '.join(placeholder_rows[0]))
-                params = [param_rows[0]]
-                result.append('SELECT CAST(SCOPE_IDENTITY() AS bigint)')
+        if self.get_returned_fields() and self.connection.features.can_return_id_from_insert:
+            result.insert(0, 'SET NOCOUNT ON')
+            result.append((values_format + ';') % ', '.join(placeholder_rows[0]))
+            params = [param_rows[0]]
+            result.append('SELECT CAST(SCOPE_IDENTITY() AS bigint)')
             sql = [(" ".join(result), tuple(chain.from_iterable(params)))]
         else:
             if can_bulk:
diff --git a/mssql/features.py b/mssql/features.py
index 8b6dff4b..12b99519 100644
--- a/mssql/features.py
+++ b/mssql/features.py
@@ -12,7 +12,6 @@ class DatabaseFeatures(BaseDatabaseFeatures):
     can_introspect_small_integer_field = True
     can_return_columns_from_insert = True
     can_return_id_from_insert = True
-    can_return_rows_from_bulk_insert = True
     can_rollback_ddl = True
     can_use_chunked_reads = False
     for_update_after_from = True
diff --git a/mssql/functions.py b/mssql/functions.py
index b0958d69..f8cb17dc 100644
--- a/mssql/functions.py
+++ b/mssql/functions.py
@@ -4,16 +4,18 @@
 import json
 
 from django import VERSION
-from django.core import validators
+
 from django.db import NotSupportedError, connections, transaction
-from django.db.models import BooleanField, CheckConstraint, Value
-from django.db.models.expressions import Case, Exists, Expression, OrderBy, When, Window
-from django.db.models.fields import BinaryField, Field
+from django.db.models import BooleanField, Value
 from django.db.models.functions import Cast, NthValue
-from django.db.models.functions.math import ATan2, Ln, Log, Mod, Round
-from django.db.models.lookups import In, Lookup
-from django.db.models.query import QuerySet
+from django.db.models.functions.math import ATan2, Log, Ln, Mod, Round
+from django.db.models.expressions import Case, Exists, OrderBy, When, Window, Expression
+from django.db.models.lookups import Lookup, In
+from django.db.models import lookups, CheckConstraint
+from django.db.models.fields import BinaryField, Field
 from django.db.models.sql.query import Query
+from django.db.models.query import QuerySet
+from django.core import validators
 
 if VERSION >= (3, 1):
     from django.db.models.fields.json import (
@@ -65,11 +67,9 @@ def sqlserver_nth_value(self, compiler, connection, **extra_content):
 def sqlserver_round(self, compiler, connection, **extra_context):
     return self.as_sql(compiler, connection, template='%(function)s(%(expressions)s, 0)', **extra_context)
 
-
 def sqlserver_random(self, compiler, connection, **extra_context):
     return self.as_sql(compiler, connection, function='RAND', **extra_context)
 
-
 def sqlserver_window(self, compiler, connection, template=None):
     # MSSQL window functions require an OVER clause with ORDER BY
     if self.order_by is None:
@@ -125,13 +125,6 @@ def sqlserver_orderby(self, compiler, connection):
 
 
 def split_parameter_list_as_sql(self, compiler, connection):
-    if connection.vendor == 'microsoft':
-        return mssql_split_parameter_list_as_sql(self, compiler, connection)
-    else:
-        return in_split_parameter_list_as_sql(self, compiler, connection)
-
-
-def mssql_split_parameter_list_as_sql(self, compiler, connection):
     # Insert In clause parameters 1000 at a time into a temp table.
     lhs, _ = self.process_lhs(compiler, connection)
     _, rhs_params = self.batch_process_rhs(compiler, connection)
@@ -150,7 +143,6 @@ def mssql_split_parameter_list_as_sql(self, compiler, connection):
 
     return in_clause, ()
 
-
 def unquote_json_rhs(rhs_params):
     for value in rhs_params:
         value = json.loads(value)
@@ -158,13 +150,12 @@ def unquote_json_rhs(rhs_params):
             rhs_params = [param.replace('"', '') for param in rhs_params]
     return rhs_params
 
-
 def json_KeyTransformExact_process_rhs(self, compiler, connection):
-    rhs, rhs_params = key_transform_exact_process_rhs(self, compiler, connection)
-    if connection.vendor == 'microsoft':
-        rhs_params = unquote_json_rhs(rhs_params)
-    return rhs, rhs_params
+    if isinstance(self.rhs, KeyTransform):
+        return super(lookups.Exact, self).process_rhs(compiler, connection)
+    rhs, rhs_params = super(KeyTransformExact, self).process_rhs(compiler, connection)
+    return rhs, unquote_json_rhs(rhs_params)
 
 def json_KeyTransformIn(self, compiler, connection):
     lhs, _ = super(KeyTransformIn, self).process_lhs(compiler, connection)
@@ -172,7 +163,6 @@ def json_KeyTransformIn(self, compiler, connection):
 
     return (lhs + ' IN ' + rhs, unquote_json_rhs(rhs_params))
 
-
 def json_HasKeyLookup(self, compiler, connection):
     # Process JSON path from the left-hand side.
     if isinstance(self.lhs, KeyTransform):
@@ -203,7 +193,6 @@
 
     return sql % tuple(rhs_params), []
 
-
 def BinaryField_init(self, *args, **kwargs):
     # Add max_length option for BinaryField, default to max
     kwargs.setdefault('editable', False)
@@ -213,7 +202,6 @@ def BinaryField_init(self, *args, **kwargs):
     else:
         self.max_length = 'max'
 
-
 def _get_check_sql(self, model, schema_editor):
     if VERSION >= (3, 1):
         query = Query(model=model, alias_cols=False)
@@ -222,16 +210,13 @@ def _get_check_sql(self, model, schema_editor):
     where = query.build_where(self.check)
     compiler = query.get_compiler(connection=schema_editor.connection)
     sql, params = where.as_sql(compiler, schema_editor.connection)
-    if schema_editor.connection.vendor == 'microsoft':
-        try:
-            for p in params:
-                str(p).encode('ascii')
-        except UnicodeEncodeError:
-            sql = sql.replace('%s', 'N%s')
+    try:
+        for p in params: str(p).encode('ascii')
+    except UnicodeEncodeError:
+        sql = sql.replace('%s', 'N%s')
     return sql % tuple(schema_editor.quote_value(p) for p in params)
 
-
 def bulk_update_with_default(self, objs, fields, batch_size=None, default=0):
     """
     Update the given fields in each of the given objects in the database.
@@ -270,10 +255,10 @@ def bulk_update_with_default(self, objs, fields, batch_size=None, default=0):
                 attr = getattr(obj, field.attname)
                 if not isinstance(attr, Expression):
                     if attr is None:
-                        value_none_counter += 1
+                        value_none_counter+=1
                     attr = Value(attr, output_field=field)
                 when_statements.append(When(pk=obj.pk, then=attr))
-            if connections[self.db].vendor == 'microsoft' and value_none_counter == len(when_statements):
+            if(value_none_counter == len(when_statements)):
                 case_statement = Case(*when_statements, output_field=field, default=Value(default))
             else:
                 case_statement = Case(*when_statements, output_field=field)
@@ -287,15 +272,10 @@ def bulk_update_with_default(self, objs, fields, batch_size=None, default=0):
             rows_updated += self.filter(pk__in=pks).update(**update_kwargs)
         return rows_updated
 
-
 ATan2.as_microsoft = sqlserver_atan2
-# Need copy of old In.split_parameter_list_as_sql for other backends to call
-in_split_parameter_list_as_sql = In.split_parameter_list_as_sql
 In.split_parameter_list_as_sql = split_parameter_list_as_sql
 if VERSION >= (3, 1):
     KeyTransformIn.as_microsoft = json_KeyTransformIn
-    # Need copy of old KeyTransformExact.process_rhs to call later
-    key_transform_exact_process_rhs = KeyTransformExact.process_rhs
     KeyTransformExact.process_rhs = json_KeyTransformExact_process_rhs
     HasKeyLookup.as_microsoft = json_HasKeyLookup
 Ln.as_microsoft = sqlserver_ln
diff --git a/mssql/introspection.py b/mssql/introspection.py
index b69efd58..66aefce3 100644
--- a/mssql/introspection.py
+++ b/mssql/introspection.py
@@ -253,14 +253,9 @@ def get_constraints(self, cursor, table_name):
                 constraints[constraint] = {
                     "columns": [],
                     "primary_key": kind.lower() == "primary key",
-                    # In the sys.indexes table, primary key indexes have is_unique_constraint as false,
-                    # but is_unique as true.
                     "unique": kind.lower() in ["primary key", "unique"],
-                    "unique_constraint": kind.lower() == "unique",
                     "foreign_key": (ref_table, ref_column) if kind.lower() == "foreign key" else None,
                     "check": False,
-                    # Potentially misleading: primary key and unique constraints still have indexes attached to them.
-                    # Should probably be updated with the additional info from the sys.indexes table we fetch later on.
"index": False, } # Record the details @@ -285,7 +280,6 @@ def get_constraints(self, cursor, table_name): "columns": [], "primary_key": False, "unique": False, - "unique_constraint": False, "foreign_key": None, "check": True, "index": False, @@ -297,7 +291,6 @@ def get_constraints(self, cursor, table_name): SELECT i.name AS index_name, i.is_unique, - i.is_unique_constraint, i.is_primary_key, i.type, i.type_desc, @@ -323,13 +316,12 @@ def get_constraints(self, cursor, table_name): ic.index_column_id ASC """, [table_name]) indexes = {} - for index, unique, unique_constraint, primary, type_, desc, order, column in cursor.fetchall(): + for index, unique, primary, type_, desc, order, column in cursor.fetchall(): if index not in indexes: indexes[index] = { "columns": [], "primary_key": primary, "unique": unique, - "unique_constraint": unique_constraint, "foreign_key": None, "check": False, "index": True, diff --git a/mssql/operations.py b/mssql/operations.py index 8d607fdd..429beefb 100644 --- a/mssql/operations.py +++ b/mssql/operations.py @@ -188,24 +188,6 @@ def datetime_trunc_sql(self, lookup_type, field_name, tzname): sql = "CONVERT(datetime2, CONVERT(varchar, %s, 20))" % field_name return sql - def fetch_returned_insert_rows(self, cursor): - """ - Given a cursor object that has just performed an INSERT...OUTPUT INSERTED - statement into a table, return the list of returned data. - """ - return cursor.fetchall() - - def return_insert_columns(self, fields): - if not fields: - return '', () - columns = [ - '%s.%s' % ( - 'INSERTED', - self.quote_name(field.column), - ) for field in fields - ] - return 'OUTPUT %s' % ', '.join(columns), () - def for_update_sql(self, nowait=False, skip_locked=False, of=()): if skip_locked: return 'WITH (ROWLOCK, UPDLOCK, READPAST)' @@ -368,7 +350,6 @@ def _sql_flush_new(self, style, tables, *, reset_sequences=False, allow_cascade= return [ sequence for sequence in self.connection.introspection.sequence_list() - if sequence['table'].lower() in [table.lower() for table in tables] ] return [] diff --git a/mssql/schema.py b/mssql/schema.py index 8556c15e..435369a7 100644 --- a/mssql/schema.py +++ b/mssql/schema.py @@ -171,12 +171,7 @@ def alter_unique_together(self, model, old_unique_together, new_unique_together) news = {tuple(fields) for fields in new_unique_together} # Deleted uniques for fields in olds.difference(news): - meta_constraint_names = {constraint.name for constraint in model._meta.constraints} - meta_index_names = {constraint.name for constraint in model._meta.indexes} - columns = [model._meta.get_field(field).column for field in fields] - self._delete_unique_constraint_for_columns( - model, columns, exclude=meta_constraint_names | meta_index_names, strict=True) - + self._delete_composed_index(model, fields, {'unique': True}, self.sql_delete_index) # Created uniques if django_version >= (4, 0): for field_names in news.difference(olds): @@ -230,14 +225,10 @@ def _model_indexes_sql(self, model): output.append(index.create_sql(model, self)) return output - def _db_table_constraint_names(self, db_table, column_names=None, column_match_any=False, - unique=None, primary_key=None, index=None, foreign_key=None, - check=None, type_=None, exclude=None, unique_constraint=None): - """ - Return all constraint names matching the columns and conditions. 
-        Modified from base `_constraint_names`
-        `any_column_matches`=False: (default) only return constraints covering exactly `column_names`
-        `any_column_matches`=True : return any constraints which include at least 1 of `column_names`
-        """
+    def _db_table_constraint_names(self, db_table, column_names=None, unique=None,
+                                   primary_key=None, index=None, foreign_key=None,
+                                   check=None, type_=None, exclude=None):
+        """Return all constraint names matching the columns and conditions."""
         if column_names is not None:
             column_names = [
                 self.connection.introspection.identifier_converter(name)
@@ -247,13 +238,9 @@ def _db_table_constraint_names(self, db_table, column_names=None, column_match_any=False,
             constraints = self.connection.introspection.get_constraints(cursor, db_table)
         result = []
         for name, infodict in constraints.items():
-            if column_names is None or column_names == infodict['columns'] or (
-                column_match_any and any(col in infodict['columns'] for col in column_names)
-            ):
+            if column_names is None or column_names == infodict['columns']:
                 if unique is not None and infodict['unique'] != unique:
                     continue
-                if unique_constraint is not None and infodict['unique_constraint'] != unique_constraint:
-                    continue
                 if primary_key is not None and infodict['primary_key'] != primary_key:
                     continue
                 if index is not None and infodict['index'] != index:
@@ -306,7 +293,16 @@ def _alter_field(self, model, old_field, new_field, old_type, new_type,
                 self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name))
         # Has unique been removed?
         if old_field.unique and (not new_field.unique or self._field_became_primary_key(old_field, new_field)):
-            self._delete_unique_constraint_for_columns(model, [old_field.column], strict=strict)
+            # Find the unique constraint for this field
+            constraint_names = self._constraint_names(model, [old_field.column], unique=True, primary_key=False)
+            if strict and len(constraint_names) != 1:
+                raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
+                    len(constraint_names),
+                    model._meta.db_table,
+                    old_field.column,
+                ))
+            for constraint_name in constraint_names:
+                self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name))
         # Drop incoming FK constraints if the field is a primary key or unique,
         # which might be a to_field target, and things are going to change.
         drop_foreign_keys = (
@@ -369,32 +365,29 @@ def _alter_field(self, model, old_field, new_field, old_type, new_type,
         # Have they renamed the column?
         if old_field.column != new_field.column:
             sql_restore_index = ''
-            # Drop any unique indexes which include the column to be renamed
-            index_names = self._db_table_constraint_names(
-                db_table=model._meta.db_table, column_names=[old_field.column], column_match_any=True,
-                index=True, unique=True,
-            )
+            # Drop unique indexes for table to be altered
+            index_names = self._db_table_constraint_names(model._meta.db_table, index=True)
             for index_name in index_names:
-                # Before dropping figure out how to recreate it afterwards
-                with self.connection.cursor() as cursor:
-                    cursor.execute(f"""
-                        SELECT COL_NAME(ic.object_id,ic.column_id) AS column_name,
-                        filter_definition
-                        FROM sys.indexes AS i
-                        INNER JOIN sys.index_columns AS ic
-                        ON i.object_id = ic.object_id AND i.index_id = ic.index_id
-                        WHERE i.object_id = OBJECT_ID('{model._meta.db_table}')
-                        and i.name = '{index_name}'
-                    """)
-                    result = cursor.fetchall()
-                    columns_to_recreate_index = ', '.join(['%s' % self.quote_name(column[0]) for column in result])
-                    filter_definition = result[0][1]
-                    sql_restore_index += f'CREATE UNIQUE INDEX {index_name} ON {model._meta.db_table} ({columns_to_recreate_index}) WHERE {filter_definition};'
-                self.execute(self._db_table_delete_constraint_sql(
-                    self.sql_delete_index, model._meta.db_table, index_name))
+                if(index_name.endswith('uniq')):
+                    with self.connection.cursor() as cursor:
+                        cursor.execute(f"""
+                            SELECT COL_NAME(ic.object_id,ic.column_id) AS column_name,
+                            filter_definition
+                            FROM sys.indexes AS i
+                            INNER JOIN sys.index_columns AS ic
+                            ON i.object_id = ic.object_id AND i.index_id = ic.index_id
+                            WHERE i.object_id = OBJECT_ID('{model._meta.db_table}')
+                            and i.name = '{index_name}'
+                        """)
+                        result = cursor.fetchall()
+                        columns_to_recreate_index = ', '.join(['%s' % self.quote_name(column[0]) for column in result])
+                        filter_definition = result[0][1]
+                        sql_restore_index += f'CREATE UNIQUE INDEX {index_name} ON {model._meta.db_table} ({columns_to_recreate_index}) WHERE {filter_definition};'
+                    self.execute(self._db_table_delete_constraint_sql(
+                        self.sql_delete_index, model._meta.db_table, index_name))
             self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type))
-            # Restore index(es) now the column has been renamed
-            if sql_restore_index:
+            # Restore indexes for altered table
+            if(sql_restore_index):
                 self.execute(sql_restore_index.replace(f'[{old_field.column}]', f'[{new_field.column}]'))
             # Rename all references to the renamed column.
             for sql in self.deferred_sql:
@@ -437,24 +430,21 @@ def _alter_field(self, model, old_field, new_field, old_type, new_type,
             fragment = self._alter_column_null_sql(model, old_field, new_field)
             if fragment:
                 null_actions.append(fragment)
-        # Drop unique constraint, SQL Server requires explicit deletion
-        self._delete_unique_constraints(model, old_field, new_field, strict)
-        # Drop indexes, SQL Server requires explicit deletion
-        indexes_dropped = self._delete_indexes(model, old_field, new_field)
-        auto_index_names = []
-        for index_from_meta in model._meta.indexes:
-            auto_index_names.append(self._create_index_name(model._meta.db_table, index_from_meta.fields))
-
-        if (
-            new_field.get_internal_type() not in ("JSONField", "TextField") and
-            (old_field.db_index or not new_field.db_index) and
-            new_field.db_index or
-            ((indexes_dropped and sorted(indexes_dropped) == sorted([index.name for index in model._meta.indexes])) or
-             (indexes_dropped and sorted(indexes_dropped) == sorted(auto_index_names)))
-        ):
-            create_index_sql_statement = self._create_index_sql(model, [new_field])
-            if create_index_sql_statement.__str__() not in [sql.__str__() for sql in self.deferred_sql]:
-                post_actions.append((create_index_sql_statement, ()))
+        if not new_field.null:
+            # Drop unique constraint, SQL Server requires explicit deletion
+            self._delete_unique_constraints(model, old_field, new_field, strict)
+            # Drop indexes, SQL Server requires explicit deletion
+            indexes_dropped = self._delete_indexes(model, old_field, new_field)
+            if (
+                new_field.get_internal_type() not in ("JSONField", "TextField") and
+                (old_field.db_index or not new_field.db_index) and
+                new_field.db_index or
+                (indexes_dropped and sorted(indexes_dropped) == sorted(
+                    [index.name for index in model._meta.indexes]))
+            ):
+                create_index_sql_statement = self._create_index_sql(model, [new_field])
+                if create_index_sql_statement.__str__() not in [sql.__str__() for sql in self.deferred_sql]:
+                    post_actions.append((create_index_sql_statement, ()))
         # Only if we have a default and there is a change from NULL to NOT NULL
         four_way_default_alteration = (
             new_field.has_default() and
@@ -532,7 +522,7 @@ def _alter_field(self, model, old_field, new_field, old_type, new_type,
             self.execute(self._create_index_sql(model, [new_field]))
 
         # Restore indexes & unique constraints deleted above, SQL Server requires explicit restoration
-        if (old_type != new_type or (old_field.null != new_field.null)) and (
+        if (old_type != new_type or (old_field.null and not new_field.null)) and (
             old_field.column == new_field.column
         ):
             # Restore unique constraints
@@ -692,29 +682,21 @@ def _delete_unique_constraints(self, model, old_field, new_field, strict=False):
             unique_columns.append([old_field.column])
         if unique_columns:
             for columns in unique_columns:
-                self._delete_unique_constraint_for_columns(model, columns, strict=strict)
-
-    def _delete_unique_constraint_for_columns(self, model, columns, strict=False, **constraint_names_kwargs):
-        constraint_names_unique = self._db_table_constraint_names(
-            model._meta.db_table, columns, unique=True, unique_constraint=True, **constraint_names_kwargs)
-        constraint_names_primary = self._db_table_constraint_names(
-            model._meta.db_table, columns, unique=True, primary_key=True, **constraint_names_kwargs)
-        constraint_names_normal = constraint_names_unique + constraint_names_primary
-        constraint_names_index = self._db_table_constraint_names(
-            model._meta.db_table, columns, unique=True, unique_constraint=False, primary_key=False,
-            **constraint_names_kwargs)
-        constraint_names = constraint_names_normal + constraint_names_index
-        if strict and len(constraint_names) != 1:
-            raise ValueError("Found wrong number (%s) of unique constraints for columns %s" % (
-                len(constraint_names),
-                repr(columns),
-            ))
-        for constraint_name in constraint_names_normal:
-            self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name))
-        # Unique indexes which are not table constraints must be deleted using the appropriate SQL.
-        # These may exist for example to enforce ANSI-compliant unique constraints on nullable columns.
-        for index_name in constraint_names_index:
-            self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
+                constraint_names_normal = self._constraint_names(model, columns, unique=True, index=False)
+                constraint_names_index = self._constraint_names(model, columns, unique=True, index=True)
+                constraint_names = constraint_names_normal + constraint_names_index
+                if strict and len(constraint_names) != 1:
+                    raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
+                        len(constraint_names),
+                        model._meta.db_table,
+                        old_field.column,
+                    ))
+                for constraint_name in constraint_names_normal:
+                    self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name))
+                # Unique indexes which are not table constraints must be deleted using the appropriate SQL.
+                # These may exist for example to enforce ANSI-compliant unique constraints on nullable columns.
+                for index_name in constraint_names_index:
+                    self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
 
     def _rename_field_sql(self, table, old_field, new_field, new_type):
         new_type = self._set_field_new_type_null_status(old_field, new_type)
diff --git a/setup.py b/setup.py
index 9a51638a..a3cdc416 100644
--- a/setup.py
+++ b/setup.py
@@ -15,6 +15,9 @@
     'Programming Language :: Python :: 3.7',
     'Programming Language :: Python :: 3.8',
     'Programming Language :: Python :: 3.9',
+    'Framework :: Django :: 2.2',
+    'Framework :: Django :: 3.0',
+    'Framework :: Django :: 3.1',
     'Framework :: Django :: 3.2',
     'Framework :: Django :: 4.0',
 ]
@@ -25,7 +28,7 @@
 
 setup(
     name='mssql-django',
-    version='1.1.3',
+    version='1.1.2',
     description='Django backend for Microsoft SQL Server',
     long_description=long_description,
     long_description_content_type='text/markdown',
diff --git a/testapp/migrations/0002_test_unique_nullable_part1.py b/testapp/migrations/0002_test_unique_nullable_part1.py
index 2bad11dc..911a5952 100644
--- a/testapp/migrations/0002_test_unique_nullable_part1.py
+++ b/testapp/migrations/0002_test_unique_nullable_part1.py
@@ -15,7 +15,6 @@ class Migration(migrations.Migration):
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                 ('test_field', models.CharField(max_length=100, null=True, unique=True)),
-                ('y', models.IntegerField(unique=True, null=True)),
             ],
         ),
     ]
diff --git a/testapp/migrations/0003_test_unique_nullable_part2.py b/testapp/migrations/0003_test_unique_nullable_part2.py
index 76064f72..14280266 100644
--- a/testapp/migrations/0003_test_unique_nullable_part2.py
+++ b/testapp/migrations/0003_test_unique_nullable_part2.py
@@ -16,10 +16,4 @@ class Migration(migrations.Migration):
             field=models.CharField(default='', max_length=100, unique=True),
             preserve_default=False,
         ),
-        # Test for renaming of a unique+nullable column
-        migrations.RenameField(
-            model_name='testuniquenullablemodel',
-            old_name='y',
-            new_name='y_renamed',
-        ),
     ]
diff --git a/testapp/migrations/0011_test_unique_constraints.py b/testapp/migrations/0011_test_unique_constraints.py
index d36ab620..b2a26dad 100644
--- a/testapp/migrations/0011_test_unique_constraints.py
+++ b/testapp/migrations/0011_test_unique_constraints.py
@@ -25,9 +25,6 @@ class Migration(migrations.Migration):
                 ('_type', models.CharField(max_length=50)),
                 ('status', models.CharField(max_length=50)),
             ],
-            # Stop Django attempting to automatically create migrations for this table. Instead
-            # migrations are attempted manually in `test_unsupportable_unique_constraint` where
-            # they are expected to fail.
             options={
                 'managed': False,
             },
diff --git a/testapp/migrations/0012_test_indexes_retained_part1.py b/testapp/migrations/0012_test_indexes_retained_part1.py
index 3123e31f..288e3dff 100644
--- a/testapp/migrations/0012_test_indexes_retained_part1.py
+++ b/testapp/migrations/0012_test_indexes_retained_part1.py
@@ -7,7 +7,7 @@ class Migration(migrations.Migration):
         ('testapp', '0011_test_unique_constraints'),
     ]
 
-    # Prep test for issue https://github.com/microsoft/mssql-django/issues/14
+    # Prep test for issue https://github.com/ESSolutions/django-mssql-backend/issues/58
     operations = [
         migrations.CreateModel(
             name='TestIndexesRetained',
diff --git a/testapp/migrations/0013_test_indexes_retained_part2.py b/testapp/migrations/0013_test_indexes_retained_part2.py
index e81efd9b..1fb53039 100644
--- a/testapp/migrations/0013_test_indexes_retained_part2.py
+++ b/testapp/migrations/0013_test_indexes_retained_part2.py
@@ -7,7 +7,7 @@ class Migration(migrations.Migration):
         ('testapp', '0012_test_indexes_retained_part1'),
     ]
 
-    # Run test for issue https://github.com/microsoft/mssql-django/issues/14
+    # Run test for issue https://github.com/ESSolutions/django-mssql-backend/issues/58
     # where the following operations should leave indexes intact
     operations = [
         migrations.AlterField(
diff --git a/testapp/migrations/0016_jsonmodel.py b/testapp/migrations/0016_jsonmodel.py
deleted file mode 100644
index 9f8965f9..00000000
--- a/testapp/migrations/0016_jsonmodel.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Generated by Django 4.0.1 on 2022-02-01 15:58
-
-from django import VERSION
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('testapp', '0015_test_rename_m2mfield_part2'),
-    ]
-
-    # JSONField added in Django 3.1
-    if VERSION >= (3, 1):
-        operations = [
-            migrations.CreateModel(
-                name='JSONModel',
-                fields=[
-                    ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                    ('value', models.JSONField()),
-                ],
-                options={
-                    'required_db_features': {'supports_json_field'},
-                },
-            ),
-        ]
-    else:
-        pass
diff --git a/testapp/migrations/0017_binarydata_testcheckconstraintwithunicode_and_more.py b/testapp/migrations/0017_binarydata_testcheckconstraintwithunicode_and_more.py
deleted file mode 100644
index de5495ec..00000000
--- a/testapp/migrations/0017_binarydata_testcheckconstraintwithunicode_and_more.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Generated by Django 4.0.2 on 2022-02-23 19:06
-
-from django import VERSION
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('testapp', '0016_jsonmodel'),
-    ]
-
-    operations = [
-        migrations.CreateModel(
-            name='BinaryData',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('binary', models.BinaryField(max_length='max', null=True)),
-            ],
-        ),
-    ]
-
-    if VERSION >= (3, 2):
-        operations += [
-            migrations.CreateModel(
-                name='TestCheckConstraintWithUnicode',
-                fields=[
-                    ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                    ('name', models.CharField(max_length=100)),
-                ],
-                options={
-                    'required_db_features': {'supports_table_check_constraints'},
-                },
-            ),
-            migrations.AddConstraint(
-                model_name='testcheckconstraintwithunicode',
-                constraint=models.CheckConstraint(check=models.Q(('name__startswith', '÷'), _negated=True), name='name_does_not_starts_with_÷'),
-            ),
-        ]
diff --git a/testapp/migrations/0018_choice_question.py b/testapp/migrations/0018_choice_question.py
deleted file mode 100644
index 0d216adc..00000000
--- a/testapp/migrations/0018_choice_question.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Generated by Django 3.2.12 on 2022-03-14 18:36
-
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('testapp', '0017_binarydata_testcheckconstraintwithunicode_and_more'),
-    ]
-
-    operations = [
-        migrations.CreateModel(
-            name='Question',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('question_text', models.CharField(max_length=200)),
-                ('pub_date', models.DateTimeField(verbose_name='date published')),
-            ],
-        ),
-        migrations.CreateModel(
-            name='Choice',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('choice_text', models.CharField(max_length=200)),
-                ('votes', models.IntegerField(default=0)),
-                ('question', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='testapp.question')),
-            ],
-            options={
-                'unique_together': {('question', 'choice_text')},
-            },
-        ),
-    ]
diff --git a/testapp/migrations/0019_customer_name_customer_address.py b/testapp/migrations/0019_customer_name_customer_address.py
deleted file mode 100644
index 804e3d15..00000000
--- a/testapp/migrations/0019_customer_name_customer_address.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Generated by Django 4.0.3 on 2022-03-24 14:51
-
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('testapp', '0018_choice_question'),
-    ]
-
-    operations = [
-        migrations.CreateModel(
-            name='Customer_name',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('Customer_name', models.CharField(max_length=100)),
-            ],
-            options={
-                'ordering': ['Customer_name'],
-            },
-        ),
-        migrations.CreateModel(
-            name='Customer_address',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('Customer_address', models.CharField(max_length=100)),
-                ('Customer_name', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='testapp.customer_name')),
-            ],
-            options={
-                'ordering': ['Customer_address'],
-            },
-        ),
-    ]
diff --git a/testapp/models.py b/testapp/models.py
index 382c3496..cec74519 100644
--- a/testapp/models.py
+++ b/testapp/models.py
@@ -1,10 +1,8 @@
 # Copyright (c) Microsoft Corporation.
 # Licensed under the BSD license.
 
-import datetime
 import uuid
 
-from django import VERSION
 from django.db import models
 from django.db.models import Q
 from django.utils import timezone
@@ -60,17 +58,13 @@ class TestUniqueNullableModel(models.Model):
     # Field used for testing changing the 'type' of a field that's both unique & nullable
     x = models.CharField(max_length=11, null=True, unique=True)
 
-    # A variant of Issue https://github.com/microsoft/mssql-django/issues/14 case (b)
-    # but for a unique index (not db_index)
-    y_renamed = models.IntegerField(null=True, unique=True)
-
 
 class TestNullableUniqueTogetherModel(models.Model):
     class Meta:
         unique_together = (('a', 'b', 'c'),)
 
     # Issue https://github.com/ESSolutions/django-mssql-backend/issues/45 (case 2)
-    # Fields used for testing changing the type of a field that is in a `unique_together`
+    # Fields used for testing changing the 'type' of a field that is in a `unique_together`
     a = models.CharField(max_length=51, null=True)
     b = models.CharField(max_length=50)
     c = models.CharField(max_length=50)
@@ -85,7 +79,7 @@ class TestRemoveOneToOneFieldModel(models.Model):
 
 
 class TestIndexesRetainedRenamed(models.Model):
-    # Issue https://github.com/microsoft/mssql-django/issues/14
+    # Issue https://github.com/ESSolutions/django-mssql-backend/issues/58
     # In all these cases the column index should still exist afterwards
     # case (a) `a` starts out not nullable, but then is changed to be nullable
     a = models.IntegerField(db_index=True, null=True)
@@ -153,63 +147,3 @@ class Meta:
 
     _type = models.CharField(max_length=50)
     status = models.CharField(max_length=50)
-
-
-class BinaryData(models.Model):
-    binary = models.BinaryField(null=True)
-
-
-if VERSION >= (3, 1):
-    class JSONModel(models.Model):
-        value = models.JSONField()
-
-        class Meta:
-            required_db_features = {'supports_json_field'}
-
-
-if VERSION >= (3, 2):
-    class TestCheckConstraintWithUnicode(models.Model):
-        name = models.CharField(max_length=100)
-
-        class Meta:
-            required_db_features = {
-                'supports_table_check_constraints',
-            }
-            constraints = [
-                models.CheckConstraint(
-                    check=~models.Q(name__startswith='\u00f7'),
-                    name='name_does_not_starts_with_\u00f7',
-                )
-            ]
-
-
-class Question(models.Model):
-    question_text = models.CharField(max_length=200)
-    pub_date = models.DateTimeField('date published')
-
-    def __str__(self):
-        return self.question_text
-
-    def was_published_recently(self):
-        return self.pub_date >= timezone.now() - datetime.timedelta(days=1)
-
-
-class Choice(models.Model):
-    question = models.ForeignKey(Question, on_delete=models.CASCADE, null=True)
-    choice_text = models.CharField(max_length=200)
-    votes = models.IntegerField(default=0)
-
-    class Meta:
-        unique_together = (('question', 'choice_text'))
-
-
-class Customer_name(models.Model):
-    Customer_name = models.CharField(max_length=100)
-    class Meta:
-        ordering = ['Customer_name']
-
-class Customer_address(models.Model):
-    Customer_name = models.ForeignKey(Customer_name, on_delete=models.CASCADE)
-    Customer_address = models.CharField(max_length=100)
-    class Meta:
-        ordering = ['Customer_address']
diff --git a/testapp/settings.py b/testapp/settings.py
index cc24fe60..35c13904 100644
--- a/testapp/settings.py
+++ b/testapp/settings.py
@@ -1,11 +1,5 @@
 # Copyright (c) Microsoft Corporation.
 # Licensed under the BSD license.
 
-import os
-from pathlib import Path
-
-from django import VERSION
-
-BASE_DIR = Path(__file__).resolve().parent.parent
 DATABASES = {
     "default": {
@@ -28,56 +22,6 @@
     },
 }
 
-# Django 3.0 and below unit test doesn't handle more than 2 databases in DATABASES correctly
-if VERSION >= (3, 1):
-    DATABASES['sqlite'] = {
-        "ENGINE": "django.db.backends.sqlite3",
-        "NAME": str(BASE_DIR / "db.sqlitetest"),
-    }
-
-
-# Set to `True` locally if you want SQL queries logged to django_sql.log
-DEBUG = False
-
-# Logging
-LOG_DIR = os.path.join(os.path.dirname(__file__), '..', 'logs')
-os.makedirs(LOG_DIR, exist_ok=True)
-LOGGING = {
-    'version': 1,
-    'disable_existing_loggers': True,
-    'formatters': {
-        'myformatter': {
-            'format': '%(asctime)s P%(process)05dT%(thread)05d [%(levelname)s] %(name)s: %(message)s',
-        },
-    },
-    'handlers': {
-        'db_output': {
-            'level': 'DEBUG',
-            'class': 'logging.handlers.RotatingFileHandler',
-            'filename': os.path.join(LOG_DIR, 'django_sql.log'),
-            'formatter': 'myformatter',
-        },
-        'default': {
-            'level': 'DEBUG',
-            'class': 'logging.handlers.RotatingFileHandler',
-            'filename': os.path.join(LOG_DIR, 'default.log'),
-            'formatter': 'myformatter',
-        }
-    },
-    'loggers': {
-        '': {
-            'handlers': ['default'],
-            'level': 'DEBUG',
-            'propagate': False,
-        },
-        'django.db': {
-            'handlers': ['db_output'],
-            'level': 'DEBUG',
-            'propagate': False,
-        },
-    },
-}
-
 INSTALLED_APPS = (
     'django.contrib.contenttypes',
     'django.contrib.staticfiles',
@@ -208,6 +152,7 @@
     'schema.tests.SchemaTests.test_alter_smallint_pk_to_smallautofield_pk',
     'annotations.tests.NonAggregateAnnotationTestCase.test_combined_expression_annotation_with_aggregation',
 
+    'bulk_create.tests.BulkCreateTests.test_bulk_insert_nullable_fields',
     'db_functions.comparison.test_cast.CastTests.test_cast_to_integer',
     'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_func',
     'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_iso_weekday_func',
@@ -279,7 +224,6 @@
     'backends.tests.BackendTestCase.test_queries_logger',
     'migrations.test_operations.OperationTests.test_alter_field_pk_mti_fk',
     'migrations.test_operations.OperationTests.test_run_sql_add_missing_semicolon_on_collect_sql',
-    'migrations.test_operations.OperationTests.test_alter_field_pk_mti_and_fk_to_base'
 ]
 
 REGEX_TESTS = [
diff --git a/testapp/tests/test_constraints.py b/testapp/tests/test_constraints.py
index 1dda8a46..dca4e456 100644
--- a/testapp/tests/test_constraints.py
+++ b/testapp/tests/test_constraints.py
@@ -20,13 +20,13 @@
 
 @skipUnlessDBFeature('supports_nullable_unique_constraints')
 class TestNullableUniqueColumn(TestCase):
-    def test_type_change(self):
+    def test_multiple_nulls(self):
         # Issue https://github.com/ESSolutions/django-mssql-backend/issues/45 (case 1)
         # After field `x` has had its type changed, the filtered UNIQUE INDEX which is
         # implementing the nullable unique constraint should still be correctly in place
         # i.e. allowing multiple NULLs but still enforcing uniqueness of non-NULLs
 
-        # Allowed (NULL != NULL)
+        # Allowed
         TestUniqueNullableModel.objects.create(x=None, test_field='randomness')
         TestUniqueNullableModel.objects.create(x=None, test_field='doesntmatter')
 
@@ -35,22 +35,6 @@ def test_type_change(self):
         with self.assertRaises(IntegrityError):
             TestUniqueNullableModel.objects.create(x="foo", test_field='nonsense')
 
-    def test_rename(self):
-        # Rename of a column which is both nullable & unique. Test that
-        # the constraint-enforcing unique index survived this migration
-        # Related to both:
-        # Issue https://github.com/microsoft/mssql-django/issues/67
-        # Issue https://github.com/microsoft/mssql-django/issues/14
-
-        # Allowed (NULL != NULL)
-        TestUniqueNullableModel.objects.create(y_renamed=None, test_field='something')
-        TestUniqueNullableModel.objects.create(y_renamed=None, test_field='anything')
-
-        # Disallowed
-        TestUniqueNullableModel.objects.create(y_renamed=42, test_field='nonimportant')
-        with self.assertRaises(IntegrityError):
-            TestUniqueNullableModel.objects.create(y_renamed=42, test_field='whocares')
-
 
 @skipUnlessDBFeature('supports_partially_nullable_unique_constraints')
 class TestPartiallyNullableUniqueTogether(TestCase):
@@ -92,7 +76,6 @@ def test_uniqueness_still_enforced_afterwards(self):
         other2 = M2MOtherModel.objects.create(name='2')
         thing1.others_renamed.set([other1, other2])
         # Check that the unique_together on the through table is still enforced
-        # (created by create_many_to_many_intermediary_model)
         ThroughModel = TestRenameManyToManyFieldModel.others_renamed.through
         with self.assertRaises(IntegrityError, msg='Through model fails to enforce uniqueness after m2m rename'):
             # This should fail due to the unique_together because (thing1, other1) is already in the through table
diff --git a/testapp/tests/test_fields.py b/testapp/tests/test_fields.py
index 7b85d5ea..9b8f25f2 100644
--- a/testapp/tests/test_fields.py
+++ b/testapp/tests/test_fields.py
@@ -3,20 +3,9 @@
 
 from django.test import TestCase
 
-from ..models import UUIDModel, Customer_name, Customer_address
+from ..models import UUIDModel
 
 
 class TestUUIDField(TestCase):
     def test_create(self):
         UUIDModel.objects.create()
-
-
-class TestOrderBy(TestCase):
-    def test_order_by(self):
-        # Issue 109
-        # Sample: https://github.com/jwaschkau/django-mssql-issue109
-        john = Customer_name.objects.create(Customer_name='John')
-        Customer_address.objects.create(Customer_address='123 Main St', Customer_name=john)
-        names = Customer_name.objects.select_for_update().all()
-        addresses = Customer_address.objects.filter(Customer_address='123 Main St', Customer_name__in=names)
-        self.assertEqual(len(addresses), 1)
diff --git a/testapp/tests/test_indexes.py b/testapp/tests/test_indexes.py
index 9237e73c..1d7a8f83 100644
--- a/testapp/tests/test_indexes.py
+++ b/testapp/tests/test_indexes.py
@@ -1,42 +1,14 @@
-import logging
-
 import django.db
-from django import VERSION
-from django.apps import apps
-from django.db import models
-from django.db.models import UniqueConstraint
-from django.db.utils import DEFAULT_DB_ALIAS, ConnectionHandler, ProgrammingError
 from django.test import TestCase
 
 from ..models import (
-    TestIndexesRetainedRenamed,
-    Choice,
-    Question,
+    TestIndexesRetainedRenamed
 )
 
-connections = ConnectionHandler()
-
-if (VERSION >= (3, 2)):
-    from django.utils.connection import ConnectionProxy
-    connection = ConnectionProxy(connections, DEFAULT_DB_ALIAS)
-else:
-    from django.db import DefaultConnectionProxy
-    connection = DefaultConnectionProxy()
-
-logger = logging.getLogger('mssql.tests')
-
-
-def _get_constraints(table_name):
-    connection = django.db.connections[django.db.DEFAULT_DB_ALIAS]
-    return connection.introspection.get_constraints(
-        connection.cursor(),
-        table_name=table_name,
-    )
-
 
 class TestIndexesRetained(TestCase):
     """
-    Issue https://github.com/microsoft/mssql-django/issues/14
+    Issue https://github.com/ESSolutions/django-mssql-backend/issues/58
     Indexes dropped during a migration should be re-created
     afterwards assuming the field still has `db_index=True`
     """
 
@@ -46,7 +18,11 @@ def setUpClass(cls):
         super().setUpClass()
         # Pre-fetch which indexes exist for the relevant test model
        # now that all the test migrations have run
-        cls.constraints = _get_constraints(table_name=TestIndexesRetainedRenamed._meta.db_table)
+        connection = django.db.connections[django.db.DEFAULT_DB_ALIAS]
+        cls.constraints = connection.introspection.get_constraints(
+            connection.cursor(),
+            table_name=TestIndexesRetainedRenamed._meta.db_table
+        )
         cls.indexes = {k: v for k, v in cls.constraints.items() if v['index'] is True}
 
     def _assert_index_exists(self, columns):
@@ -60,125 +36,13 @@ def _assert_index_exists(self, columns):
         )
 
     def test_field_made_nullable(self):
-        # case (a) of https://github.com/microsoft/mssql-django/issues/14
+        # case (a) of https://github.com/ESSolutions/django-mssql-backend/issues/58
         self._assert_index_exists({'a'})
 
     def test_field_renamed(self):
-        # case (b) of https://github.com/microsoft/mssql-django/issues/14
+        # case (b) of https://github.com/ESSolutions/django-mssql-backend/issues/58
         self._assert_index_exists({'b_renamed'})
 
     def test_table_renamed(self):
-        # case (c) of https://github.com/microsoft/mssql-django/issues/14
+        # case (c) of https://github.com/ESSolutions/django-mssql-backend/issues/58
         self._assert_index_exists({'c'})
-
-
-def _get_all_models():
-    for app in apps.get_app_configs():
-        app_label = app.label
-        for model_name, model_class in app.models.items():
-            yield model_class, model_name, app_label
-
-
-class TestCorrectIndexes(TestCase):
-
-    def test_correct_indexes_exist(self):
-        """
-        Check there are the correct number of indexes for each field after all migrations
-        by comparing what the model says (e.g. `db_index=True` / `index_together` etc.)
-        with the actual constraints found in the database.
-        This acts as a general regression test for issues such as:
-          - duplicate index created (e.g. https://github.com/microsoft/mssql-django/issues/77)
-          - index dropped but accidentally not recreated
-          - index incorrectly 'recreated' when it was never actually dropped or required at all
-        Note of course that it only covers cases which exist in testapp/models.py and associated migrations
-        """
-        connection = django.db.connections[django.db.DEFAULT_DB_ALIAS]
-        for model_cls, model_name, app_label in _get_all_models():
-            logger.debug('Checking model: %s.%s', app_label, model_name)
-            if not model_cls._meta.managed:
-                # Models where the table is not managed by Django migrations are irrelevant
-                continue
-            model_constraints = _get_constraints(table_name=model_cls._meta.db_table)
-            # Check correct indexes are in place for all fields in model
-            for field in model_cls._meta.get_fields():
-                if not hasattr(field, 'column'):
-                    # ignore things like reverse fields which don't have a column on this table
-                    continue
-                col_name = connection.introspection.identifier_converter(field.column)
-                field_str = f'{app_label}.{model_name}.{field.name} ({col_name})'
-                logger.debug('  > Checking field: %s', field_str)
-
-                # Find constraints which include this column
-                col_constraints = [
-                    dict(name=name, **infodict) for name, infodict in model_constraints.items()
-                    if col_name in infodict['columns']
-                ]
-                col_indexes = [c for c in col_constraints if c['index']]
-                for c in col_constraints:
-                    logger.debug('    > Column <%s> is involved in constraint: %s', col_name, c)
-
-                # There should be an explicit index for each of the following cases
-                expected_index_causes = []
-                if field.db_index:
-                    expected_index_causes.append('db_index=True')
-                for field_names in model_cls._meta.index_together:
-                    if field.name in field_names:
-                        expected_index_causes.append(f'index_together[{field_names}]')
-                if field._unique and field.null:
-                    # This is implemented using a (filtered) unique index (not a constraint) to get ANSI NULL behaviour
-                    expected_index_causes.append('unique=True & null=True')
-                for field_names in model_cls._meta.unique_together:
-                    if field.name in field_names:
-                        # unique_together results in an index because this backend implements it using a
-                        # (filtered) unique index rather than a constraint, to get ANSI NULL behaviour
-                        expected_index_causes.append(f'unique_together[{field_names}]')
-                for uniq_constraint in filter(lambda c: isinstance(c, UniqueConstraint), model_cls._meta.constraints):
-                    if field.name in uniq_constraint.fields and uniq_constraint.condition is not None:
-                        # Meta:constraints > UniqueConstraint with condition are implemented with filtered unique index
-                        expected_index_causes.append(f'UniqueConstraint (with condition) in Meta: constraints')
-
-                # Other cases like `unique=True, null=False` or `field.primary_key` do have index-like constraints
-                # but in those cases the introspection returns `"index": False` so they are not in the list of
-                # explicit indexes which we are checking here (`col_indexes`)
-
-                assert len(col_indexes) == len(expected_index_causes), \
-                    'Expected %s index(es) on %s but found %s.\n' \
-                    'Check for behaviour changes around index drop/recreate in methods like _alter_field.\n' \
-                    'Expected due to: %s\n' \
-                    'Found: %s' % (
-                        len(expected_index_causes),
-                        field_str,
-                        len(col_indexes),
-                        expected_index_causes,
-                        '\n'.join(str(i) for i in col_indexes),
-                    )
-                logger.debug('    Found %s index(es) as expected', len(col_indexes))
-
-
-class TestIndexesBeingDropped(TestCase):
-
-    def test_unique_index_dropped(self):
-        """
-        Issues https://github.com/microsoft/mssql-django/issues/110
-        and https://github.com/microsoft/mssql-django/issues/90
-        Unique indexes not being dropped when changing non-nullable
-        foreign key with unique_together to nullable causing
-        dependent on column error
-        """
-        old_field = Choice._meta.get_field('question')
-        new_field = models.ForeignKey(
-            Question, null=False, on_delete=models.deletion.CASCADE
-        )
-        new_field.set_attributes_from_name("question")
-        with connection.schema_editor() as editor:
-            editor.alter_field(Choice, old_field, new_field, strict=True)
-
-        old_field = new_field
-        new_field = models.ForeignKey(
-            Question, null=True, on_delete=models.deletion.CASCADE
-        )
-        new_field.set_attributes_from_name("question")
-        try:
-            with connection.schema_editor() as editor:
-                editor.alter_field(Choice, old_field, new_field, strict=True)
-        except ProgrammingError:
-            self.fail("Unique indexes not being dropped")
diff --git a/testapp/tests/test_jsonfield.py b/testapp/tests/test_jsonfield.py
deleted file mode 100644
index 08dda57e..00000000
--- a/testapp/tests/test_jsonfield.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright (c) Microsoft Corporation.
-# Licensed under the BSD license.
-
-from unittest import skipUnless
-
-from django import VERSION
-from django.test import TestCase
-
-if VERSION >= (3, 1):
-    from ..models import JSONModel
-
-
-def _check_jsonfield_supported_sqlite():
-    # Info about JSONField support in SQLite: https://code.djangoproject.com/wiki/JSON1Extension
-    import sqlite3
-
-    supports_jsonfield = True
-    try:
-        conn = sqlite3.connect(':memory:')
-        cursor = conn.cursor()
-        cursor.execute('SELECT JSON(\'{"a": "b"}\')')
-    except sqlite3.OperationalError:
-        supports_jsonfield = False
-    finally:
-        return supports_jsonfield
-
-
-class TestJSONField(TestCase):
-    databases = ['default']
-    # Django 3.0 and below unit test doesn't handle more than 2 databases in DATABASES correctly
-    if VERSION >= (3, 1):
-        databases.append('sqlite')
-
-    json = {
-        'a': 'b',
-        'b': 1,
-        'c': '1',
-        'd': [],
-        'e': [1, 2],
-        'f': ['a', 'b'],
-        'g': [1, 'a'],
-        'h': {},
-        'i': {'j': 1},
-        'j': False,
-        'k': True,
-        'l': {
-            'foo': 'bar',
-            'baz': {'a': 'b', 'c': 'd'},
-            'bar': ['foo', 'bar'],
-            'bax': {'foo': 'bar'},
-        },
-    }
-
-    @skipUnless(VERSION >= (3, 1), "JSONField not support in Django versions < 3.1")
-    @skipUnless(
-        _check_jsonfield_supported_sqlite(),
-        "JSONField not support by SQLite on this platform and Python version",
-    )
-    def test_keytransformexact_not_overriding(self):
-        # Issue https://github.com/microsoft/mssql-django/issues/82
-        json_obj = JSONModel(value=self.json)
-        json_obj.save()
-        self.assertSequenceEqual(
-            JSONModel.objects.filter(value__a='b'),
-            [json_obj],
-        )
-        json_obj.save(using='sqlite')
-        self.assertSequenceEqual(
-            JSONModel.objects.using('sqlite').filter(value__a='b'),
-            [json_obj],
-        )
diff --git a/testapp/tests/test_multiple_databases.py b/testapp/tests/test_multiple_databases.py
deleted file mode 100644
index e1eb885e..00000000
--- a/testapp/tests/test_multiple_databases.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# Copyright (c) Microsoft Corporation.
-# Licensed under the BSD license.
-
-from unittest import skipUnless
-
-from django import VERSION
-from django.core.exceptions import ValidationError
-from django.db import OperationalError
-from django.db.backends.sqlite3.operations import DatabaseOperations
-from django.test import TestCase, skipUnlessDBFeature
-
-from ..models import BinaryData, Pizza, Topping
-
-if VERSION >= (3, 2):
-    from ..models import TestCheckConstraintWithUnicode
-
-
-@skipUnless(
-    VERSION >= (3, 1),
-    "Django 3.0 and below doesn't support different databases in unit tests",
-)
-class TestMultpleDatabases(TestCase):
-    databases = ['default', 'sqlite']
-
-    def test_in_split_parameter_list_as_sql(self):
-        # Issue: https://github.com/microsoft/mssql-django/issues/92
-
-        # Mimic databases that have a limit on parameters (e.g. Oracle DB)
-        old_max_in_list_size = DatabaseOperations.max_in_list_size
-        DatabaseOperations.max_in_list_size = lambda self: 100
-
-        mssql_iterations = 3000
-        Pizza.objects.bulk_create([Pizza() for _ in range(mssql_iterations)])
-        Topping.objects.bulk_create([Topping() for _ in range(mssql_iterations)])
-        prefetch_result = Pizza.objects.prefetch_related('toppings')
-        self.assertEqual(len(prefetch_result), mssql_iterations)
-
-        # Different iterations since SQLite has max host parameters of 999 for versions prior to 3.32.0
-        # Info about limit: https://www.sqlite.org/limits.html
-        sqlite_iterations = 999
-        Pizza.objects.using('sqlite').bulk_create([Pizza() for _ in range(sqlite_iterations)])
-        Topping.objects.using('sqlite').bulk_create([Topping() for _ in range(sqlite_iterations)])
-        prefetch_result_sqlite = Pizza.objects.using('sqlite').prefetch_related('toppings')
-        self.assertEqual(len(prefetch_result_sqlite), sqlite_iterations)
-
-        DatabaseOperations.max_in_list_size = old_max_in_list_size
-
-    def test_binaryfield_init(self):
-        binary_data = b'\x00\x46\xFE'
-        binary = BinaryData(binary=binary_data)
-        binary.save()
-        binary.save(using='sqlite')
-
-        try:
-            binary.full_clean()
-        except ValidationError:
-            self.fail()
-
-        b1 = BinaryData.objects.filter(binary=binary_data)
-        self.assertSequenceEqual(
-            b1,
-            [binary],
-        )
-        b2 = BinaryData.objects.using('sqlite').filter(binary=binary_data)
-        self.assertSequenceEqual(
-            b2,
-            [binary],
-        )
-
-    @skipUnlessDBFeature('supports_table_check_constraints')
-    @skipUnless(
-        VERSION >= (3, 2),
-        "Django 3.1 and below has errors from running migrations for this test",
-    )
-    def test_checkconstraint_get_check_sql(self):
-        TestCheckConstraintWithUnicode.objects.create(name='abc')
-        try:
-            TestCheckConstraintWithUnicode.objects.using('sqlite').create(name='abc')
-        except OperationalError:
-            self.fail()
-
-    def test_queryset_bulk_update(self):
-        objs = [
-            BinaryData.objects.create(binary=b'\x00') for _ in range(5)
-        ]
-        for obj in objs:
-            obj.binary = None
-        BinaryData.objects.bulk_update(objs, ["binary"])
-        self.assertCountEqual(BinaryData.objects.filter(binary__isnull=True), objs)
-
-        objs = [
-            BinaryData.objects.using('sqlite').create(binary=b'\x00') for _ in range(5)
-        ]
-        for obj in objs:
-            obj.binary = None
-        BinaryData.objects.using('sqlite').bulk_update(objs, ["binary"])
-        self.assertCountEqual(BinaryData.objects.using('sqlite').filter(binary__isnull=True), objs)
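For readers tracing the access-token plumbing removed from mssql/base.py by this diff: the deleted `prepare_token_for_odbc` helper expanded the token to the length-prefixed UTF-16-LE layout the SQL Server ODBC driver expects, and the deleted connection code passed the result through pyodbc's `attrs_before` under attribute code 1256 (commonly named `SQL_COPT_SS_ACCESS_TOKEN`; the diff uses the bare number, so the constant name is an assumption). A standalone sketch of that flow, assuming pyodbc; `connstr` and `token` are placeholders:

```python
# Sketch of the token handoff removed by this diff, assuming pyodbc.
import struct

import pyodbc

SQL_COPT_SS_ACCESS_TOKEN = 1256  # bare attribute code used in the removed code


def prepare_token_for_odbc(token: str) -> bytes:
    """Pack a token as a native-endian 4-byte length prefix + UTF-16-LE bytes.

    For the ASCII tokens Azure AD issues, encoding to UTF-16-LE matches the
    removed helper's byte-plus-zero-byte expansion loop exactly.
    """
    if not isinstance(token, str):
        raise TypeError("Invalid token format provided.")
    exptoken = token.encode('utf-16-le')
    return struct.pack('=i', len(exptoken)) + exptoken


# Hypothetical usage; connstr and token must come from your own configuration.
# conn = pyodbc.connect(connstr, attrs_before={
#     SQL_COPT_SS_ACCESS_TOKEN: prepare_token_for_odbc(token),
# })
```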