diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..c05d96df --- /dev/null +++ b/.editorconfig @@ -0,0 +1,15 @@ +# https://editorconfig.org/ + +root = true + +[*] +indent_style = space +indent_size = 4 +insert_final_newline = true +trim_trailing_whitespace = true +end_of_line = lf +charset = utf-8 +max_line_length = 119 + +[*.{yml,yaml}] +indent_size = 2 diff --git a/.gitignore b/.gitignore index 07c45280..0c91415f 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,7 @@ Thumbs.db *.egg-info tests/local_settings.py + +# Virtual Env +/venv/ +.idea/ diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..2fbcc799 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,151 @@ +sudo: required +language: python +cache: pip + +branches: + only: + - azure-2.1 + +templates: + mssql: &mssql DB_PACKAGES="" DATABASE_URL="mssql://SA:MyPassword42@localhost:1433/default?isolation_level=read committed&driver=ODBC Driver 17 for SQL Server" DATABASE_URL_OTHER="mssql://SA:MyPassword42@localhost:1433/other?isolation_level=read committed&driver=ODBC Driver 17 for SQL Server" + +matrix: + include: + - env: FLAKE8 + python: "3.6" + install: pip install flake8==3.7.1 + script: flake8 + + - python: "3.6" + dist: trusty + services: docker + before_install: + - docker pull mcr.microsoft.com/mssql/server:2017-latest-ubuntu + - docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=MyPassword42' -p 1433:1433 -d mcr.microsoft.com/mssql/server:2017-latest-ubuntu + - curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add - + - curl https://packages.microsoft.com/config/ubuntu/14.04/prod.list | sudo tee /etc/apt/sources.list.d/mssql-release.list + - sudo apt-get update + - sudo ACCEPT_EULA=Y apt-get install msodbcsql17 + env: + - *mssql + + - os: windows + language: sh + python: "3.6" + services: docker + before_install: + - docker pull christianacca/mssql-server-windows-express:1803 + - docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=MyPassword42' -p 
1433:1433 -d christianacca/mssql-server-windows-express:1803 + - wget https://download.microsoft.com/download/E/6/B/E6BFDC7A-5BCD-4C51-9912-635646DA801E/en-US/msodbcsql_17.3.1.1_x64.msi + - powershell "Start-Process msiexec.exe -Wait -ArgumentList '/I msodbcsql_17.3.1.1_x64.msi /qn /norestart IACCEPTMSODBCSQLLICENSETERMS=YES'" + - choco install python3 --version 3.6.6 + - export PATH="/c/Python36:/c/Python36/Scripts:$PATH" + env: + - *mssql + +install: + - python -m pip install --upgrade pip wheel setuptools + - pip install -e .["tests$DB_PACKAGES"] + - git clone --branch=stable/2.1.x https://github.com/django/django.git "$TRAVIS_BUILD_DIR/../django" --depth=1 + - export PYTHONPATH=$PYTHONPATH:$TRAVIS_BUILD_DIR + +script: + - cd "$TRAVIS_BUILD_DIR/../django/tests" + - pip install -r requirements/py3.txt + - ./runtests.py --settings=testapp.settings \ + aggregation + aggregation_regress + annotations + backends basic + bulk_create constraints + custom_columns + custom_lookups + custom_managers + custom_methods + custom_migration_operations + custom_pk + datatypes + dates + datetimes + db_functions + db_typecasts + db_utils + dbshell + defer + defer_regress + delete + delete_regress + distinct_on_fields + empty + empty_models + expressions + expressions_case + expressions_window + extra_regress + field_deconstruction + field_defaults + field_subclassing + filtered_relation + fixtures + fixtures_model_package + fixtures_regress + force_insert_update + foreign_object + from_db_value + generic_relations + generic_relations_regress + get_earliest_or_latest + get_object_or_404 + get_or_create + indexes + inspectdb + introspection + invalid_model_tests + known_related_objects + lookup + m2m_and_m2o + m2m_intermediary + m2m_multiple + m2m_recursive + m2m_regress + m2m_signals + m2m_through + m2m_through_regress + m2o_recursive + managers_regress + many_to_many + many_to_one + many_to_one_null + max_lengths + migrate_signals + migration_test_data_persistance + migrations + 
migrations2 + model_fields + model_indexes + model_options + mutually_referential + nested_foreign_keys + null_fk + null_fk_ordering + null_queries + one_to_one + or_lookups + order_with_respect_to + ordering + pagination + prefetch_related + queries + queryset_pickle + raw_query + reverse_lookup + save_delete_hooks + schema + select_for_update + select_related + select_related_onetoone + select_related_regress + transaction_hooks + transactions + update + update_only_fields diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..1064d458 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,5 @@ +[flake8] +exclude = .git,__pycache__, +# W504 is mutually exclusive with W503 +ignore = W504 +max-line-length = 119 diff --git a/setup.py b/setup.py index b2cff581..bfd75fb5 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ except ImportError: from distutils.core import setup -CLASSIFIERS=[ +CLASSIFIERS = [ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: BSD License', 'Framework :: Django', @@ -26,9 +26,12 @@ license='BSD', packages=['sql_server', 'sql_server.pyodbc'], install_requires=[ - 'Django>=2.2.0,<2.3', + 'Django>=2.1.0,<2.2', 'pyodbc>=3.0', ], + extras_require={ + 'tests': ['dj-database-url==0.5.0'], + }, classifiers=CLASSIFIERS, keywords='azure django', ) diff --git a/sql_server/pyodbc/base.py b/sql_server/pyodbc/base.py index bc0f8eec..7263cf6d 100644 --- a/sql_server/pyodbc/base.py +++ b/sql_server/pyodbc/base.py @@ -8,7 +8,7 @@ from django.core.exceptions import ImproperlyConfigured from django import VERSION -if VERSION[:3] < (2,2,0) or VERSION[:2] >= (2,3): +if VERSION[:3] < (2, 1, 0) or VERSION[:2] >= (2, 2): raise ImproperlyConfigured("Django %d.%d.%d is not supported." 
% VERSION[:3]) try: @@ -16,30 +16,28 @@ except ImportError as e: raise ImproperlyConfigured("Error loading pyodbc module: %s" % e) -from django.utils.version import get_version_tuple +from django.utils.version import get_version_tuple # noqa pyodbc_ver = get_version_tuple(Database.version) -if pyodbc_ver < (3,0): +if pyodbc_ver < (3, 0): raise ImproperlyConfigured("pyodbc 3.0 or newer is required; you have %s" % Database.version) -from django.conf import settings -from django.db import NotSupportedError -from django.db.backends.base.base import BaseDatabaseWrapper -from django.db.backends.base.validation import BaseDatabaseValidation -from django.utils.encoding import smart_str -from django.utils.functional import cached_property -from django.utils.timezone import utc +from django.conf import settings # noqa +from django.db import NotSupportedError # noqa +from django.db.backends.base.base import BaseDatabaseWrapper # noqa +from django.utils.encoding import smart_str # noqa +from django.utils.functional import cached_property # noqa if hasattr(settings, 'DATABASE_CONNECTION_POOLING'): if not settings.DATABASE_CONNECTION_POOLING: Database.pooling = False -from .client import DatabaseClient -from .creation import DatabaseCreation -from .features import DatabaseFeatures -from .introspection import DatabaseIntrospection -from .operations import DatabaseOperations -from .schema import DatabaseSchemaEditor +from .client import DatabaseClient # noqa +from .creation import DatabaseCreation # noqa +from .features import DatabaseFeatures # noqa +from .introspection import DatabaseIntrospection # noqa +from .operations import DatabaseOperations # noqa +from .schema import DatabaseSchemaEditor # noqa EDITION_AZURE_SQL_DB = 5 @@ -57,6 +55,7 @@ def encode_connection_string(fields): for k, v in fields.items() ) + def encode_value(v): """If the value contains a semicolon, or starts with a left curly brace, then enclose it in curly braces and escape all right curly braces. 
@@ -65,6 +64,7 @@ def encode_value(v): return '{%s}' % (v.replace('}', '}}'),) return v + class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'microsoft' display_name = 'SQL Server' @@ -73,31 +73,31 @@ class DatabaseWrapper(BaseDatabaseWrapper): # be interpolated against the values of Field.__dict__ before being output. # If a column type is set to None, it won't be included in the output. data_types = { - 'AutoField': 'int IDENTITY (1, 1)', - 'BigAutoField': 'bigint IDENTITY (1, 1)', - 'BigIntegerField': 'bigint', - 'BinaryField': 'varbinary(max)', - 'BooleanField': 'bit', - 'CharField': 'nvarchar(%(max_length)s)', - 'DateField': 'date', - 'DateTimeField': 'datetime2', - 'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)', - 'DurationField': 'bigint', - 'FileField': 'nvarchar(%(max_length)s)', - 'FilePathField': 'nvarchar(%(max_length)s)', - 'FloatField': 'double precision', - 'IntegerField': 'int', - 'IPAddressField': 'nvarchar(15)', + 'AutoField': 'int IDENTITY (1, 1)', + 'BigAutoField': 'bigint IDENTITY (1, 1)', + 'BigIntegerField': 'bigint', + 'BinaryField': 'varbinary(max)', + 'BooleanField': 'bit', + 'CharField': 'nvarchar(%(max_length)s)', + 'DateField': 'date', + 'DateTimeField': 'datetime2', + 'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)', + 'DurationField': 'bigint', + 'FileField': 'nvarchar(%(max_length)s)', + 'FilePathField': 'nvarchar(%(max_length)s)', + 'FloatField': 'double precision', + 'IntegerField': 'int', + 'IPAddressField': 'nvarchar(15)', 'GenericIPAddressField': 'nvarchar(39)', - 'NullBooleanField': 'bit', - 'OneToOneField': 'int', + 'NullBooleanField': 'bit', + 'OneToOneField': 'int', 'PositiveIntegerField': 'int', 'PositiveSmallIntegerField': 'smallint', - 'SlugField': 'nvarchar(%(max_length)s)', + 'SlugField': 'nvarchar(%(max_length)s)', 'SmallIntegerField': 'smallint', - 'TextField': 'nvarchar(max)', - 'TimeField': 'time', - 'UUIDField': 'char(32)', + 'TextField': 'nvarchar(max)', + 'TimeField': 'time', + 
'UUIDField': 'char(32)', } data_type_check_constraints = { 'PositiveIntegerField': '[%(column)s] >= 0', @@ -324,7 +324,7 @@ def init_connection_state(self): if ver < (0, 95): raise ImproperlyConfigured( "FreeTDS 0.95 or newer is required.") - except: + except Exception: # unknown driver version pass @@ -380,7 +380,7 @@ def sql_server_version(self, _known_versions={}): cursor.execute("SELECT CAST(SERVERPROPERTY('ProductVersion') AS varchar)") ver = cursor.fetchone()[0] ver = int(ver.split('.')[0]) - if not ver in self._sql_server_versions: + if ver not in self._sql_server_versions: raise NotSupportedError('SQL Server v%d is not supported.' % ver) _known_versions[self.alias] = self._sql_server_versions[ver] return _known_versions[self.alias] @@ -419,7 +419,7 @@ def _on_error(self, e): self.close() # wait a moment for recovery from network error time.sleep(self.connection_recovery_interval_msec) - except: + except Exception: pass self.connection = None @@ -458,14 +458,14 @@ def check_constraints(self, table_names=None): def disable_constraint_checking(self): # Azure SQL Database doesn't support sp_msforeachtable - #cursor.execute('EXEC sp_msforeachtable "ALTER TABLE ? NOCHECK CONSTRAINT ALL"') + # cursor.execute('EXEC sp_msforeachtable "ALTER TABLE ? NOCHECK CONSTRAINT ALL"') if not self.needs_rollback: self._execute_foreach('ALTER TABLE %s NOCHECK CONSTRAINT ALL') return not self.needs_rollback def enable_constraint_checking(self): # Azure SQL Database doesn't support sp_msforeachtable - #cursor.execute('EXEC sp_msforeachtable "ALTER TABLE ? WITH CHECK CHECK CONSTRAINT ALL"') + # cursor.execute('EXEC sp_msforeachtable "ALTER TABLE ? WITH CHECK CHECK CONSTRAINT ALL"') if not self.needs_rollback: self.check_constraints() @@ -475,6 +475,7 @@ class CursorWrapper(object): A wrapper around the pyodbc's cursor that takes in account a) some pyodbc DB-API 2.0 implementation and b) some common ODBC driver particularities. 
""" + def __init__(self, cursor, connection): self.active = True self.cursor = cursor diff --git a/sql_server/pyodbc/client.py b/sql_server/pyodbc/client.py index 5d771573..1247f406 100644 --- a/sql_server/pyodbc/client.py +++ b/sql_server/pyodbc/client.py @@ -3,6 +3,7 @@ from django.db.backends.base.client import BaseDatabaseClient + class DatabaseClient(BaseDatabaseClient): executable_name = 'sqlcmd' @@ -33,7 +34,7 @@ def runshell(self): if password: args += ["-P", password] else: - args += ["-E"] # Try trusted connection instead + args += ["-E"] # Try trusted connection instead if db: args += ["-d", db] if defaults_file: diff --git a/sql_server/pyodbc/compiler.py b/sql_server/pyodbc/compiler.py index 92d53676..0ea9da20 100644 --- a/sql_server/pyodbc/compiler.py +++ b/sql_server/pyodbc/compiler.py @@ -8,15 +8,17 @@ ) from django.db.models.sql import compiler from django.db.transaction import TransactionManagementError -from django.db.utils import DatabaseError, NotSupportedError +from django.db.utils import NotSupportedError def _as_sql_agv(self, compiler, connection): return self.as_sql(compiler, connection, template='%(function)s(CONVERT(float, %(field)s))') + def _as_sql_chr(self, compiler, connection): return self.as_sql(compiler, connection, function='NCHAR') + def _as_sql_concatpair(self, compiler, connection): if connection.sql_server_version < 2012: node = self.coalesce() @@ -24,26 +26,31 @@ def _as_sql_concatpair(self, compiler, connection): else: return self.as_sql(compiler, connection) + def _as_sql_count(self, compiler, connection): return self.as_sql(compiler, connection, function='COUNT_BIG') + def _as_sql_greatest(self, compiler, connection): # SQL Server does not provide GREATEST function, # so we emulate it with a table value constructor # https://msdn.microsoft.com/en-us/library/dd776382.aspx - template='(SELECT MAX(value) FROM (VALUES (%(expressions)s)) AS _%(function)s(value))' + template = '(SELECT MAX(value) FROM (VALUES (%(expressions)s)) 
AS _%(function)s(value))' return self.as_sql(compiler, connection, arg_joiner='), (', template=template) + def _as_sql_least(self, compiler, connection): # SQL Server does not provide LEAST function, # so we emulate it with a table value constructor # https://msdn.microsoft.com/en-us/library/dd776382.aspx - template='(SELECT MIN(value) FROM (VALUES (%(expressions)s)) AS _%(function)s(value))' + template = '(SELECT MIN(value) FROM (VALUES (%(expressions)s)) AS _%(function)s(value))' return self.as_sql(compiler, connection, arg_joiner='), (', template=template) + def _as_sql_length(self, compiler, connection): return self.as_sql(compiler, connection, function='LEN') + def _as_sql_lpad(self, compiler, connection): i = iter(self.get_source_expressions()) expression, expression_arg = compiler.compile(next(i)) @@ -59,7 +66,8 @@ def _as_sql_lpad(self, compiler, connection): params.extend(expression_arg) template = ('LEFT(REPLICATE(%(fill_text)s, %(length)s), CASE WHEN %(length)s > LEN(%(expression)s) ' 'THEN %(length)s - LEN(%(expression)s) ELSE 0 END) + %(expression)s') - return template % {'expression':expression, 'length':length, 'fill_text':fill_text }, params + return template % {'expression': expression, 'length': length, 'fill_text': fill_text}, params + def _as_sql_exists(self, compiler, connection, template=None, **extra_context): # MS SQL doesn't allow EXISTS() in the SELECT list, so wrap it with a @@ -69,6 +77,7 @@ def _as_sql_exists(self, compiler, connection, template=None, **extra_context): sql = 'CASE WHEN {} THEN 1 ELSE 0 END'.format(sql) return sql, params + def _as_sql_order_by(self, compiler, connection): template = None if self.nulls_last: @@ -77,9 +86,11 @@ def _as_sql_order_by(self, compiler, connection): template = 'CASE WHEN %(expression)s IS NULL THEN 0 ELSE 1 END, %(expression)s %(ordering)s' return self.as_sql(compiler, connection, template=template) + def _as_sql_repeat(self, compiler, connection): return self.as_sql(compiler, connection, 
function='REPLICATE') + def _as_sql_rpad(self, compiler, connection): i = iter(self.get_source_expressions()) expression, expression_arg = compiler.compile(next(i)) @@ -90,8 +101,9 @@ def _as_sql_rpad(self, compiler, connection): params.extend(fill_text_arg) params.extend(length_arg) params.extend(length_arg) - template='LEFT(%(expression)s + REPLICATE(%(fill_text)s, %(length)s), %(length)s)' - return template % {'expression':expression, 'length':length, 'fill_text':fill_text }, params + template = 'LEFT(%(expression)s + REPLICATE(%(fill_text)s, %(length)s), %(length)s)' + return template % {'expression': expression, 'length': length, 'fill_text': fill_text}, params + def _as_sql_stddev(self, compiler, connection): function = 'STDEV' @@ -99,26 +111,31 @@ def _as_sql_stddev(self, compiler, connection): function = '%sP' % function return self.as_sql(compiler, connection, function=function) + def _as_sql_strindex(self, compiler, connection): self.source_expressions.reverse() sql = self.as_sql(compiler, connection, function='CHARINDEX') self.source_expressions.reverse() return sql + def _as_sql_substr(self, compiler, connection): if len(self.get_source_expressions()) < 3: - self.get_source_expressions().append(Value(2**31-1)) + self.get_source_expressions().append(Value(2**31 - 1)) return self.as_sql(compiler, connection) + def _as_sql_trim(self, compiler, connection): return self.as_sql(compiler, connection, template='LTRIM(RTRIM(%(expressions)s))') + def _as_sql_variance(self, compiler, connection): function = 'VAR' if self.function == 'VAR_POP': function = '%sP' % function return self.as_sql(compiler, connection, function=function) + def _cursor_iter(cursor, sentinel, col_count, itersize): """ Yields blocks of rows from a cursor and ensures the cursor is closed when @@ -144,6 +161,7 @@ def _cursor_iter(cursor, sentinel, col_count, itersize): for rows in chunks: yield rows + compiler.cursor_iter = _cursor_iter @@ -189,7 +207,7 @@ def as_sql(self, with_limits=True, 
with_col_aliases=False): having, h_params = self.compile(self.having) if self.having is not None else ("", []) params = [] result = ['SELECT'] - + if self.query.distinct: distinct_result, distinct_params = self.connection.ops.distinct_sql( distinct_fields, @@ -197,11 +215,11 @@ def as_sql(self, with_limits=True, with_col_aliases=False): ) result += distinct_result params += distinct_params - + # SQL Server requires the keword for limitting at the begenning if do_limit and not do_offset: result.append('TOP %d' % high_mark) - + out_cols = [] col_idx = 1 for _, (s_sql, s_params), alias in self.select + extra_select: @@ -212,7 +230,7 @@ def as_sql(self, with_limits=True, with_col_aliases=False): col_idx += 1 params.extend(s_params) out_cols.append(s_sql) - + # SQL Server requires an order-by clause for offsetting if do_offset: meta = self.query.get_meta() @@ -228,7 +246,7 @@ def as_sql(self, with_limits=True, with_col_aliases=False): src = next(iter(expr.get_source_expressions())) if isinstance(src, Ref): src = next(iter(src.get_source_expressions())) - o_sql, _ = src.as_sql(self, self.connection) + o_sql, _ = src.as_sql(self, self.connection) odir = 'DESC' if expr.descending else 'ASC' o_sql = '%s %s' % (o_sql, odir) ordering.append(o_sql) @@ -238,7 +256,7 @@ def as_sql(self, with_limits=True, with_col_aliases=False): out_cols.append('ROW_NUMBER() OVER (ORDER BY %s) AS [rn]' % offsetting_order_by) elif not order_by: order_by.append(((None, ('%s ASC' % offsetting_order_by, [], None)))) - + if self.query.select_for_update and self.connection.features.has_select_for_update: if self.connection.get_autocommit(): raise TransactionManagementError('select_for_update cannot be used outside of a transaction.') @@ -275,7 +293,7 @@ def as_sql(self, with_limits=True, with_col_aliases=False): if where: result.append('WHERE %s' % where) params.extend(w_params) - + grouping = [] for g_sql, g_params in group_by: grouping.append(g_sql) @@ -285,7 +303,7 @@ def as_sql(self, 
with_limits=True, with_col_aliases=False): raise NotImplementedError('annotate() + distinct(fields) is not implemented.') order_by = order_by or self.connection.ops.force_no_ordering() result.append('GROUP BY %s' % ', '.join(grouping)) - + if having: result.append('HAVING %s' % having) params.extend(h_params) @@ -312,9 +330,9 @@ def as_sql(self, with_limits=True, with_col_aliases=False): result = ['SELECT * FROM (%s) AS X WHERE X.rn' % ' '.join(result)] # Place WHERE condition on `rn` for the desired range. if do_limit: - result.append('BETWEEN %d AND %d' % (low_mark+1, high_mark)) + result.append('BETWEEN %d AND %d' % (low_mark + 1, high_mark)) else: - result.append('>= %d' % (low_mark+1)) + result.append('>= %d' % (low_mark + 1)) if not self.query.subquery: result.append('ORDER BY X.rn') else: diff --git a/sql_server/pyodbc/creation.py b/sql_server/pyodbc/creation.py index a2db2e9a..1c533716 100644 --- a/sql_server/pyodbc/creation.py +++ b/sql_server/pyodbc/creation.py @@ -15,7 +15,7 @@ def _destroy_test_db(self, test_database_name, verbosity): to_azure_sql_db = self.connection.to_azure_sql_db if not to_azure_sql_db: cursor.execute("ALTER DATABASE %s SET SINGLE_USER WITH ROLLBACK IMMEDIATE" - % self.connection.ops.quote_name(test_database_name)) + % self.connection.ops.quote_name(test_database_name)) cursor.execute("DROP DATABASE %s" % self.connection.ops.quote_name(test_database_name)) diff --git a/sql_server/pyodbc/introspection.py b/sql_server/pyodbc/introspection.py index 82f4c910..4b92f9e6 100644 --- a/sql_server/pyodbc/introspection.py +++ b/sql_server/pyodbc/introspection.py @@ -12,32 +12,32 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): # Map type codes to Django Field types. 
data_types_reverse = { - SQL_AUTOFIELD: 'AutoField', - SQL_BIGAUTOFIELD: 'BigAutoField', - Database.SQL_BIGINT: 'BigIntegerField', - #Database.SQL_BINARY: , - Database.SQL_BIT: 'BooleanField', - Database.SQL_CHAR: 'CharField', - Database.SQL_DECIMAL: 'DecimalField', - Database.SQL_DOUBLE: 'FloatField', - Database.SQL_FLOAT: 'FloatField', - Database.SQL_GUID: 'TextField', - Database.SQL_INTEGER: 'IntegerField', - Database.SQL_LONGVARBINARY: 'BinaryField', - #Database.SQL_LONGVARCHAR: , - Database.SQL_NUMERIC: 'DecimalField', - Database.SQL_REAL: 'FloatField', - Database.SQL_SMALLINT: 'SmallIntegerField', - Database.SQL_SS_TIME2: 'TimeField', - Database.SQL_TINYINT: 'SmallIntegerField', - Database.SQL_TYPE_DATE: 'DateField', - Database.SQL_TYPE_TIME: 'TimeField', - Database.SQL_TYPE_TIMESTAMP: 'DateTimeField', - Database.SQL_VARBINARY: 'BinaryField', - Database.SQL_VARCHAR: 'TextField', - Database.SQL_WCHAR: 'CharField', - Database.SQL_WLONGVARCHAR: 'TextField', - Database.SQL_WVARCHAR: 'TextField', + SQL_AUTOFIELD: 'AutoField', + SQL_BIGAUTOFIELD: 'BigAutoField', + Database.SQL_BIGINT: 'BigIntegerField', + # Database.SQL_BINARY: , + Database.SQL_BIT: 'BooleanField', + Database.SQL_CHAR: 'CharField', + Database.SQL_DECIMAL: 'DecimalField', + Database.SQL_DOUBLE: 'FloatField', + Database.SQL_FLOAT: 'FloatField', + Database.SQL_GUID: 'TextField', + Database.SQL_INTEGER: 'IntegerField', + Database.SQL_LONGVARBINARY: 'BinaryField', + # Database.SQL_LONGVARCHAR: , + Database.SQL_NUMERIC: 'DecimalField', + Database.SQL_REAL: 'FloatField', + Database.SQL_SMALLINT: 'SmallIntegerField', + Database.SQL_SS_TIME2: 'TimeField', + Database.SQL_TINYINT: 'SmallIntegerField', + Database.SQL_TYPE_DATE: 'DateField', + Database.SQL_TYPE_TIME: 'TimeField', + Database.SQL_TYPE_TIMESTAMP: 'DateTimeField', + Database.SQL_VARBINARY: 'BinaryField', + Database.SQL_VARCHAR: 'TextField', + Database.SQL_WCHAR: 'CharField', + Database.SQL_WLONGVARCHAR: 'TextField', + Database.SQL_WVARCHAR: 
'TextField', } ignored_tables = [] @@ -48,10 +48,10 @@ def get_field_type(self, data_type, description): # (it depends on the driver) size = description.internal_size if field_type == 'CharField': - if size == 0 or size >= 2**30-1: + if size == 0 or size >= 2**30 - 1: field_type = "TextField" elif field_type == 'TextField': - if size > 0 and size < 2**30-1: + if size > 0 and size < 2**30 - 1: field_type = 'CharField' return field_type @@ -72,11 +72,11 @@ def _is_auto_field(self, cursor, table_name, column_name): """ # COLUMNPROPERTY: http://msdn2.microsoft.com/en-us/library/ms174968.aspx - #from django.db import connection - #cursor.execute("SELECT COLUMNPROPERTY(OBJECT_ID(%s), %s, 'IsIdentity')", + # from django.db import connection + # cursor.execute("SELECT COLUMNPROPERTY(OBJECT_ID(%s), %s, 'IsIdentity')", # (connection.ops.quote_name(table_name), column_name)) cursor.execute("SELECT COLUMNPROPERTY(OBJECT_ID(%s), %s, 'IsIdentity')", - (self.connection.ops.quote_name(table_name), column_name)) + (self.connection.ops.quote_name(table_name), column_name)) return cursor.fetchall()[0][0] def get_table_description(self, cursor, table_name, identity_check=True): @@ -112,7 +112,7 @@ def get_sequences(self, cursor, table_name, table_fields=()): SELECT c.name FROM sys.columns c INNER JOIN sys.tables t ON c.object_id = t.object_id WHERE t.schema_id = SCHEMA_ID() AND t.name = %s AND c.is_identity = 1""", - [table_name]) + [table_name]) # SQL Server allows only one identity column per table # https://docs.microsoft.com/en-us/sql/t-sql/statements/create-table-transact-sql-identity-property row = cursor.fetchone() @@ -306,7 +306,7 @@ def get_constraints(self, cursor, table_name): "check": False, "index": True, "orders": [], - "type": Index.suffix if type_ in (1,2) else desc.lower(), + "type": Index.suffix if type_ in (1, 2) else desc.lower(), } indexes[index]["columns"].append(column) indexes[index]["orders"].append("DESC" if order == 1 else "ASC") diff --git 
a/sql_server/pyodbc/operations.py b/sql_server/pyodbc/operations.py index 7cfc31f9..845d61e1 100644 --- a/sql_server/pyodbc/operations.py +++ b/sql_server/pyodbc/operations.py @@ -121,14 +121,19 @@ def date_interval_sql(self, timedelta): return sql def date_trunc_sql(self, lookup_type, field_name): + CONVERT_YEAR = 'CONVERT(varchar, DATEPART(year, %s))' % field_name + CONVERT_QUARTER = 'CONVERT(varchar, 1+((DATEPART(quarter, %s)-1)*3))' % field_name + CONVERT_MONTH = 'CONVERT(varchar, DATEPART(month, %s))' % field_name + if lookup_type == 'year': - return "CONVERT(datetime2, CONVERT(varchar, DATEPART(year, %s)) + '/01/01')" % field_name + return "CONVERT(datetime2, %s + '/01/01')" % CONVERT_YEAR if lookup_type == 'quarter': - return "CONVERT(datetime2, CONVERT(varchar, DATEPART(year, %s)) + '/' + CONVERT(varchar, 1+((DATEPART(quarter, %s)-1)*3)) + '/01')" % (field_name, field_name) + return "CONVERT(datetime2, %s + '/' + %s + '/01')" % (CONVERT_YEAR, CONVERT_QUARTER) if lookup_type == 'month': - return "CONVERT(datetime2, CONVERT(varchar, DATEPART(year, %s)) + '/' + CONVERT(varchar, DATEPART(month, %s)) + '/01')" % (field_name, field_name) + return "CONVERT(datetime2, %s + '/' + %s + '/01')" % (CONVERT_YEAR, CONVERT_MONTH) if lookup_type == 'week': - return "DATEADD(DAY, (DATEPART(weekday, %s) + 5) %%%% 7 * -1, CONVERT(datetime2, CONVERT(varchar(12), %s, 112)))" % (field_name, field_name) + CONVERT = "CONVERT(datetime2, CONVERT(varchar(12), %s, 112))" % field_name + return "DATEADD(DAY, (DATEPART(weekday, %s) + 5) %%%% 7 * -1, %s)" % (field_name, CONVERT) if lookup_type == 'day': return "CONVERT(datetime2, CONVERT(varchar(12), %s, 112))" % field_name @@ -169,7 +174,7 @@ def for_update_sql(self, nowait=False, skip_locked=False, of=()): def format_for_duration_arithmetic(self, sql): if sql == '%s': - # use DATEADD only once because Django prepares only one parameter for this + # use DATEADD only once because Django prepares only one parameter for this fmt 
= 'DATEADD(second, %s / 1000000%%s, CAST(%%s AS datetime2))' sql = '%%s' else: @@ -244,7 +249,7 @@ def quote_name(self, name): not quote the given name if it's already been quoted. """ if name.startswith('[') and name.endswith(']'): - return name # Quoting once is enough. + return name # Quoting once is enough. return '[%s]' % name def random_function_sql(self): @@ -331,15 +336,16 @@ def sql_flush(self, style, tables, sequences, allow_cascade=False): elem['start_id'] = 1 elem.update(seq) seqs.append(elem) - cursor.execute("SELECT TABLE_NAME, CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE CONSTRAINT_TYPE not in ('PRIMARY KEY','UNIQUE')") + cursor.execute( + "SELECT TABLE_NAME, CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE CONSTRAINT_TYPE not in ('PRIMARY KEY','UNIQUE')") fks = cursor.fetchall() - sql_list = ['ALTER TABLE %s NOCHECK CONSTRAINT %s;' % \ - (self.quote_name(fk[0]), self.quote_name(fk[1])) for fk in fks] + sql_list = ['ALTER TABLE %s NOCHECK CONSTRAINT %s;' % + (self.quote_name(fk[0]), self.quote_name(fk[1])) for fk in fks] sql_list.extend(['%s %s %s;' % (style.SQL_KEYWORD('DELETE'), style.SQL_KEYWORD('FROM'), - style.SQL_FIELD(self.quote_name(table)) ) for table in tables]) + style.SQL_FIELD(self.quote_name(table))) for table in tables]) if self.connection.to_azure_sql_db and self.connection.sql_server_version < 2014: - warnings.warn("Resetting identity columns is not supported " \ + warnings.warn("Resetting identity columns is not supported " "on this versios of Azure SQL Database.", RuntimeWarning) else: @@ -352,10 +358,10 @@ def sql_flush(self, style, tables, sequences, allow_cascade=False): style.SQL_FIELD('%d' % seq['start_id']), style.SQL_KEYWORD('WITH'), style.SQL_KEYWORD('NO_INFOMSGS'), - ) for seq in seqs]) + ) for seq in seqs]) - sql_list.extend(['ALTER TABLE %s CHECK CONSTRAINT %s;' % \ - (self.quote_name(fk[0]), self.quote_name(fk[1])) for fk in fks]) + sql_list.extend(['ALTER TABLE %s CHECK CONSTRAINT %s;' 
% + (self.quote_name(fk[0]), self.quote_name(fk[1])) for fk in fks]) return sql_list else: return [] @@ -375,7 +381,7 @@ def subtract_temporals(self, internal_type, lhs, rhs): else: sql = "CAST(DATEDIFF(second, %(rhs)s, %(lhs)s) AS bigint) * 1000000 + DATEPART(microsecond, %(lhs)s) - DATEPART(microsecond, %(rhs)s)" params = rhs_params + lhs_params * 2 + rhs_params - return sql % {'lhs':lhs_sql, 'rhs':rhs_sql}, params + return sql % {'lhs': lhs_sql, 'rhs': rhs_sql}, params def tablespace_sql(self, tablespace, inline=False): """ @@ -409,7 +415,7 @@ def adapt_datetimefield_value(self, value): return value def time_trunc_sql(self, lookup_type, field_name): - #if self.connection.sql_server_version >= 2012: + # if self.connection.sql_server_version >= 2012: # fields = { # 'hour': 'DATEPART(hour, %s)' % field_name, # 'minute': 'DATEPART(minute, %s)' % field_name if lookup_type != 'hour' else '0', diff --git a/sql_server/pyodbc/schema.py b/sql_server/pyodbc/schema.py index 7904126e..9a757ffb 100644 --- a/sql_server/pyodbc/schema.py +++ b/sql_server/pyodbc/schema.py @@ -9,7 +9,6 @@ ) from django.db.models import Index from django.db.models.fields import AutoField, BigAutoField -from django.db.models.fields.related import ManyToManyField from django.db.transaction import TransactionManagementError from django.utils.encoding import force_text @@ -192,11 +191,11 @@ def _alter_field(self, model, old_field, new_field, old_type, new_type, # True | False | False | True # True | False | True | True if (old_field.db_index and not old_field.unique and (not new_field.db_index or new_field.unique)) or ( - # Drop indexes on nvarchar columns that are changing to a different type - # SQL Server requires explicit deletion - (old_field.db_index or old_field.unique) and ( - (old_type.startswith('nvarchar') and not new_type.startswith('nvarchar')) - )): + # Drop indexes on nvarchar columns that are changing to a different type + # SQL Server requires explicit deletion + (old_field.db_index or 
old_field.unique) and ( + (old_type.startswith('nvarchar') and not new_type.startswith('nvarchar')) + )): # Find the index for this field meta_index_names = {index.name for index in model._meta.indexes} # Retrieve only BTREE indexes since this is what's created with @@ -630,7 +629,7 @@ def delete_model(self, model): self._sql_select_foreign_key_constraints % { "table": self.quote_value(model._meta.db_table), }, - has_result = True + has_result=True ) if result: for table, constraint in result: diff --git a/testapp/__init__.py b/testapp/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/testapp/settings.py b/testapp/settings.py new file mode 100644 index 00000000..7a54c907 --- /dev/null +++ b/testapp/settings.py @@ -0,0 +1,13 @@ +import dj_database_url + +DATABASES = { + 'default': dj_database_url.config(default='sqlite:///db.sqlite'), + 'other': dj_database_url.config(env='DATABASE_URL_OTHER', default='sqlite:///db.sqlite'), +} + +SECRET_KEY = "django_tests_secret_key" + +# Use a fast hasher to speed up tests. +PASSWORD_HASHERS = [ + 'django.contrib.auth.hashers.MD5PasswordHasher', +]