Skip to content

Commit

Permalink
Use formatting with Black (#35)
Browse files Browse the repository at this point in the history
* Use formatting with Black

See also: [DEP 0008](https://github.com/django/deps/blob/main/final/0008-black.rst)

* Apply black to django_tidb/*
  • Loading branch information
dveeden committed Apr 5, 2023
1 parent 29f9bd0 commit 2e9a974
Show file tree
Hide file tree
Showing 8 changed files with 386 additions and 375 deletions.
43 changes: 25 additions & 18 deletions django_tidb/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,8 @@


class DatabaseWrapper(MysqlDatabaseWrapper):
vendor = 'tidb'
display_name = 'TiDB'
vendor = "tidb"
display_name = "TiDB"

SchemaEditorClass = DatabaseSchemaEditor
# Classes instantiated in __init__().
Expand All @@ -43,10 +43,12 @@ class DatabaseWrapper(MysqlDatabaseWrapper):
@cached_property
def data_type_check_constraints(self):
if self.features.supports_column_check_constraints:
check_constraints = {'PositiveBigIntegerField': '`%(column)s` >= 0',
'PositiveIntegerField': '`%(column)s` >= 0',
'PositiveSmallIntegerField': '`%(column)s` >= 0',
'JSONField': 'JSON_VALID(`%(column)s`)'}
check_constraints = {
"PositiveBigIntegerField": "`%(column)s` >= 0",
"PositiveIntegerField": "`%(column)s` >= 0",
"PositiveSmallIntegerField": "`%(column)s` >= 0",
"JSONField": "JSON_VALID(`%(column)s`)",
}
# MariaDB < 10.4.3 doesn't automatically use the JSON_VALID as
# a check constraint.
return check_constraints
Expand All @@ -58,36 +60,41 @@ def tidb_server_data(self):
# Select some server variables and test if the time zone
# definitions are installed. CONVERT_TZ returns NULL if 'UTC'
# timezone isn't loaded into the mysql.time_zone table.
cursor.execute("""
cursor.execute(
"""
SELECT VERSION(),
@@sql_mode,
@@default_storage_engine,
@@sql_auto_is_null,
@@lower_case_table_names,
CONVERT_TZ('2001-01-01 01:00:00', 'UTC', 'UTC') IS NOT NULL
""")
"""
)
row = cursor.fetchone()
return {
'version': row[0],
'sql_mode': row[1],
'default_storage_engine': row[2],
'sql_auto_is_null': bool(row[3]),
'lower_case_table_names': bool(row[4]),
'has_zoneinfo_database': bool(row[5]),
"version": row[0],
"sql_mode": row[1],
"default_storage_engine": row[2],
"sql_auto_is_null": bool(row[3]),
"lower_case_table_names": bool(row[4]),
"has_zoneinfo_database": bool(row[5]),
}

@cached_property
def tidb_server_info(self):
return self.tidb_server_data['version']
return self.tidb_server_data["version"]

@cached_property
def tidb_version(self):
match = server_version.match(self.tidb_server_info)
if not match:
raise Exception('Unable to determine Tidb version from version string %r' % self.tidb_server_info)
raise Exception(
"Unable to determine Tidb version from version string %r"
% self.tidb_server_info
)
return server_version.version

@cached_property
def sql_mode(self):
sql_mode = self.tidb_server_data['sql_mode']
return set(sql_mode.split(',') if sql_mode else ())
sql_mode = self.tidb_server_data["sql_mode"]
return set(sql_mode.split(",") if sql_mode else ())
548 changes: 262 additions & 286 deletions django_tidb/features.py

Large diffs are not rendered by default.

6 changes: 4 additions & 2 deletions django_tidb/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,10 @@

def char(self, compiler, connection, **extra_context):
return self.as_sql(
compiler, connection, function='CHAR',
template='%(function)s(%(expressions)s USING utf8mb4)',
compiler,
connection,
function="CHAR",
template="%(function)s(%(expressions)s USING utf8mb4)",
**extra_context
)

Expand Down
129 changes: 76 additions & 53 deletions django_tidb/introspection.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,13 @@
from django.db.models import Index
from django.utils.datastructures import OrderedSet

FieldInfo = namedtuple('FieldInfo', BaseFieldInfo._fields + ('extra', 'is_unsigned', 'has_json_constraint'))
FieldInfo = namedtuple(
"FieldInfo", BaseFieldInfo._fields + ("extra", "is_unsigned", "has_json_constraint")
)
InfoLine = namedtuple(
'InfoLine',
'col_name data_type max_len num_prec num_scale extra column_default '
'collation is_unsigned'
"InfoLine",
"col_name data_type max_len num_prec num_scale extra column_default "
"collation is_unsigned",
)


Expand All @@ -38,30 +40,37 @@ def get_table_description(self, cursor, table_name):
if self.connection.features.can_introspect_json_field:
# JSON data type is an alias for LONGTEXT in MariaDB, select
# JSON_VALID() constraints to introspect JSONField.
cursor.execute("""
cursor.execute(
"""
SELECT c.constraint_name AS column_name
FROM information_schema.check_constraints AS c
WHERE
c.table_name = %s AND
LOWER(c.check_clause) = 'json_valid(`' + LOWER(c.constraint_name) + '`)' AND
c.constraint_schema = DATABASE()
""", [table_name])
""",
[table_name],
)
json_constraints = {row[0] for row in cursor.fetchall()}
# A default collation for the given table.
cursor.execute("""
cursor.execute(
"""
SELECT table_collation
FROM information_schema.tables
WHERE table_schema = DATABASE()
AND table_name = %s
""", [table_name])
""",
[table_name],
)
row = cursor.fetchone()
default_column_collation = row[0] if row else ''
default_column_collation = row[0] if row else ""
# information_schema database gives more accurate results for some figures:
# - varchar length returned by cursor.description is an internal length,
# not visible length (#5725)
# - precision and scale (for decimal fields) (#5014)
# - auto_increment is not available in cursor.description
cursor.execute("""
cursor.execute(
"""
SELECT
column_name, data_type, character_maximum_length,
numeric_precision, numeric_scale, extra, column_default,
Expand All @@ -75,29 +84,35 @@ def get_table_description(self, cursor, table_name):
END AS is_unsigned
FROM information_schema.columns
WHERE table_name = %s AND table_schema = DATABASE()
""", [default_column_collation, table_name])
""",
[default_column_collation, table_name],
)
field_info = {line[0]: InfoLine(*line) for line in cursor.fetchall()}

cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
cursor.execute(
"SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name)
)

def to_int(i):
return int(i) if i is not None else i

fields = []
for line in cursor.description:
info = field_info[line[0]]
fields.append(FieldInfo(
*line[:3],
to_int(info.max_len) or line[3],
to_int(info.num_prec) or line[4],
to_int(info.num_scale) or line[5],
line[6],
info.column_default,
info.collation,
info.extra,
info.is_unsigned,
line[0] in json_constraints,
))
fields.append(
FieldInfo(
*line[:3],
to_int(info.max_len) or line[3],
to_int(info.num_prec) or line[4],
to_int(info.num_scale) or line[5],
line[6],
info.column_default,
info.collation,
info.extra,
info.is_unsigned,
line[0] in json_constraints,
)
)
return fields

def get_constraints(self, cursor, table_name):
Expand Down Expand Up @@ -126,20 +141,22 @@ def get_constraints(self, cursor, table_name):
for constraint, column, ref_table, ref_column, kind in cursor.fetchall():
if constraint not in constraints:
constraints[constraint] = {
'columns': OrderedSet(),
'primary_key': kind == 'PRIMARY KEY',
'unique': kind in {'PRIMARY KEY', 'UNIQUE'},
'index': False,
'check': False,
'foreign_key': (ref_table, ref_column) if ref_column else None,
"columns": OrderedSet(),
"primary_key": kind == "PRIMARY KEY",
"unique": kind in {"PRIMARY KEY", "UNIQUE"},
"index": False,
"check": False,
"foreign_key": (ref_table, ref_column) if ref_column else None,
}
if self.connection.features.supports_index_column_ordering:
constraints[constraint]['orders'] = []
constraints[constraint]['columns'].add(column)
constraints[constraint]["orders"] = []
constraints[constraint]["columns"].add(column)
# Add check constraints.
if self.connection.features.can_introspect_check_constraints:
unnamed_constraints_index = 0
columns = {info.name for info in self.get_table_description(cursor, table_name)}
columns = {
info.name for info in self.get_table_description(cursor, table_name)
}
type_query = """
SELECT cc.constraint_name, cc.check_clause
FROM
Expand All @@ -154,42 +171,48 @@ def get_constraints(self, cursor, table_name):
"""
cursor.execute(type_query, [table_name])
for constraint, check_clause in cursor.fetchall():
constraint_columns = self._parse_constraint_columns(check_clause, columns)
constraint_columns = self._parse_constraint_columns(
check_clause, columns
)
# Ensure uniqueness of unnamed constraints. Unnamed unique
# and check columns constraints have the same name as
# a column.
if set(constraint_columns) == {constraint}:
unnamed_constraints_index += 1
constraint = '__unnamed_constraint_%s__' % unnamed_constraints_index
constraint = "__unnamed_constraint_%s__" % unnamed_constraints_index
constraints[constraint] = {
'columns': constraint_columns,
'primary_key': False,
'unique': False,
'index': False,
'check': True,
'foreign_key': None,
"columns": constraint_columns,
"primary_key": False,
"unique": False,
"index": False,
"check": True,
"foreign_key": None,
}
# Now add in the indexes
cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name))
cursor.execute(
"SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name)
)
for table, non_unique, index, colseq, column, order, type_ in [
x[:6] + (x[10],) for x in cursor.fetchall()
]:
if index not in constraints:
constraints[index] = {
'columns': OrderedSet(),
'primary_key': False,
'unique': not non_unique,
'check': False,
'foreign_key': None,
"columns": OrderedSet(),
"primary_key": False,
"unique": not non_unique,
"check": False,
"foreign_key": None,
}
if self.connection.features.supports_index_column_ordering:
constraints[index]['orders'] = []
constraints[index]['index'] = True
constraints[index]['type'] = Index.suffix if type_ == 'BTREE' else type_.lower()
constraints[index]['columns'].add(column)
constraints[index]["orders"] = []
constraints[index]["index"] = True
constraints[index]["type"] = (
Index.suffix if type_ == "BTREE" else type_.lower()
)
constraints[index]["columns"].add(column)
if self.connection.features.supports_index_column_ordering:
constraints[index]['orders'].append('DESC' if order == 'D' else 'ASC')
constraints[index]["orders"].append("DESC" if order == "D" else "ASC")
# Convert the sorted sets to lists
for constraint in constraints.values():
constraint['columns'] = list(constraint['columns'])
constraint["columns"] = list(constraint["columns"])
return constraints
24 changes: 12 additions & 12 deletions django_tidb/operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,30 +22,30 @@ def explain_query_prefix(self, format=None, **options):
supported_formats = self.connection.features.supported_explain_formats
normalized_format = format.upper()
if normalized_format not in supported_formats:
msg = '%s is not a recognized format.' % normalized_format
msg = "%s is not a recognized format." % normalized_format
if supported_formats:
msg += ' Allowed formats: %s' % ', '.join(sorted(supported_formats))
msg += " Allowed formats: %s" % ", ".join(sorted(supported_formats))
raise ValueError(msg)
if options:
raise ValueError('Unknown options: %s' % ', '.join(sorted(options.keys())))
analyze = options.pop('analyze', False)
raise ValueError("Unknown options: %s" % ", ".join(sorted(options.keys())))
analyze = options.pop("analyze", False)
prefix = self.explain_prefix
if analyze and self.connection.features.supports_explain_analyze:
prefix += ' ANALYZE'
prefix += " ANALYZE"
if format and not analyze:
# TiDB supports the analyze option together with a format, but only the "ROW" format.
prefix += ' FORMAT=\"%s\"' % format
prefix += ' FORMAT="%s"' % format
return prefix

def regex_lookup(self, lookup_type):
# REGEXP BINARY doesn't work correctly in MySQL 8+ and REGEXP_LIKE
# doesn't exist in MySQL 5.x or in MariaDB.
if lookup_type == 'regex':
return '%s REGEXP BINARY %s'
return '%s REGEXP %s'
if lookup_type == "regex":
return "%s REGEXP BINARY %s"
return "%s REGEXP %s"

def lookup_cast(self, lookup_type, internal_type=None):
lookup = '%s'
if internal_type == 'JSONField':
lookup = 'JSON_UNQUOTE(%s)'
lookup = "%s"
if internal_type == "JSONField":
lookup = "JSON_UNQUOTE(%s)"
return lookup
4 changes: 2 additions & 2 deletions django_tidb/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,11 @@
class DatabaseSchemaEditor(MysqlDatabaseSchemaEditor):
@property
def sql_delete_check(self):
return 'ALTER TABLE %(table)s DROP CHECK %(name)s'
return "ALTER TABLE %(table)s DROP CHECK %(name)s"

@property
def sql_rename_column(self):
return 'ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s'
return "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s"

def skip_default_on_alter(self, field):
return False
Expand Down
5 changes: 3 additions & 2 deletions django_tidb/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.


# TiDBVersion deals with TiDB's version string.
# The TiDB version string is obtained from ```select version();```
# It looks like this:
Expand All @@ -20,10 +21,10 @@ class TiDBVersion:
_version = (0, 0, 0)

def match(self, version):
version_list = version.split('-')
version_list = version.split("-")
if len(version_list) < 3:
return False
tidb_version_list = version_list[2].lstrip('v').split('.')
tidb_version_list = version_list[2].lstrip("v").split(".")
self._version = tuple(int(x) for x in tidb_version_list)
return True

Expand Down
2 changes: 2 additions & 0 deletions tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -34,5 +34,7 @@ setenv =
skip_install = True
deps =
flake8==6.0.0
black==23.1.0
commands =
flake8 --max-line-length 130 django_tidb
black --diff --check django_tidb

0 comments on commit 2e9a974

Please sign in to comment.