Permalink
Browse files

schema-evolution: re-applied schema evolution changes from an earlier branch:

  • Loading branch information...
1 parent 365f4b8 commit dac5af33de86877535f38eee0a8e11629cbbb5bc @keredson keredson committed Jul 20, 2007
View
@@ -481,6 +481,228 @@ def get_sql_indexes_for_model(model):
)
return output
def get_sql_evolution(app):
    "Returns SQL to update an existing schema to match the existing models."
    from django.db import get_creation_module, models, backend, get_introspection_module, connection
    data_types = get_creation_module().DATA_TYPES

    if not data_types:
        # This must be the "dummy" database backend, which means the user
        # hasn't set DATABASE_ENGINE.
        sys.stderr.write(style.ERROR("Error: Django doesn't know which syntax to use for your SQL statements,\n" +
            "because you haven't specified the DATABASE_ENGINE setting.\n" +
            "Edit your settings file and change DATABASE_ENGINE to something like 'postgresql' or 'mysql'.\n"))
        sys.exit(1)

    # First, try validating the models.
    _check_for_validation_errors()

    final_output = []

    # Stolen and trimmed from syncdb so that we know which models are about
    # to be created (so we don't check them for schema updates).
    table_list = _get_table_list()
    seen_models = _get_installed_models(table_list)
    created_models = set()

    for model in models.get_models(app):
        # Skip models whose table already exists under its current name or
        # under any former name (aka may be a string or a list/tuple).
        if model._meta.db_table in table_list or model._meta.aka in table_list or len(set(model._meta.aka) & set(table_list)) > 0:
            continue
        # The generated SQL is not needed here; the call is kept for its
        # bookkeeping side effects on seen_models handling in helpers.
        _get_sql_model_create(model, seen_models)
        seen_models.add(model)
        created_models.add(model)
        table_list.append(model._meta.db_table)

    # Examine only the pre-existing models (exclude the ones just "created").
    app_models = [m for m in models.get_models(app) if m not in created_models]

    for klass in app_models:

        output, new_table_name = get_sql_evolution_check_for_changed_model_name(klass)
        final_output.extend(output)

        output = get_sql_evolution_check_for_changed_field_flags(klass, new_table_name)
        final_output.extend(output)

        output = get_sql_evolution_check_for_changed_field_name(klass, new_table_name)
        final_output.extend(output)

        output = get_sql_evolution_check_for_new_fields(klass, new_table_name)
        final_output.extend(output)

        output = get_sql_evolution_check_for_dead_fields(klass, new_table_name)
        final_output.extend(output)

    return final_output
get_sql_evolution.help_doc = "Returns SQL to update an existing schema to match the existing models."
get_sql_evolution.args = APP_ARGS
+
def get_sql_evolution_check_for_new_fields(klass, new_table_name):
    "checks for model fields that are not in the existing data structure"
    from django.db import backend, get_creation_module, get_introspection_module, connection
    data_types = get_creation_module().DATA_TYPES
    cursor = connection.cursor()
    introspection = get_introspection_module()
    opts = klass._meta
    output = []
    db_table = opts.db_table
    if new_table_name:
        db_table = new_table_name
    # The column list is loop-invariant (no DDL is executed while scanning),
    # so fetch it once instead of once per field.
    existing_fields = introspection.get_columns(cursor, db_table)
    for f in opts.fields:
        # A field is "new" when neither its current column name nor any of
        # its former names (aka: string or list/tuple) exist in the table.
        if f.column not in existing_fields and f.aka not in existing_fields and len(set(f.aka) & set(existing_fields)) == 0:
            data_type = f.get_internal_type()
            col_type = data_types[data_type]
            if col_type is not None:
                # Delegate statement construction to the backend so each
                # database's ADD COLUMN syntax is handled in one place.
                output.append(backend.get_add_column_sql(db_table, f.column, style.SQL_COLTYPE(col_type % f.__dict__), f.null, f.unique, f.primary_key))
    return output
+
def get_sql_evolution_check_for_changed_model_name(klass):
    """Checks whether the model's table exists under a former name.

    Returns (sql_statements, old_table_name); old_table_name is None when no
    rename is needed, otherwise it is the name the table currently has in the
    database (useful to the other checks, which run before the rename).
    """
    from django.db import backend, get_introspection_module, connection
    cursor = connection.cursor()
    introspection = get_introspection_module()
    table_list = introspection.get_table_list(cursor)
    if klass._meta.db_table in table_list:
        # Table already has the current name; nothing to rename.
        return [], None
    if klass._meta.aka in table_list:
        # aka is a single former name (string) that still exists.
        return [ 'ALTER TABLE '+ backend.quote_name(klass._meta.aka) +' RENAME TO '+ backend.quote_name(klass._meta.db_table) + ';' ], klass._meta.aka
    # aka is a list/tuple of former names; rename only when exactly one of
    # them exists, and rename *that* one (not blindly aka[0]).
    matches = set(klass._meta.aka) & set(table_list)
    if len(matches) == 1:
        old_table = matches.pop()
        return [ 'ALTER TABLE '+ backend.quote_name(old_table) +' RENAME TO '+ backend.quote_name(klass._meta.db_table) + ';' ], old_table
    return [], None
+
def get_sql_evolution_check_for_changed_field_name(klass, new_table_name):
    "checks for model fields whose column was renamed (tracked via aka) and returns rename SQL"
    from django.db import backend, get_creation_module, get_introspection_module, connection
    data_types = get_creation_module().DATA_TYPES
    cursor = connection.cursor()
    introspection = get_introspection_module()
    opts = klass._meta
    output = []
    db_table = opts.db_table
    if new_table_name:
        db_table = new_table_name
    # Column list is loop-invariant (no DDL executed while scanning).
    existing_fields = introspection.get_columns(cursor, db_table)
    for f in opts.fields:
        # Renamed when the current column is missing but a former name is
        # present.  (The original condition read `(a or len(...)) == 1`,
        # binding == to the whole `or`; parenthesized here to say what is
        # actually meant.)
        if f.column not in existing_fields and (f.aka in existing_fields or len(set(f.aka) & set(existing_fields)) == 1):
            if isinstance(f.aka, str):
                old_col = f.aka
            else:
                # Pick the former name that actually exists in the table.
                matches = set(f.aka) & set(existing_fields)
                if matches:
                    old_col = matches.pop()
                else:
                    old_col = f.aka[0]
            data_type = f.get_internal_type()
            col_type = data_types[data_type]
            if col_type is not None:
                # MySQL needs the full column definition to rename; build it.
                col_def = style.SQL_COLTYPE(col_type % f.__dict__) +' '+ style.SQL_KEYWORD('%sNULL' % (not f.null and 'NOT ' or ''))
                if f.unique:
                    col_def += style.SQL_KEYWORD(' UNIQUE')
                if f.primary_key:
                    col_def += style.SQL_KEYWORD(' PRIMARY KEY')
                output.append( backend.get_change_column_name_sql( klass._meta.db_table, introspection.get_indexes(cursor,db_table), backend.quote_name(old_col), backend.quote_name(f.column), col_def ) )
    return output
+
def get_sql_evolution_check_for_changed_field_flags(klass, new_table_name):
    "checks existing columns whose flags (null/unique/primary-key/maxlength) no longer match the model"
    from django.db import backend, get_creation_module, get_introspection_module, connection
    from django.db.models.fields import CharField, SlugField
    data_types = get_creation_module().DATA_TYPES
    cursor = connection.cursor()
    introspection = get_introspection_module()
    opts = klass._meta
    output = []
    db_table = opts.db_table
    if new_table_name:
        db_table = new_table_name
    # Column list is loop-invariant (no DDL executed while scanning).
    existing_fields = introspection.get_columns(cursor, db_table)
    for f in opts.fields:
        # Resolve the column as it currently exists in the database, i.e.
        # before any pending renames (aka may be a string or a list/tuple).
        if f.column in existing_fields:
            cf = f.column
        elif f.aka in existing_fields:
            cf = f.aka
        elif len(set(f.aka) & set(existing_fields)) == 1:
            # Use the former name that actually exists (not blindly aka[0]).
            cf = (set(f.aka) & set(existing_fields)).pop()
        else:
            # No idea what column you're talking about - should be handled
            # by get_sql_evolution_check_for_new_fields()
            continue
        data_type = f.get_internal_type()
        if data_type in data_types:
            column_flags = introspection.get_known_column_flags(cursor, db_table, cf)
            # maxlength only applies to non-PK Char/Slug fields; .get() avoids
            # a KeyError when the db column isn't a varchar (flags dict then
            # has no 'maxlength' key) -- that mismatch also counts as changed.
            maxlength_changed = (not f.primary_key and isinstance(f, (CharField, SlugField))
                                 and column_flags.get('maxlength') != str(f.maxlength))
            if column_flags['allow_null'] != f.null or \
                    maxlength_changed or \
                    column_flags['unique'] != f.unique or \
                    column_flags['primary_key'] != f.primary_key:
                col_type = data_types[data_type]
                col_type_def = style.SQL_COLTYPE(col_type % f.__dict__)
                output.append( backend.get_change_column_def_sql( db_table, cf, col_type_def, f.null, f.unique, f.primary_key ) )
    return output
+
def get_sql_evolution_check_for_dead_fields(klass, new_table_name):
    "checks for database columns that no model field (current name or aka) accounts for"
    from django.db import backend, get_introspection_module, connection
    cursor = connection.cursor()
    introspection = get_introspection_module()
    opts = klass._meta
    output = []
    db_table = opts.db_table
    if new_table_name:
        db_table = new_table_name
    # Start from every column in the table and strike out the ones the model
    # still claims; whatever is left is presumed dead.
    suspect_fields = set(introspection.get_columns(cursor,db_table))
    for f in opts.fields:
        suspect_fields.discard(f.column)
        # aka may be a single former name (string) or a collection of them.
        # discard() raises TypeError on an unhashable list, and
        # difference_update() on a string would strip single-character
        # column names, so dispatch on the type explicitly.
        if isinstance(f.aka, str):
            suspect_fields.discard(f.aka)
        elif f.aka is not None:
            suspect_fields.difference_update(f.aka)
    if len(suspect_fields) > 0:
        output.append( '-- warning: as the following may cause data loss, it/they must be run manually' )
        for suspect_field in suspect_fields:
            output.append( backend.get_drop_column_sql( db_table, suspect_field ) )
        output.append( '-- end warning' )
    return output
+
def get_sql_all(app):
    "Returns a list of CREATE TABLE SQL, initial-data inserts, and CREATE INDEX SQL for the given module."
    statements = []
    statements.extend(get_sql_create(app))
    statements.extend(get_custom_sql(app))
    statements.extend(get_sql_indexes(app))
    return statements
@@ -540,7 +762,7 @@ def syncdb(verbosity=1, interactive=True):
# Create the model's database table, if it doesn't already exist.
if verbosity >= 2:
print "Processing %s.%s model" % (app_name, model._meta.object_name)
- if table_name_converter(model._meta.db_table) in table_list:
+ if table_name_converter(model._meta.db_table) in table_list or table_name_converter(model._meta.aka) in table_list or len(set(model._meta.aka) & set(table_list))>0:
continue
sql, references = _get_sql_model_create(model, seen_models)
seen_models.add(model)
@@ -568,6 +790,10 @@ def syncdb(verbosity=1, interactive=True):
for statement in sql:
cursor.execute(statement)
+ for sql in get_sql_evolution(app):
+ print sql
+# cursor.execute(sql)
+
transaction.commit_unless_managed()
# Send the post_syncdb signal, so individual apps can do whatever they need
@@ -1521,6 +1747,7 @@ def dump_data(app_labels, format='json', indent=None):
'sqlinitialdata': get_sql_initial_data,
'sqlreset': get_sql_reset,
'sqlsequencereset': get_sql_sequence_reset,
+ 'sqlevolve': get_sql_evolution,
'startapp': startapp,
'startproject': startproject,
'syncdb': syncdb,
@@ -242,6 +242,48 @@ def get_sql_sequence_reset(style, model_list):
# No sequence reset required
return []
def get_change_column_name_sql( table_name, indexes, old_col_name, new_col_name, col_def ):
    """Returns SQL renaming a column.

    MySQL has no bare RENAME COLUMN (in this era), so CHANGE COLUMN is used,
    which requires restating the full column definition (col_def).
    `indexes` is accepted for interface parity with other backends; the
    primary-key scan the original did with it was dead code and was removed.
    """
    # TODO: only supports a single primary key so far
    return 'ALTER TABLE '+ quote_name(table_name) +' CHANGE COLUMN '+ quote_name(old_col_name) +' '+ quote_name(new_col_name) +' '+ col_def + ';'
+
def get_change_column_def_sql( table_name, col_name, col_type, null, unique, primary_key ):
    "Returns SQL redefining an existing column's type and flags via MODIFY COLUMN."
    pieces = [col_type]
    if null:
        pieces.append('NULL')
    else:
        pieces.append('NOT NULL')
    if unique:
        pieces.append('UNIQUE')
    if primary_key:
        pieces.append('PRIMARY KEY')
    return 'ALTER TABLE '+ quote_name(table_name) +' MODIFY COLUMN '+ quote_name(col_name) +' '+ ' '.join(pieces) + ';'
+
def get_add_column_sql( table_name, col_name, col_type, null, unique, primary_key ):
    "Returns SQL adding a new column with the given type and constraint flags."
    statement = 'ALTER TABLE ' + quote_name(table_name) + ' ADD COLUMN ' + quote_name(col_name) + ' ' + col_type
    if null:
        statement += ' NULL'
    else:
        statement += ' NOT NULL'
    if unique:
        statement += ' UNIQUE'
    if primary_key:
        statement += ' PRIMARY KEY'
    return statement + ';'
+
def get_drop_column_sql( table_name, col_name ):
    # Emitted commented out: dropping a column is destructive, so the
    # statement is left for the user to run manually.
    return '-- ALTER TABLE '+ quote_name(table_name) +' DROP COLUMN '+ quote_name(col_name) + ';'
+
+
OPERATOR_MAPPING = {
'exact': '= %s',
'iexact': 'LIKE %s',
@@ -73,6 +73,43 @@ def get_indexes(cursor, table_name):
indexes[row[4]] = {'primary_key': (row[2] == 'PRIMARY'), 'unique': not bool(row[1])}
return indexes
def get_columns(cursor, table_name):
    "Returns the list of column names for table_name, or [] if it cannot be described."
    try:
        cursor.execute("describe %s" % quote_name(table_name))
        return [row[0] for row in cursor.fetchall()]
    except Exception:
        # Narrowed from a bare `except:`; the table most likely doesn't
        # exist yet, so treat any failure as "no columns".
        return []
+
def get_known_column_flags( cursor, table_name, column_name ):
    """Returns a dict of flags for the named column, read from MySQL DESCRIBE.

    Keys: 'allow_null', 'primary_key', 'foreign_key', 'unique', 'default',
    plus 'maxlength' for varchar columns only.  Returns an empty dict when
    the column is not found.
    """
    cursor.execute("describe %s" % quote_name(table_name))
    flags = {}  # renamed from `dict`, which shadowed the builtin
    for row in cursor.fetchall():
        if row[0] == column_name:

            # maxlength: parse the size out of e.g. "varchar(100)"
            if row[1][0:7] == 'varchar':
                flags['maxlength'] = row[1][8:len(row[1]) - 1]

            # null flag from the "Null" column
            flags['allow_null'] = (row[2] == 'YES')

            # primary/foreign/unique key flags from the "Key" column
            flags['primary_key'] = (row[3] == 'PRI')
            flags['foreign_key'] = (row[3] == 'FOR')
            flags['unique'] = (row[3] == 'UNI')

            # default value as reported (note: may be the string 'NULL')
            flags['default'] = row[4]

    return flags
+
DATA_TYPES_REVERSE = {
FIELD_TYPE.BLOB: 'TextField',
FIELD_TYPE.CHAR: 'CharField',
@@ -282,6 +282,42 @@ def typecast_string(s):
return s
return smart_unicode(s)
def get_change_column_name_sql( table_name, indexes, old_col_name, new_col_name, col_def ):
    """Returns SQL renaming a column via PostgreSQL's RENAME COLUMN.

    `indexes` and `col_def` are accepted for interface parity with backends
    that need them (e.g. MySQL's CHANGE COLUMN); PostgreSQL does not.  The
    primary-key scan the original did over `indexes` was dead code and was
    removed.
    """
    # TODO: only supports a single primary key so far
    return 'ALTER TABLE '+ quote_name(table_name) +' RENAME COLUMN '+ quote_name(old_col_name) +' TO '+ quote_name(new_col_name) +';'
+
def get_change_column_def_sql( table_name, col_name, col_type, null, unique, primary_key ):
    """Returns SQL rebuilding a column with a new definition.

    PostgreSQL (of this era) cannot change a column's type in place, so the
    column is rebuilt: add a temp column, copy the data, drop the original,
    rename the temp back, then re-apply NOT NULL / UNIQUE as requested.
    """
    qn_table = quote_name(table_name)
    qn_col = quote_name(col_name)
    qn_tmp = quote_name(col_name+'_tmp')
    statements = []
    statements.append( 'ALTER TABLE '+ qn_table +' ADD COLUMN '+ qn_tmp +' '+ col_type + ';' )
    statements.append( 'UPDATE '+ qn_table +' SET '+ qn_tmp +' = '+ qn_col + ';' )
    statements.append( 'ALTER TABLE '+ qn_table +' DROP COLUMN '+ qn_col +';' )
    statements.append( 'ALTER TABLE '+ qn_table +' RENAME COLUMN '+ qn_tmp +' TO '+ qn_col + ';' )
    if not null:
        statements.append( 'ALTER TABLE '+ qn_table +' ALTER COLUMN '+ qn_col +' SET NOT NULL;' )
    if unique:
        statements.append( 'ALTER TABLE '+ qn_table +' ADD CONSTRAINT '+ table_name +'_'+ col_name +'_unique_constraint UNIQUE('+ col_name +');' )

    return '\n'.join(statements)
+
def get_add_column_sql( table_name, col_name, col_type, null, unique, primary_key ):
    "Returns SQL adding a column, with NOT NULL / UNIQUE applied as follow-up statements."
    qn_table = quote_name(table_name)
    qn_col = quote_name(col_name)
    statements = [ 'ALTER TABLE '+ qn_table +' ADD COLUMN '+ qn_col +' '+ col_type + ';' ]
    if not null:
        statements.append( 'ALTER TABLE '+ qn_table +' ALTER COLUMN '+ qn_col +' SET NOT NULL;' )
    if unique:
        statements.append( 'ALTER TABLE '+ qn_table +' ADD CONSTRAINT '+ table_name +'_'+ col_name +'_unique_constraint UNIQUE('+ col_name +');' )
    return '\n'.join(statements)
+
def get_drop_column_sql( table_name, col_name ):
    # Emitted commented out: dropping a column is destructive, so the
    # statement is left for the user to run manually.
    return '-- ALTER TABLE '+ quote_name(table_name) +' DROP COLUMN '+ quote_name(col_name) + ';'
+
# Register these custom typecasts, because Django expects dates/times to be
# in Python's native (standard-library) datetime/time format, whereas psycopg
# use mx.DateTime by default.
Oops, something went wrong.

0 comments on commit dac5af3

Please sign in to comment.