added south

commit 56e66e9c3d3b08a5dea363c55da0cb85ce4a1080 1 parent 2e960ed
ednapiranha authored
Showing with 10,873 additions and 0 deletions.
  1. +9 −0 vendor-local/lib/python/south/__init__.py
  2. +5 −0 vendor-local/lib/python/south/creator/__init__.py
  3. +540 −0 vendor-local/lib/python/south/creator/actions.py
  4. +489 −0 vendor-local/lib/python/south/creator/changes.py
  5. +190 −0 vendor-local/lib/python/south/creator/freezer.py
  6. +81 −0 vendor-local/lib/python/south/db/__init__.py
  7. +322 −0 vendor-local/lib/python/south/db/firebird.py
  8. +1,139 −0 vendor-local/lib/python/south/db/generic.py
  9. +280 −0 vendor-local/lib/python/south/db/mysql.py
  10. +299 −0 vendor-local/lib/python/south/db/oracle.py
  11. +89 −0 vendor-local/lib/python/south/db/postgresql_psycopg2.py
  12. 0  vendor-local/lib/python/south/db/sql_server/__init__.py
  13. +434 −0 vendor-local/lib/python/south/db/sql_server/pyodbc.py
  14. +252 −0 vendor-local/lib/python/south/db/sqlite3.py
  15. +151 −0 vendor-local/lib/python/south/exceptions.py
  16. +10 −0 vendor-local/lib/python/south/hacks/__init__.py
  17. +107 −0 vendor-local/lib/python/south/hacks/django_1_0.py
  18. +10 −0 vendor-local/lib/python/south/introspection_plugins/__init__.py
  19. +11 −0 vendor-local/lib/python/south/introspection_plugins/annoying_autoonetoone.py
  20. +30 −0 vendor-local/lib/python/south/introspection_plugins/django_audit_log.py
  21. +16 −0 vendor-local/lib/python/south/introspection_plugins/django_objectpermissions.py
  22. +24 −0 vendor-local/lib/python/south/introspection_plugins/django_tagging.py
  23. +14 −0 vendor-local/lib/python/south/introspection_plugins/django_taggit.py
  24. +21 −0 vendor-local/lib/python/south/introspection_plugins/django_timezones.py
  25. +45 −0 vendor-local/lib/python/south/introspection_plugins/geodjango.py
  26. +38 −0 vendor-local/lib/python/south/logger.py
  27. 0  vendor-local/lib/python/south/management/__init__.py
  28. +40 −0 vendor-local/lib/python/south/management/commands/__init__.py
  29. +93 −0 vendor-local/lib/python/south/management/commands/convert_to_south.py
  30. +124 −0 vendor-local/lib/python/south/management/commands/datamigration.py
  31. +61 −0 vendor-local/lib/python/south/management/commands/graphmigrations.py
  32. +260 −0 vendor-local/lib/python/south/management/commands/migrate.py
  33. +67 −0 vendor-local/lib/python/south/management/commands/migrationcheck.py
  34. +215 −0 vendor-local/lib/python/south/management/commands/schemamigration.py
  35. +31 −0 vendor-local/lib/python/south/management/commands/startmigration.py
  36. +111 −0 vendor-local/lib/python/south/management/commands/syncdb.py
  37. +8 −0 vendor-local/lib/python/south/management/commands/test.py
  38. +8 −0 vendor-local/lib/python/south/management/commands/testserver.py
  39. +228 −0 vendor-local/lib/python/south/migration/__init__.py
  40. +439 −0 vendor-local/lib/python/south/migration/base.py
  41. +360 −0 vendor-local/lib/python/south/migration/migrators.py
  42. +83 −0 vendor-local/lib/python/south/migration/utils.py
  43. +37 −0 vendor-local/lib/python/south/models.py
  44. +462 −0 vendor-local/lib/python/south/modelsinspector.py
  45. +400 −0 vendor-local/lib/python/south/orm.py
  46. +24 −0 vendor-local/lib/python/south/signals.py
  47. +89 −0 vendor-local/lib/python/south/tests/__init__.py
  48. +353 −0 vendor-local/lib/python/south/tests/autodetection.py
  49. 0  vendor-local/lib/python/south/tests/brokenapp/__init__.py
  50. +13 −0 vendor-local/lib/python/south/tests/brokenapp/migrations/0001_depends_on_unmigrated.py
  51. +13 −0 vendor-local/lib/python/south/tests/brokenapp/migrations/0002_depends_on_unknown.py
  52. +13 −0 vendor-local/lib/python/south/tests/brokenapp/migrations/0003_depends_on_higher.py
  53. +11 −0 vendor-local/lib/python/south/tests/brokenapp/migrations/0004_higher.py
  54. 0  vendor-local/lib/python/south/tests/brokenapp/migrations/__init__.py
  55. +55 −0 vendor-local/lib/python/south/tests/brokenapp/models.py
  56. 0  vendor-local/lib/python/south/tests/circular_a/__init__.py
  57. +13 −0 vendor-local/lib/python/south/tests/circular_a/migrations/0001_first.py
  58. 0  vendor-local/lib/python/south/tests/circular_a/migrations/__init__.py
  59. 0  vendor-local/lib/python/south/tests/circular_a/models.py
  60. 0  vendor-local/lib/python/south/tests/circular_b/__init__.py
  61. +13 −0 vendor-local/lib/python/south/tests/circular_b/migrations/0001_first.py
  62. 0  vendor-local/lib/python/south/tests/circular_b/migrations/__init__.py
  63. 0  vendor-local/lib/python/south/tests/circular_b/models.py
  64. +857 −0 vendor-local/lib/python/south/tests/db.py
  65. +165 −0 vendor-local/lib/python/south/tests/db_mysql.py
  66. 0  vendor-local/lib/python/south/tests/deps_a/__init__.py
  67. +11 −0 vendor-local/lib/python/south/tests/deps_a/migrations/0001_a.py
  68. +11 −0 vendor-local/lib/python/south/tests/deps_a/migrations/0002_a.py
  69. +11 −0 vendor-local/lib/python/south/tests/deps_a/migrations/0003_a.py
  70. +13 −0 vendor-local/lib/python/south/tests/deps_a/migrations/0004_a.py
  71. +11 −0 vendor-local/lib/python/south/tests/deps_a/migrations/0005_a.py
  72. 0  vendor-local/lib/python/south/tests/deps_a/migrations/__init__.py
  73. 0  vendor-local/lib/python/south/tests/deps_a/models.py
  74. 0  vendor-local/lib/python/south/tests/deps_b/__init__.py
  75. +11 −0 vendor-local/lib/python/south/tests/deps_b/migrations/0001_b.py
  76. +13 −0 vendor-local/lib/python/south/tests/deps_b/migrations/0002_b.py
  77. +13 −0 vendor-local/lib/python/south/tests/deps_b/migrations/0003_b.py
  78. +11 −0 vendor-local/lib/python/south/tests/deps_b/migrations/0004_b.py
  79. +11 −0 vendor-local/lib/python/south/tests/deps_b/migrations/0005_b.py
  80. 0  vendor-local/lib/python/south/tests/deps_b/migrations/__init__.py
  81. 0  vendor-local/lib/python/south/tests/deps_b/models.py
  82. 0  vendor-local/lib/python/south/tests/deps_c/__init__.py
  83. +11 −0 vendor-local/lib/python/south/tests/deps_c/migrations/0001_c.py
  84. +11 −0 vendor-local/lib/python/south/tests/deps_c/migrations/0002_c.py
  85. +11 −0 vendor-local/lib/python/south/tests/deps_c/migrations/0003_c.py
  86. +11 −0 vendor-local/lib/python/south/tests/deps_c/migrations/0004_c.py
  87. +13 −0 vendor-local/lib/python/south/tests/deps_c/migrations/0005_c.py
  88. 0  vendor-local/lib/python/south/tests/deps_c/migrations/__init__.py
  89. 0  vendor-local/lib/python/south/tests/deps_c/models.py
  90. 0  vendor-local/lib/python/south/tests/emptyapp/__init__.py
  91. 0  vendor-local/lib/python/south/tests/emptyapp/migrations/__init__.py
  92. 0  vendor-local/lib/python/south/tests/emptyapp/models.py
  93. 0  vendor-local/lib/python/south/tests/fakeapp/__init__.py
  94. +17 −0 vendor-local/lib/python/south/tests/fakeapp/migrations/0001_spam.py
  95. +20 −0 vendor-local/lib/python/south/tests/fakeapp/migrations/0002_eggs.py
  96. +18 −0 vendor-local/lib/python/south/tests/fakeapp/migrations/0003_alter_spam.py
  97. 0  vendor-local/lib/python/south/tests/fakeapp/migrations/__init__.py
  98. +111 −0 vendor-local/lib/python/south/tests/fakeapp/models.py
  99. +15 −0 vendor-local/lib/python/south/tests/freezer.py
  100. +109 −0 vendor-local/lib/python/south/tests/inspector.py
  101. +82 −0 vendor-local/lib/python/south/tests/logger.py
  102. +898 −0 vendor-local/lib/python/south/tests/logic.py
  103. 0  vendor-local/lib/python/south/tests/non_managed/__init__.py
  104. 0  vendor-local/lib/python/south/tests/non_managed/migrations/__init__.py
  105. +16 −0 vendor-local/lib/python/south/tests/non_managed/models.py
  106. 0  vendor-local/lib/python/south/tests/otherfakeapp/__init__.py
  107. +15 −0 vendor-local/lib/python/south/tests/otherfakeapp/migrations/0001_first.py
  108. +11 −0 vendor-local/lib/python/south/tests/otherfakeapp/migrations/0002_second.py
  109. +14 −0 vendor-local/lib/python/south/tests/otherfakeapp/migrations/0003_third.py
  110. 0  vendor-local/lib/python/south/tests/otherfakeapp/migrations/__init__.py
  111. +1 −0  vendor-local/lib/python/south/tests/otherfakeapp/models.py
  112. +73 −0 vendor-local/lib/python/south/utils/__init__.py
  113. +28 −0 vendor-local/lib/python/south/utils/datetime_utils.py
  114. +19 −0 vendor-local/lib/python/south/v2.py
  115. +1 −0  vendor-local/vendor.pth
9 vendor-local/lib/python/south/__init__.py
@@ -0,0 +1,9 @@
+"""
+South - Useable migrations for Django apps
+"""
+
+__version__ = "0.7.6"
+__authors__ = [
+ "Andrew Godwin <andrew@aeracode.org>",
+ "Andy McCurdy <andy@andymccurdy.com>"
+]
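
The top-level package only carries version metadata; everything functional lives in the submodules added below. A minimal sketch (not part of the diff, assuming vendor-local/lib/python is on sys.path via the vendor.pth file in this commit):

    import south
    print south.__version__   # '0.7.6', per the __init__.py above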
5 vendor-local/lib/python/south/creator/__init__.py
@@ -0,0 +1,5 @@
+"""
+The creator module is responsible for making new migration files, either
+as blank templates or autodetecting changes. It contains code that used to
+all be in startmigration.py.
+"""
540 vendor-local/lib/python/south/creator/actions.py
@@ -0,0 +1,540 @@
+"""
+Actions - things like 'a model was removed' or 'a field was changed'.
+Each one has a class, which can take the action description and insert code
+blocks into the forwards() and backwards() methods, in the right place.
+"""
+
+import sys
+
+from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT
+from django.db.models.fields import FieldDoesNotExist, NOT_PROVIDED, CharField, TextField
+
+from south.modelsinspector import value_clean
+from south.creator.freezer import remove_useless_attributes, model_key
+from south.utils import datetime_utils
+
+
+class Action(object):
+ """
+ Generic base Action class. Contains utility methods for inserting into
+ the forwards() and backwards() method lists.
+ """
+
+ prepend_forwards = False
+ prepend_backwards = False
+
+ def forwards_code(self):
+ raise NotImplementedError
+
+ def backwards_code(self):
+ raise NotImplementedError
+
+ def add_forwards(self, forwards):
+ if self.prepend_forwards:
+ forwards.insert(0, self.forwards_code())
+ else:
+ forwards.append(self.forwards_code())
+
+ def add_backwards(self, backwards):
+ if self.prepend_backwards:
+ backwards.insert(0, self.backwards_code())
+ else:
+ backwards.append(self.backwards_code())
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ raise NotImplementedError
+
+ @classmethod
+ def triples_to_defs(cls, fields):
+ # Turn the (class, args, kwargs) format into a string
+ for field, triple in fields.items():
+ fields[field] = cls.triple_to_def(triple)
+ return fields
+
+ @classmethod
+ def triple_to_def(cls, triple):
+ "Turns a single triple into a definition."
+ return "self.gf(%r)(%s)" % (
+ triple[0], # Field full path
+ ", ".join(triple[1] + ["%s=%s" % (kwd, val) for kwd, val in triple[2].items()]), # args and kwds
+ )
+
+
+class AddModel(Action):
+ """
+ Addition of a model. Takes the Model subclass that is being created.
+ """
+
+ FORWARDS_TEMPLATE = '''
+ # Adding model '%(model_name)s'
+ db.create_table(%(table_name)r, (
+ %(field_defs)s
+ ))
+ db.send_create_signal(%(app_label)r, [%(model_name)r])'''[1:] + "\n"
+
+ BACKWARDS_TEMPLATE = '''
+ # Deleting model '%(model_name)s'
+ db.delete_table(%(table_name)r)'''[1:] + "\n"
+
+ def __init__(self, model, model_def):
+ self.model = model
+ self.model_def = model_def
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " + Added model %s.%s" % (
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+ "Produces the code snippet that gets put into forwards()"
+ field_defs = ",\n ".join([
+ "(%r, %s)" % (name, defn) for name, defn
+ in self.triples_to_defs(self.model_def).items()
+ ]) + ","
+
+ return self.FORWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "app_label": self.model._meta.app_label,
+ "field_defs": field_defs,
+ }
+
+ def backwards_code(self):
+ "Produces the code snippet that gets put into backwards()"
+ return self.BACKWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ }
+
+
+class DeleteModel(AddModel):
+ """
+ Deletion of a model. Takes the Model subclass that is being created.
+ """
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " - Deleted model %s.%s" % (
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+ return AddModel.backwards_code(self)
+
+ def backwards_code(self):
+ return AddModel.forwards_code(self)
+
+
+class _NullIssuesField(object):
+ """
+ A field that might need to ask a question about rogue NULL values.
+ """
+
+ allow_third_null_option = False
+ irreversible = False
+
+ IRREVERSIBLE_TEMPLATE = '''
+ # User chose to not deal with backwards NULL issues for '%(model_name)s.%(field_name)s'
+ raise RuntimeError("Cannot reverse this migration. '%(model_name)s.%(field_name)s' and its values cannot be restored.")'''
+
+ def deal_with_not_null_no_default(self, field, field_def):
+ # If it's a CharField or TextField that's blank, skip this step.
+ if isinstance(field, (CharField, TextField)) and field.blank:
+ field_def[2]['default'] = repr("")
+ return
+ # Oh dear. Ask them what to do.
+ print " ? The field '%s.%s' does not have a default specified, yet is NOT NULL." % (
+ self.model._meta.object_name,
+ field.name,
+ )
+ print " ? Since you are %s, you MUST specify a default" % self.null_reason
+ print " ? value to use for existing rows. Would you like to:"
+ print " ? 1. Quit now, and add a default to the field in models.py"
+ print " ? 2. Specify a one-off value to use for existing columns now"
+ if self.allow_third_null_option:
+ print " ? 3. Disable the backwards migration by raising an exception."
+ while True:
+ choice = raw_input(" ? Please select a choice: ")
+ if choice == "1":
+ sys.exit(1)
+ elif choice == "2":
+ break
+ elif choice == "3" and self.allow_third_null_option:
+ break
+ else:
+ print " ! Invalid choice."
+ if choice == "2":
+ self.add_one_time_default(field, field_def)
+ elif choice == "3":
+ self.irreversible = True
+
+ def add_one_time_default(self, field, field_def):
+ # OK, they want to pick their own one-time default. Who are we to refuse?
+ print " ? Please enter Python code for your one-off default value."
+ print " ? The datetime module is available, so you can do e.g. datetime.date.today()"
+ while True:
+ code = raw_input(" >>> ")
+ if not code:
+ print " ! Please enter some code, or 'exit' (with no quotes) to exit."
+ elif code == "exit":
+ sys.exit(1)
+ else:
+ try:
+ result = eval(code, {}, {"datetime": datetime_utils})
+ except (SyntaxError, NameError), e:
+ print " ! Invalid input: %s" % e
+ else:
+ break
+ # Right, add the default in.
+ field_def[2]['default'] = value_clean(result)
+
+ def irreversable_code(self, field):
+ return self.IRREVERSIBLE_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "field_name": field.name,
+ "field_column": field.column,
+ }
+
+
+class AddField(Action, _NullIssuesField):
+ """
+ Adds a field to a model. Takes a Model class and the field name.
+ """
+
+ null_reason = "adding this field"
+
+ FORWARDS_TEMPLATE = '''
+ # Adding field '%(model_name)s.%(field_name)s'
+ db.add_column(%(table_name)r, %(field_name)r,
+ %(field_def)s,
+ keep_default=False)'''[1:] + "\n"
+
+ BACKWARDS_TEMPLATE = '''
+ # Deleting field '%(model_name)s.%(field_name)s'
+ db.delete_column(%(table_name)r, %(field_column)r)'''[1:] + "\n"
+
+ def __init__(self, model, field, field_def):
+ self.model = model
+ self.field = field
+ self.field_def = field_def
+
+ # See if they've made a NOT NULL column but also have no default (far too common)
+ is_null = self.field.null
+ default = (self.field.default is not None) and (self.field.default is not NOT_PROVIDED)
+
+ if not is_null and not default:
+ self.deal_with_not_null_no_default(self.field, self.field_def)
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " + Added field %s on %s.%s" % (
+ self.field.name,
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+
+ return self.FORWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "field_name": self.field.name,
+ "field_column": self.field.column,
+ "field_def": self.triple_to_def(self.field_def),
+ }
+
+ def backwards_code(self):
+ return self.BACKWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "field_name": self.field.name,
+ "field_column": self.field.column,
+ }
+
+
+class DeleteField(AddField):
+ """
+ Removes a field from a model. Takes a Model class and the field name.
+ """
+
+ null_reason = "removing this field"
+ allow_third_null_option = True
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " - Deleted field %s on %s.%s" % (
+ self.field.name,
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+ return AddField.backwards_code(self)
+
+ def backwards_code(self):
+ if not self.irreversible:
+ return AddField.forwards_code(self)
+ else:
+ return self.irreversable_code(self.field)
+
+
+class ChangeField(Action, _NullIssuesField):
+ """
+ Changes a field's type/options on a model.
+ """
+
+ null_reason = "making this field non-nullable"
+
+ FORWARDS_TEMPLATE = BACKWARDS_TEMPLATE = '''
+ # Changing field '%(model_name)s.%(field_name)s'
+ db.alter_column(%(table_name)r, %(field_column)r, %(field_def)s)'''
+
+ RENAME_TEMPLATE = '''
+ # Renaming column for '%(model_name)s.%(field_name)s' to match new field type.
+ db.rename_column(%(table_name)r, %(old_column)r, %(new_column)r)'''
+
+ def __init__(self, model, old_field, new_field, old_def, new_def):
+ self.model = model
+ self.old_field = old_field
+ self.new_field = new_field
+ self.old_def = old_def
+ self.new_def = new_def
+
+ # See if they've changed a not-null field to be null
+ new_default = (self.new_field.default is not None) and (self.new_field.default is not NOT_PROVIDED)
+ old_default = (self.old_field.default is not None) and (self.old_field.default is not NOT_PROVIDED)
+ if self.old_field.null and not self.new_field.null and not new_default:
+ self.deal_with_not_null_no_default(self.new_field, self.new_def)
+ if not self.old_field.null and self.new_field.null and not old_default:
+ self.null_reason = "making this field nullable"
+ self.allow_third_null_option = True
+ self.deal_with_not_null_no_default(self.old_field, self.old_def)
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " ~ Changed field %s on %s.%s" % (
+ self.new_field.name,
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def _code(self, old_field, new_field, new_def):
+
+ output = ""
+
+ if self.old_field.column != self.new_field.column:
+ output += self.RENAME_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "field_name": new_field.name,
+ "old_column": old_field.column,
+ "new_column": new_field.column,
+ }
+
+ output += self.FORWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "field_name": new_field.name,
+ "field_column": new_field.column,
+ "field_def": self.triple_to_def(new_def),
+ }
+
+ return output
+
+ def forwards_code(self):
+ return self._code(self.old_field, self.new_field, self.new_def)
+
+ def backwards_code(self):
+ if not self.irreversible:
+ return self._code(self.new_field, self.old_field, self.old_def)
+ else:
+ return self.irreversable_code(self.old_field)
+
+
+class AddUnique(Action):
+ """
+ Adds a unique constraint to a model. Takes a Model class and the field names.
+ """
+
+ FORWARDS_TEMPLATE = '''
+ # Adding unique constraint on '%(model_name)s', fields %(field_names)s
+ db.create_unique(%(table_name)r, %(fields)r)'''[1:] + "\n"
+
+ BACKWARDS_TEMPLATE = '''
+ # Removing unique constraint on '%(model_name)s', fields %(field_names)s
+ db.delete_unique(%(table_name)r, %(fields)r)'''[1:] + "\n"
+
+ prepend_backwards = True
+
+ def __init__(self, model, fields):
+ self.model = model
+ self.fields = fields
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " + Added unique constraint for %s on %s.%s" % (
+ [x.name for x in self.fields],
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+
+ return self.FORWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "fields": [field.column for field in self.fields],
+ "field_names": [field.name for field in self.fields],
+ }
+
+ def backwards_code(self):
+ return self.BACKWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "fields": [field.column for field in self.fields],
+ "field_names": [field.name for field in self.fields],
+ }
+
+
+class DeleteUnique(AddUnique):
+ """
+ Removes a unique constraint from a model. Takes a Model class and the field names.
+ """
+
+ prepend_forwards = True
+ prepend_backwards = False
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " - Deleted unique constraint for %s on %s.%s" % (
+ [x.name for x in self.fields],
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+ return AddUnique.backwards_code(self)
+
+ def backwards_code(self):
+ return AddUnique.forwards_code(self)
+
+
+class AddIndex(AddUnique):
+ """
+ Adds an index to a model field[s]. Takes a Model class and the field names.
+ """
+
+ FORWARDS_TEMPLATE = '''
+ # Adding index on '%(model_name)s', fields %(field_names)s
+ db.create_index(%(table_name)r, %(fields)r)'''[1:] + "\n"
+
+ BACKWARDS_TEMPLATE = '''
+ # Removing index on '%(model_name)s', fields %(field_names)s
+ db.delete_index(%(table_name)r, %(fields)r)'''[1:] + "\n"
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " + Added index for %s on %s.%s" % (
+ [x.name for x in self.fields],
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+
+class DeleteIndex(AddIndex):
+ """
+ Deletes an index off a model field[s]. Takes a Model class and the field names.
+ """
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " + Deleted index for %s on %s.%s" % (
+ [x.name for x in self.fields],
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+ return AddIndex.backwards_code(self)
+
+ def backwards_code(self):
+ return AddIndex.forwards_code(self)
+
+
+class AddM2M(Action):
+ """
+ Adds a unique constraint to a model. Takes a Model class and the field names.
+ """
+
+ FORWARDS_TEMPLATE = '''
+ # Adding M2M table for field %(field_name)s on '%(model_name)s'
+ db.create_table(%(table_name)r, (
+ ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
+ (%(left_field)r, models.ForeignKey(orm[%(left_model_key)r], null=False)),
+ (%(right_field)r, models.ForeignKey(orm[%(right_model_key)r], null=False))
+ ))
+ db.create_unique(%(table_name)r, [%(left_column)r, %(right_column)r])'''[1:] + "\n"
+
+ BACKWARDS_TEMPLATE = '''
+ # Removing M2M table for field %(field_name)s on '%(model_name)s'
+ db.delete_table('%(table_name)s')'''[1:] + "\n"
+
+ def __init__(self, model, field):
+ self.model = model
+ self.field = field
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " + Added M2M table for %s on %s.%s" % (
+ self.field.name,
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+
+ return self.FORWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "field_name": self.field.name,
+ "table_name": self.field.m2m_db_table(),
+ "left_field": self.field.m2m_column_name()[:-3], # Remove the _id part
+ "left_column": self.field.m2m_column_name(),
+ "left_model_key": model_key(self.model),
+ "right_field": self.field.m2m_reverse_name()[:-3], # Remove the _id part
+ "right_column": self.field.m2m_reverse_name(),
+ "right_model_key": model_key(self.field.rel.to),
+ }
+
+ def backwards_code(self):
+
+ return self.BACKWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "field_name": self.field.name,
+ "table_name": self.field.m2m_db_table(),
+ }
+
+
+class DeleteM2M(AddM2M):
+ """
+ Adds a unique constraint to a model. Takes a Model class and the field names.
+ """
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " - Deleted M2M table for %s on %s.%s" % (
+ self.field.name,
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+ return AddM2M.backwards_code(self)
+
+ def backwards_code(self):
+ return AddM2M.forwards_code(self)
+
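
To make the Action machinery above concrete, here is a minimal sketch (not part of the diff) of how a frozen field triple is rendered into the self.gf(...) call that ends up inside a generated migration's forwards(). It assumes a configured Django settings module, since importing south.creator.actions pulls in django.db.models.

    from south.creator.actions import Action

    # A frozen field definition: (full class path, positional args, keyword args),
    # with the argument values already stringified by the freezer.
    triple = ('django.db.models.fields.CharField', [], {'max_length': '100'})

    print Action.triple_to_def(triple)
    # -> self.gf('django.db.models.fields.CharField')(max_length=100)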
489 vendor-local/lib/python/south/creator/changes.py
@@ -0,0 +1,489 @@
+"""
+Contains things to detect changes - either using options passed in on the
+commandline, or by using autodetection, etc.
+"""
+
+from django.db import models
+from django.contrib.contenttypes.generic import GenericRelation
+from django.utils.datastructures import SortedDict
+
+from south.creator.freezer import remove_useless_attributes, freeze_apps, model_key
+from south.utils import auto_through
+
+class BaseChanges(object):
+ """
+ Base changes class.
+ """
+ def suggest_name(self):
+ return ''
+
+ def split_model_def(self, model, model_def):
+ """
+ Given a model and its model def (a dict of field: triple), returns three
+ items: the real fields dict, the Meta dict, and the M2M fields dict.
+ """
+ real_fields = SortedDict()
+ meta = SortedDict()
+ m2m_fields = SortedDict()
+ for name, triple in model_def.items():
+ if name == "Meta":
+ meta = triple
+ elif isinstance(model._meta.get_field_by_name(name)[0], models.ManyToManyField):
+ m2m_fields[name] = triple
+ else:
+ real_fields[name] = triple
+ return real_fields, meta, m2m_fields
+
+ def current_model_from_key(self, key):
+ app_label, model_name = key.split(".")
+ return models.get_model(app_label, model_name)
+
+ def current_field_from_key(self, key, fieldname):
+ app_label, model_name = key.split(".")
+ # Special, for the magical field from order_with_respect_to
+ if fieldname == "_order":
+ field = models.IntegerField()
+ field.name = "_order"
+ field.attname = "_order"
+ field.column = "_order"
+ field.default = 0
+ return field
+ # Otherwise, normal.
+ return models.get_model(app_label, model_name)._meta.get_field_by_name(fieldname)[0]
+
+
+class AutoChanges(BaseChanges):
+ """
+ Detects changes by 'diffing' two sets of frozen model definitions.
+ """
+
+ # Field types we don't generate add/remove field changes for.
+ IGNORED_FIELD_TYPES = [
+ GenericRelation,
+ ]
+
+ def __init__(self, migrations, old_defs, old_orm, new_defs):
+ self.migrations = migrations
+ self.old_defs = old_defs
+ self.old_orm = old_orm
+ self.new_defs = new_defs
+
+ def suggest_name(self):
+ parts = ["auto"]
+ for change_name, params in self.get_changes():
+ if change_name == "AddModel":
+ parts.append("add_%s" % params['model']._meta.object_name.lower())
+ elif change_name == "DeleteModel":
+ parts.append("del_%s" % params['model']._meta.object_name.lower())
+ elif change_name == "AddField":
+ parts.append("add_field_%s_%s" % (
+ params['model']._meta.object_name.lower(),
+ params['field'].name,
+ ))
+ elif change_name == "DeleteField":
+ parts.append("del_field_%s_%s" % (
+ params['model']._meta.object_name.lower(),
+ params['field'].name,
+ ))
+ elif change_name == "ChangeField":
+ parts.append("chg_field_%s_%s" % (
+ params['model']._meta.object_name.lower(),
+ params['new_field'].name,
+ ))
+ elif change_name == "AddUnique":
+ parts.append("add_unique_%s_%s" % (
+ params['model']._meta.object_name.lower(),
+ "_".join([x.name for x in params['fields']]),
+ ))
+ elif change_name == "DeleteUnique":
+ parts.append("del_unique_%s_%s" % (
+ params['model']._meta.object_name.lower(),
+ "_".join([x.name for x in params['fields']]),
+ ))
+ return ("__".join(parts))[:70]
+
+ def get_changes(self):
+ """
+ Returns the difference between the old and new sets of models as a 5-tuple:
+ added_models, deleted_models, added_fields, deleted_fields, changed_fields
+ """
+
+ deleted_models = set()
+
+ # See if anything's vanished
+ for key in self.old_defs:
+ if key not in self.new_defs:
+ # We shouldn't delete it if it was managed=False
+ old_fields, old_meta, old_m2ms = self.split_model_def(self.old_orm[key], self.old_defs[key])
+ if old_meta.get("managed", "True") != "False":
+ # Alright, delete it.
+ yield ("DeleteModel", {
+ "model": self.old_orm[key],
+ "model_def": old_fields,
+ })
+ # Also make sure we delete any M2Ms it had.
+ for fieldname in old_m2ms:
+ # Only delete its stuff if it wasn't a through=.
+ field = self.old_orm[key + ":" + fieldname]
+ if auto_through(field):
+ yield ("DeleteM2M", {"model": self.old_orm[key], "field": field})
+ # And any unique constraints it had
+ unique_together = eval(old_meta.get("unique_together", "[]"))
+ if unique_together:
+ # If it's only a single tuple, make it into the longer one
+ if isinstance(unique_together[0], basestring):
+ unique_together = [unique_together]
+ # For each combination, make an action for it
+ for fields in unique_together:
+ yield ("DeleteUnique", {
+ "model": self.old_orm[key],
+ "fields": [self.old_orm[key]._meta.get_field_by_name(x)[0] for x in fields],
+ })
+ # We always add it in here so we ignore it later
+ deleted_models.add(key)
+
+ # Or appeared
+ for key in self.new_defs:
+ if key not in self.old_defs:
+ # We shouldn't add it if it's managed=False
+ new_fields, new_meta, new_m2ms = self.split_model_def(self.current_model_from_key(key), self.new_defs[key])
+ if new_meta.get("managed", "True") != "False":
+ yield ("AddModel", {
+ "model": self.current_model_from_key(key),
+ "model_def": new_fields,
+ })
+ # Also make sure we add any M2Ms it has.
+ for fieldname in new_m2ms:
+ # Only create its stuff if it wasn't a through=.
+ field = self.current_field_from_key(key, fieldname)
+ if auto_through(field):
+ yield ("AddM2M", {"model": self.current_model_from_key(key), "field": field})
+ # And any unique constraints it has
+ unique_together = eval(new_meta.get("unique_together", "[]"))
+ if unique_together:
+ # If it's only a single tuple, make it into the longer one
+ if isinstance(unique_together[0], basestring):
+ unique_together = [unique_together]
+ # For each combination, make an action for it
+ for fields in unique_together:
+ yield ("AddUnique", {
+ "model": self.current_model_from_key(key),
+ "fields": [self.current_model_from_key(key)._meta.get_field_by_name(x)[0] for x in fields],
+ })
+
+ # Now, for every model that's stayed the same, check its fields.
+ for key in self.old_defs:
+ if key not in deleted_models:
+
+ old_fields, old_meta, old_m2ms = self.split_model_def(self.old_orm[key], self.old_defs[key])
+ new_fields, new_meta, new_m2ms = self.split_model_def(self.current_model_from_key(key), self.new_defs[key])
+
+ # Do nothing for models which are now not managed.
+ if new_meta.get("managed", "True") == "False":
+ continue
+
+ # Find fields that have vanished.
+ for fieldname in old_fields:
+ if fieldname not in new_fields:
+ # Don't do it for any fields we're ignoring
+ field = self.old_orm[key + ":" + fieldname]
+ field_allowed = True
+ for field_type in self.IGNORED_FIELD_TYPES:
+ if isinstance(field, field_type):
+ field_allowed = False
+ if field_allowed:
+ # Looks alright.
+ yield ("DeleteField", {
+ "model": self.old_orm[key],
+ "field": field,
+ "field_def": old_fields[fieldname],
+ })
+
+ # And ones that have appeared
+ for fieldname in new_fields:
+ if fieldname not in old_fields:
+ # Don't do it for any fields we're ignoring
+ field = self.current_field_from_key(key, fieldname)
+ field_allowed = True
+ for field_type in self.IGNORED_FIELD_TYPES:
+ if isinstance(field, field_type):
+ field_allowed = False
+ if field_allowed:
+ # Looks alright.
+ yield ("AddField", {
+ "model": self.current_model_from_key(key),
+ "field": field,
+ "field_def": new_fields[fieldname],
+ })
+
+ # Find M2Ms that have vanished
+ for fieldname in old_m2ms:
+ if fieldname not in new_m2ms:
+ # Only delete its stuff if it wasn't a through=.
+ field = self.old_orm[key + ":" + fieldname]
+ if auto_through(field):
+ yield ("DeleteM2M", {"model": self.old_orm[key], "field": field})
+
+ # Find M2Ms that have appeared
+ for fieldname in new_m2ms:
+ if fieldname not in old_m2ms:
+ # Only create its stuff if it wasn't a through=.
+ field = self.current_field_from_key(key, fieldname)
+ if auto_through(field):
+ yield ("AddM2M", {"model": self.current_model_from_key(key), "field": field})
+
+ # For the ones that exist in both models, see if they were changed
+ for fieldname in set(old_fields).intersection(set(new_fields)):
+ # Non-index changes
+ if self.different_attributes(
+ remove_useless_attributes(old_fields[fieldname], True, True),
+ remove_useless_attributes(new_fields[fieldname], True, True)):
+ yield ("ChangeField", {
+ "model": self.current_model_from_key(key),
+ "old_field": self.old_orm[key + ":" + fieldname],
+ "new_field": self.current_field_from_key(key, fieldname),
+ "old_def": old_fields[fieldname],
+ "new_def": new_fields[fieldname],
+ })
+ # Index changes
+ old_field = self.old_orm[key + ":" + fieldname]
+ new_field = self.current_field_from_key(key, fieldname)
+ if not old_field.db_index and new_field.db_index:
+ # They've added an index.
+ yield ("AddIndex", {
+ "model": self.current_model_from_key(key),
+ "fields": [new_field],
+ })
+ if old_field.db_index and not new_field.db_index:
+ # They've removed an index.
+ yield ("DeleteIndex", {
+ "model": self.old_orm[key],
+ "fields": [old_field],
+ })
+ # See if their uniques have changed
+ if old_field.unique != new_field.unique:
+ # Make sure we look at the one explicitly given to see what happened
+ if new_field.unique:
+ yield ("AddUnique", {
+ "model": self.current_model_from_key(key),
+ "fields": [new_field],
+ })
+ else:
+ yield ("DeleteUnique", {
+ "model": self.old_orm[key],
+ "fields": [old_field],
+ })
+
+ # See if there's any M2Ms that have changed.
+ for fieldname in set(old_m2ms).intersection(set(new_m2ms)):
+ old_field = self.old_orm[key + ":" + fieldname]
+ new_field = self.current_field_from_key(key, fieldname)
+ # Have they _added_ a through= ?
+ if auto_through(old_field) and not auto_through(new_field):
+ yield ("DeleteM2M", {"model": self.old_orm[key], "field": old_field})
+ # Have they _removed_ a through= ?
+ if not auto_through(old_field) and auto_through(new_field):
+ yield ("AddM2M", {"model": self.current_model_from_key(key), "field": new_field})
+
+ ## See if the unique_togethers have changed
+ # First, normalise them into lists of sets.
+ old_unique_together = eval(old_meta.get("unique_together", "[]"))
+ new_unique_together = eval(new_meta.get("unique_together", "[]"))
+ if old_unique_together and isinstance(old_unique_together[0], basestring):
+ old_unique_together = [old_unique_together]
+ if new_unique_together and isinstance(new_unique_together[0], basestring):
+ new_unique_together = [new_unique_together]
+ old_unique_together = map(set, old_unique_together)
+ new_unique_together = map(set, new_unique_together)
+ # See if any appeared or disappeared
+ for item in old_unique_together:
+ if item not in new_unique_together:
+ yield ("DeleteUnique", {
+ "model": self.old_orm[key],
+ "fields": [self.old_orm[key + ":" + x] for x in item],
+ })
+ for item in new_unique_together:
+ if item not in old_unique_together:
+ yield ("AddUnique", {
+ "model": self.current_model_from_key(key),
+ "fields": [self.current_field_from_key(key, x) for x in item],
+ })
+
+ @classmethod
+ def is_triple(cls, triple):
+ "Returns whether the argument is a triple."
+ return isinstance(triple, (list, tuple)) and len(triple) == 3 and \
+ isinstance(triple[0], (str, unicode)) and \
+ isinstance(triple[1], (list, tuple)) and \
+ isinstance(triple[2], dict)
+
+ @classmethod
+ def different_attributes(cls, old, new):
+ """
+ Backwards-compat comparison that ignores orm. on the RHS and not the left
+ and which knows django.db.models.fields.CharField = models.CharField.
+ Has a whole load of tests in tests/autodetection.py.
+ """
+
+ # If they're not triples, just do normal comparison
+ if not cls.is_triple(old) or not cls.is_triple(new):
+ return old != new
+
+ # Expand them out into parts
+ old_field, old_pos, old_kwd = old
+ new_field, new_pos, new_kwd = new
+
+ # Copy the positional and keyword arguments so we can compare them and pop off things
+ old_pos, new_pos = old_pos[:], new_pos[:]
+ old_kwd = dict(old_kwd.items())
+ new_kwd = dict(new_kwd.items())
+
+ # Remove comparison of the existence of 'unique', that's done elsewhere.
+ # TODO: Make this work for custom fields where unique= means something else?
+ if "unique" in old_kwd:
+ del old_kwd['unique']
+ if "unique" in new_kwd:
+ del new_kwd['unique']
+
+ # If the first bit is different, check it's not by dj.db.models...
+ if old_field != new_field:
+ if old_field.startswith("models.") and (new_field.startswith("django.db.models") \
+ or new_field.startswith("django.contrib.gis")):
+ if old_field.split(".")[-1] != new_field.split(".")[-1]:
+ return True
+ else:
+ # Remove those fields from the final comparison
+ old_field = new_field = ""
+
+ # If there's a positional argument in the first, and a 'to' in the second,
+ # see if they're actually comparable.
+ if (old_pos and "to" in new_kwd) and ("orm" in new_kwd['to'] and "orm" not in old_pos[0]):
+ # Do special comparison to fix #153
+ try:
+ if old_pos[0] != new_kwd['to'].split("'")[1].split(".")[1]:
+ return True
+ except IndexError:
+ pass # Fall back to next comparison
+ # Remove those attrs from the final comparison
+ old_pos = old_pos[1:]
+ del new_kwd['to']
+
+ return old_field != new_field or old_pos != new_pos or old_kwd != new_kwd
+
+
+class ManualChanges(BaseChanges):
+ """
+ Detects changes by reading the command line.
+ """
+
+ def __init__(self, migrations, added_models, added_fields, added_indexes):
+ self.migrations = migrations
+ self.added_models = added_models
+ self.added_fields = added_fields
+ self.added_indexes = added_indexes
+
+ def suggest_name(self):
+ bits = []
+ for model_name in self.added_models:
+ bits.append('add_model_%s' % model_name)
+ for field_name in self.added_fields:
+ bits.append('add_field_%s' % field_name)
+ for index_name in self.added_indexes:
+ bits.append('add_index_%s' % index_name)
+ return '_'.join(bits).replace('.', '_')
+
+ def get_changes(self):
+ # Get the model defs so we can use them for the yield later
+ model_defs = freeze_apps([self.migrations.app_label()])
+ # Make the model changes
+ for model_name in self.added_models:
+ model = models.get_model(self.migrations.app_label(), model_name)
+ real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)])
+ yield ("AddModel", {
+ "model": model,
+ "model_def": real_fields,
+ })
+ # And the field changes
+ for field_desc in self.added_fields:
+ try:
+ model_name, field_name = field_desc.split(".")
+ except (TypeError, ValueError):
+ raise ValueError("%r is not a valid field description." % field_desc)
+ model = models.get_model(self.migrations.app_label(), model_name)
+ real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)])
+ yield ("AddField", {
+ "model": model,
+ "field": model._meta.get_field_by_name(field_name)[0],
+ "field_def": real_fields[field_name],
+ })
+ # And the indexes
+ for field_desc in self.added_indexes:
+ try:
+ model_name, field_name = field_desc.split(".")
+ except (TypeError, ValueError):
+ print "%r is not a valid field description." % field_desc
+ model = models.get_model(self.migrations.app_label(), model_name)
+ yield ("AddIndex", {
+ "model": model,
+ "fields": [model._meta.get_field_by_name(field_name)[0]],
+ })
+
+
+class InitialChanges(BaseChanges):
+ """
+ Creates all models; handles --initial.
+ """
+ def suggest_name(self):
+ return 'initial'
+
+ def __init__(self, migrations):
+ self.migrations = migrations
+
+ def get_changes(self):
+ # Get the frozen models for this app
+ model_defs = freeze_apps([self.migrations.app_label()])
+
+ for model in models.get_models(models.get_app(self.migrations.app_label())):
+
+ # Don't do anything for unmanaged, abstract or proxy models
+ if model._meta.abstract or getattr(model._meta, "proxy", False) or not getattr(model._meta, "managed", True):
+ continue
+
+ real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)])
+
+ # Firstly, add the main table and fields
+ yield ("AddModel", {
+ "model": model,
+ "model_def": real_fields,
+ })
+
+ # Then, add any uniqueness that's around
+ if meta:
+ unique_together = eval(meta.get("unique_together", "[]"))
+ if unique_together:
+ # If it's only a single tuple, make it into the longer one
+ if isinstance(unique_together[0], basestring):
+ unique_together = [unique_together]
+ # For each combination, make an action for it
+ for fields in unique_together:
+ yield ("AddUnique", {
+ "model": model,
+ "fields": [model._meta.get_field_by_name(x)[0] for x in fields],
+ })
+
+ # Finally, see if there's some M2M action
+ for name, triple in m2m_fields.items():
+ field = model._meta.get_field_by_name(name)[0]
+ # But only if it's not through=foo (#120)
+ if field.rel.through:
+ try:
+ # Django 1.1 and below
+ through_model = field.rel.through_model
+ except AttributeError:
+ # Django 1.2
+ through_model = field.rel.through
+ if (not field.rel.through) or getattr(through_model._meta, "auto_created", False):
+ yield ("AddM2M", {
+ "model": model,
+ "field": field,
+ })
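
A minimal sketch (not part of the diff) of the attribute comparison AutoChanges relies on when diffing two frozen field triples; it treats the models.X and django.db.models.fields.X spellings as equivalent. This too assumes a configured Django settings module, since the import chain touches django.db and django.contrib.contenttypes.

    from south.creator.changes import AutoChanges

    old = ('models.CharField', [], {'max_length': '100'})
    same = ('django.db.models.fields.CharField', [], {'max_length': '100'})
    changed = ('django.db.models.fields.CharField', [], {'max_length': '200'})

    print AutoChanges.different_attributes(old, same)      # False: spelling only
    print AutoChanges.different_attributes(old, changed)   # True: max_length changed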
190 vendor-local/lib/python/south/creator/freezer.py
@@ -0,0 +1,190 @@
+"""
+Handles freezing of models into FakeORMs.
+"""
+
+import sys
+
+from django.db import models
+from django.db.models.base import ModelBase, Model
+from django.contrib.contenttypes.generic import GenericRelation
+
+from south.orm import FakeORM
+from south.utils import get_attribute, auto_through
+from south import modelsinspector
+
+def freeze_apps(apps):
+ """
+ Takes a list of app labels, and returns a string of their frozen form.
+ """
+ if isinstance(apps, basestring):
+ apps = [apps]
+ frozen_models = set()
+ # For each app, add in all its models
+ for app in apps:
+ for model in models.get_models(models.get_app(app)):
+ # Only add if it's not abstract or proxy
+ if not model._meta.abstract and not getattr(model._meta, "proxy", False):
+ frozen_models.add(model)
+ # Now, add all the dependencies
+ for model in list(frozen_models):
+ frozen_models.update(model_dependencies(model))
+ # Serialise!
+ model_defs = {}
+ model_classes = {}
+ for model in frozen_models:
+ model_defs[model_key(model)] = prep_for_freeze(model)
+ model_classes[model_key(model)] = model
+ # Check for any custom fields that failed to freeze.
+ missing_fields = False
+ for key, fields in model_defs.items():
+ for field_name, value in fields.items():
+ if value is None:
+ missing_fields = True
+ model_class = model_classes[key]
+ field_class = model_class._meta.get_field_by_name(field_name)[0]
+ print " ! Cannot freeze field '%s.%s'" % (key, field_name)
+ print " ! (this field has class %s.%s)" % (field_class.__class__.__module__, field_class.__class__.__name__)
+ if missing_fields:
+ print ""
+ print " ! South cannot introspect some fields; this is probably because they are custom"
+ print " ! fields. If they worked in 0.6 or below, this is because we have removed the"
+ print " ! models parser (it often broke things)."
+ print " ! To fix this, read http://south.aeracode.org/wiki/MyFieldsDontWork"
+ sys.exit(1)
+
+ return model_defs
+
+def freeze_apps_to_string(apps):
+ return pprint_frozen_models(freeze_apps(apps))
+
+###
+
+def model_key(model):
+ "For a given model, return 'appname.modelname'."
+ return "%s.%s" % (model._meta.app_label, model._meta.object_name.lower())
+
+def prep_for_freeze(model):
+ """
+ Takes a model and returns the ready-to-serialise dict (all you need
+ to do is just pretty-print it).
+ """
+ fields = modelsinspector.get_model_fields(model, m2m=True)
+ # Remove useless attributes (like 'choices')
+ for name, field in fields.items():
+ fields[name] = remove_useless_attributes(field)
+ # See if there's a Meta
+ fields['Meta'] = remove_useless_meta(modelsinspector.get_model_meta(model))
+ # Add in our own special items to track the object name and managed
+ fields['Meta']['object_name'] = model._meta.object_name # Special: not eval'able.
+ if not getattr(model._meta, "managed", True):
+ fields['Meta']['managed'] = repr(model._meta.managed)
+ return fields
+
+### Dependency resolvers
+
+def model_dependencies(model, checked_models=None):
+ """
+ Returns a set of models this one depends on to be defined; things like
+ OneToOneFields as ID, ForeignKeys everywhere, etc.
+ """
+ depends = set()
+ checked_models = checked_models or set()
+ # Get deps for each field
+ for field in model._meta.fields + model._meta.many_to_many:
+ depends.update(field_dependencies(field, checked_models))
+ # Add in any non-abstract bases
+ for base in model.__bases__:
+ if issubclass(base, models.Model) and hasattr(base, '_meta') and not base._meta.abstract:
+ depends.add(base)
+ # Now recurse
+ new_to_check = depends - checked_models
+ while new_to_check:
+ checked_model = new_to_check.pop()
+ if checked_model == model or checked_model in checked_models:
+ continue
+ checked_models.add(checked_model)
+ deps = model_dependencies(checked_model, checked_models)
+ # Loop through dependencies...
+ for dep in deps:
+ # If the new dep is not already checked, add to the queue
+ if (dep not in depends) and (dep not in new_to_check) and (dep not in checked_models):
+ new_to_check.add(dep)
+ depends.add(dep)
+ return depends
+
+def field_dependencies(field, checked_models=None):
+ checked_models = checked_models or set()
+ depends = set()
+ arg_defs, kwarg_defs = modelsinspector.matching_details(field)
+ for attrname, options in arg_defs + kwarg_defs.values():
+ if options.get("ignore_if_auto_through", False) and auto_through(field):
+ continue
+ if options.get("is_value", False):
+ value = attrname
+ elif attrname == 'rel.through' and hasattr(getattr(field, 'rel', None), 'through_model'):
+ # Hack for django 1.1 and below, where the through model is stored
+ # in rel.through_model while rel.through stores only the model name.
+ value = field.rel.through_model
+ else:
+ try:
+ value = get_attribute(field, attrname)
+ except AttributeError:
+ if options.get("ignore_missing", False):
+ continue
+ raise
+ if isinstance(value, Model):
+ value = value.__class__
+ if not isinstance(value, ModelBase):
+ continue
+ if getattr(value._meta, "proxy", False):
+ value = value._meta.proxy_for_model
+ if value in checked_models:
+ continue
+ checked_models.add(value)
+ depends.add(value)
+ depends.update(model_dependencies(value, checked_models))
+
+ return depends
+
+### Prettyprinters
+
+def pprint_frozen_models(models):
+ return "{\n %s\n }" % ",\n ".join([
+ "%r: %s" % (name, pprint_fields(fields))
+ for name, fields in sorted(models.items())
+ ])
+
+def pprint_fields(fields):
+ return "{\n %s\n }" % ",\n ".join([
+ "%r: %r" % (name, defn)
+ for name, defn in sorted(fields.items())
+ ])
+
+### Output sanitisers
+
+USELESS_KEYWORDS = ["choices", "help_text", "verbose_name"]
+USELESS_DB_KEYWORDS = ["related_name", "default", "blank"] # Important for ORM, not for DB.
+INDEX_KEYWORDS = ["db_index"]
+
+def remove_useless_attributes(field, db=False, indexes=False):
+ "Removes useless (for database) attributes from the field's defn."
+ # Work out what to remove, and remove it.
+ keywords = USELESS_KEYWORDS[:]
+ if db:
+ keywords += USELESS_DB_KEYWORDS[:]
+ if indexes:
+ keywords += INDEX_KEYWORDS[:]
+ if field:
+ for name in keywords:
+ if name in field[2]:
+ del field[2][name]
+ return field
+
+USELESS_META = ["verbose_name", "verbose_name_plural"]
+def remove_useless_meta(meta):
+ "Removes useless (for database) attributes from the table's meta."
+ if meta:
+ for name in USELESS_META:
+ if name in meta:
+ del meta[name]
+ return meta
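
For orientation, a sketch of the structure freeze_apps() returns: a dict keyed by 'app.modelname', each value mapping field names to (class path, args, kwargs) triples plus a 'Meta' entry, which pprint_frozen_models() renders as the frozen models dict embedded in generated migrations. The literal below is illustrative data for a hypothetical blog app, not real output.

    frozen = {
        'blog.entry': {
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'Meta': {'object_name': 'Entry'},
        },
    }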
81 vendor-local/lib/python/south/db/__init__.py
@@ -0,0 +1,81 @@
+
+# Establish the common DatabaseOperations instance, which we call 'db'.
+# Much thanks to cmkmrr for a lot of the code base here
+
+from django.conf import settings
+import sys
+
+# A few aliases, because there's FQMNs now
+engine_modules = {
+ 'django.db.backends.postgresql_psycopg2': 'postgresql_psycopg2',
+ 'django.db.backends.sqlite3': 'sqlite3',
+ 'django.db.backends.mysql': 'mysql',
+ 'django.db.backends.oracle': 'oracle',
+ 'sql_server.pyodbc': 'sql_server.pyodbc', #django-pyodbc
+ 'sqlserver_ado': 'sql_server.pyodbc', #django-mssql
+ 'firebird': 'firebird', #django-firebird
+ 'django.contrib.gis.db.backends.postgis': 'postgresql_psycopg2',
+ 'django.contrib.gis.db.backends.spatialite': 'sqlite3',
+ 'django.contrib.gis.db.backends.mysql': 'mysql',
+ 'django.contrib.gis.db.backends.oracle': 'oracle',
+ 'doj.backends.zxjdbc.postgresql': 'postgresql_psycopg2', #django-jython
+ 'doj.backends.zxjdbc.mysql': 'mysql', #django-jython
+ 'doj.backends.zxjdbc.oracle': 'oracle', #django-jython
+}
+
+# First, work out if we're multi-db or not, and which databases we have
+try:
+ from django.db import DEFAULT_DB_ALIAS
+except ImportError:
+ #### 1.1 or below ####
+ # We'll 'fake' multi-db; set the default alias
+ DEFAULT_DB_ALIAS = 'default'
+ # SOUTH_DATABASE_ADAPTER is an optional override if you have a different module
+ engine = getattr(settings, "SOUTH_DATABASE_ADAPTER", "south.db.%s" % settings.DATABASE_ENGINE)
+ # And then, we have one database with one engine
+ db_engines = {DEFAULT_DB_ALIAS: engine}
+else:
+ #### 1.2 or above ####
+ # Loop over the defined databases, gathering up their engines
+ db_engines = dict([
+ # Note we check to see if contrib.gis has overridden us.
+ (alias, "south.db.%s" % engine_modules[db_settings['ENGINE']])
+ for alias, db_settings in settings.DATABASES.items()
+ if db_settings['ENGINE'] in engine_modules
+ ])
+ # Update with any overrides
+ db_engines.update(getattr(settings, "SOUTH_DATABASE_ADAPTERS", {}))
+ # Check there's no None engines, or...
+ for alias, engine in db_engines.items():
+ if engine is None:
+ # They've used a backend we don't support
+ sys.stderr.write(
+ (
+ "There is no South database module for your database backend '%s'. " + \
+ "Please either choose a supported database, check for " + \
+ "SOUTH_DATABASE_ADAPTER[S] settings, " + \
+ "or remove South from INSTALLED_APPS.\n"
+ ) % (settings.DATABASES[alias]['ENGINE'],)
+ )
+ sys.exit(1)
+
+# Now, turn that into a dict of <alias: south db module>
+dbs = {}
+try:
+ for alias, module_name in db_engines.items():
+ module = __import__(module_name, {}, {}, [''])
+ dbs[alias] = module.DatabaseOperations(alias)
+except ImportError:
+ # This error should only be triggered on 1.1 and below.
+ sys.stderr.write(
+ (
+ "There is no South database module '%s' for your database. " + \
+ "Please either choose a supported database, check for " + \
+ "SOUTH_DATABASE_ADAPTER[S] settings, " + \
+ "or remove South from INSTALLED_APPS.\n"
+ ) % (module_name,)
+ )
+ sys.exit(1)
+
+# Finally, to make old migrations work, keep 'db' around as the default database
+db = dbs[DEFAULT_DB_ALIAS]
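
A minimal sketch (not part of the diff) of how calling code reaches these objects: db is the DatabaseOperations instance for the default alias, and multi-db aware migrations index dbs by alias instead. It assumes a configured Django settings module whose ENGINE appears in engine_modules above.

    from south.db import db, dbs, DEFAULT_DB_ALIAS

    assert dbs[DEFAULT_DB_ALIAS] is db
    print db.backend_name   # e.g. 'postgres', 'mysql' or 'sqlite3', depending on settings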
322 vendor-local/lib/python/south/db/firebird.py
@@ -0,0 +1,322 @@
+# firebird
+
+import datetime
+
+from django.db import connection, models
+from django.core.management.color import no_style
+from django.db.utils import DatabaseError
+
+from south.db import generic
+
+class DatabaseOperations(generic.DatabaseOperations):
+ backend_name = 'firebird'
+ alter_string_set_type = 'ALTER %(column)s TYPE %(type)s'
+ alter_string_set_default = 'ALTER %(column)s SET DEFAULT %(default)s;'
+ alter_string_drop_null = ''
+ add_column_string = 'ALTER TABLE %s ADD %s;'
+ delete_column_string = 'ALTER TABLE %s DROP %s;'
+ allows_combined_alters = False
+
+ def _fill_constraint_cache(self, db_name, table_name):
+ self._constraint_cache.setdefault(db_name, {})
+ self._constraint_cache[db_name][table_name] = {}
+
+ rows = self.execute("""
+ SELECT
+ rc.RDB$CONSTRAINT_NAME,
+ rc.RDB$CONSTRAINT_TYPE,
+ cc.RDB$TRIGGER_NAME
+ FROM rdb$relation_constraints rc
+ JOIN rdb$check_constraints cc
+ ON rc.rdb$constraint_name = cc.rdb$constraint_name
+ WHERE rc.rdb$constraint_type = 'NOT NULL'
+ AND rc.rdb$relation_name = '%s'
+ """ % table_name)
+
+ for constraint, kind, column in rows:
+ self._constraint_cache[db_name][table_name].setdefault(column, set())
+ self._constraint_cache[db_name][table_name][column].add((kind, constraint))
+ return
+
+ def _alter_column_set_null(self, table_name, column_name, is_null):
+ sql = """
+ UPDATE RDB$RELATION_FIELDS SET RDB$NULL_FLAG = %(null_flag)s
+ WHERE RDB$FIELD_NAME = '%(column)s'
+ AND RDB$RELATION_NAME = '%(table_name)s'
+ """
+ null_flag = 'NULL' if is_null else '1'
+ return sql % {
+ 'null_flag': null_flag,
+ 'column': column_name.upper(),
+ 'table_name': table_name.upper()
+ }
+
+ def _column_has_default(self, params):
+ sql = """
+ SELECT a.RDB$DEFAULT_VALUE
+ FROM RDB$RELATION_FIELDS a
+ WHERE a.RDB$FIELD_NAME = '%(column)s'
+ AND a.RDB$RELATION_NAME = '%(table_name)s'
+ """
+ value = self.execute(sql % params)
+ return True if value else False
+
+
+ def _alter_set_defaults(self, field, name, params, sqls):
+ "Subcommand of alter_column that sets default values (overrideable)"
+ # Next, set any default
+ if not field.null and field.has_default():
+ default = field.get_default()
+ sqls.append(('ALTER COLUMN %s SET DEFAULT %%s ' % (self.quote_name(name),), [default]))
+ elif self._column_has_default(params):
+ sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), []))
+
+
+ @generic.invalidate_table_constraints
+ def create_table(self, table_name, fields):
+ qn = self.quote_name(table_name)
+ columns = []
+ autoinc_sql = ''
+
+ for field_name, field in fields:
+ col = self.column_sql(table_name, field_name, field)
+ if not col:
+ continue
+ #col = self.adj_column_sql(col)
+
+ columns.append(col)
+ if isinstance(field, models.AutoField):
+ field_name = field.db_column or field.column
+ autoinc_sql = connection.ops.autoinc_sql(table_name, field_name)
+
+ sql = 'CREATE TABLE %s (%s);' % (qn, ', '.join([col for col in columns]))
+ self.execute(sql)
+ if autoinc_sql:
+ self.execute(autoinc_sql[0])
+ self.execute(autoinc_sql[1])
+
+
+ def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
+ """
+ Creates the SQL snippet for a column. Used by add_column and add_table.
+ """
+
+ # If the field hasn't already been told its attribute name, do so.
+ if not field_prepared:
+ field.set_attributes_from_name(field_name)
+
+ # hook for the field to do any resolution prior to it's attributes being queried
+ if hasattr(field, 'south_init'):
+ field.south_init()
+
+ # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
+ field = self._field_sanity(field)
+
+ try:
+ sql = field.db_type(connection=self._get_connection())
+ except TypeError:
+ sql = field.db_type()
+
+ if sql:
+ # Some callers, like the sqlite stuff, just want the extended type.
+ if with_name:
+ field_output = [self.quote_name(field.column), sql]
+ else:
+ field_output = [sql]
+
+ if field.primary_key:
+ field_output.append('NOT NULL PRIMARY KEY')
+ elif field.unique:
+ # Just use UNIQUE (no indexes any more, we have delete_unique)
+ field_output.append('UNIQUE')
+
+ sql = ' '.join(field_output)
+ sqlparams = ()
+
+ # if the field is "NOT NULL" and a default value is provided, create the column with it
+ # this allows the addition of a NOT NULL field to a table with existing rows
+ if not getattr(field, '_suppress_default', False):
+ if field.has_default():
+ default = field.get_default()
+ # If the default is actually None, don't add a default term
+ if default is not None:
+ # If the default is a callable, then call it!
+ if callable(default):
+ default = default()
+ # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
+ if isinstance(default, basestring):
+ default = "'%s'" % default.replace("'", "''")
+ elif isinstance(default, (datetime.date, datetime.time, datetime.datetime)):
+ default = "'%s'" % default
+ elif isinstance(default, bool):
+ default = int(default)
+ # Escape any % signs in the output (bug #317)
+ if isinstance(default, basestring):
+ default = default.replace("%", "%%")
+ # Add it in
+ sql += " DEFAULT %s"
+ sqlparams = (default)
+ elif (not field.null and field.blank) or (field.get_default() == ''):
+ if field.empty_strings_allowed and self._get_connection().features.interprets_empty_strings_as_nulls:
+ sql += " DEFAULT ''"
+ # Error here would be nice, but doesn't seem to play fair.
+ #else:
+ # raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")
+
+ # Firebird needs NOT NULL to come after the DEFAULT value keyword
+ if not field.primary_key and not field.null:
+ sql += ' NOT NULL'
+
+ if field.rel and self.supports_foreign_keys:
+ self.add_deferred_sql(
+ self.foreign_key_sql(
+ table_name,
+ field.column,
+ field.rel.to._meta.db_table,
+ field.rel.to._meta.get_field(field.rel.field_name).column
+ )
+ )
+
+ # Things like the contrib.gis module fields have this in 1.1 and below
+ if hasattr(field, 'post_create_sql'):
+ for stmt in field.post_create_sql(no_style(), table_name):
+ self.add_deferred_sql(stmt)
+
+ # In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
+ # This also creates normal indexes in 1.1.
+ if hasattr(self._get_connection().creation, "sql_indexes_for_field"):
+ # Make a fake model to pass in, with only db_table
+ model = self.mock_model("FakeModelForGISCreation", table_name)
+ for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
+ self.add_deferred_sql(stmt)
+
+ if sql:
+ return sql % sqlparams
+ else:
+ return None
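+ # Rough illustration (hypothetical field, not part of the original source):
+ # for a models.CharField(max_length=100, null=False) named "title", this
+ # method would typically return something like
+ #     "title" varchar(100) NOT NULL
+ # with any DEFAULT clause placed before NOT NULL, as Firebird requires.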
+
+
+ def _drop_constraints(self, table_name, name, field):
+ if self.has_check_constraints:
+ check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
+ for constraint in check_constraints:
+ self.execute(self.delete_check_sql % {
+ 'table': self.quote_name(table_name),
+ 'constraint': self.quote_name(constraint),
+ })
+
+ # Drop or add UNIQUE constraint
+ unique_constraint = list(self._constraints_affecting_columns(table_name, [name], "UNIQUE"))
+ if field.unique and not unique_constraint:
+ self.create_unique(table_name, [name])
+ elif not field.unique and unique_constraint:
+ self.delete_unique(table_name, [name])
+
+ # Drop all foreign key constraints
+ try:
+ self.delete_foreign_key(table_name, name)
+ except ValueError:
+ # There weren't any
+ pass
+
+
+ @generic.invalidate_table_constraints
+ def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
+ """
+ Alters the given column name so it will match the given field.
+ Note that conversion between the two by the database must be possible.
+ Will not automatically add _id by default; to have this behaviour, pass
+ explicit_name=False.
+
+ @param table_name: The name of the table to add the column to
+ @param name: The name of the column to alter
+ @param field: The new field definition to use
+ """
+
+ if self.dry_run:
+ return
+
+ # hook for the field to do any resolution prior to its attributes being queried
+ if hasattr(field, 'south_init'):
+ field.south_init()
+
+ # Add _id or whatever if we need to
+ field.set_attributes_from_name(name)
+ if not explicit_name:
+ name = field.column
+ else:
+ field.column = name
+
+ if not ignore_constraints:
+ # Drop all check constraints. Note that constraints will be added back
+ # with self.alter_string_set_type and self.alter_string_drop_null.
+ self._drop_constraints(table_name, name, field)
+
+ # First, change the type
+ params = {
+ "column": self.quote_name(name),
+ "type": self._db_type_for_alter_column(field),
+ "table_name": table_name
+ }
+
+ # SQLs is a list of (SQL, values) pairs.
+ sqls = []
+ sqls_extra = []
+
+ # Only alter the column if it has a type (Geometry ones sometimes don't)
+ if params["type"] is not None:
+ sqls.append((self.alter_string_set_type % params, []))
+
+ # Add any field- and backend- specific modifications
+ self._alter_add_column_mods(field, name, params, sqls)
+
+ # Next, nullity: handled separately, since Firebird doesn't support DROP NOT NULL
+ sqls_extra.append(self._alter_column_set_null(table_name, name, field.null))
+
+ # Next, set any default
+ self._alter_set_defaults(field, name, params, sqls)
+
+ # Finally, actually change the column
+ if self.allows_combined_alters:
+ sqls, values = zip(*sqls)
+ self.execute(
+ "ALTER TABLE %s %s;" % (self.quote_name(table_name), ", ".join(sqls)),
+ generic.flatten(values),
+ )
+ else:
+ # Databases like e.g. MySQL don't like more than one alter at once.
+ for sql, values in sqls:
+ try:
+ self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), sql), values)
+ except DatabaseError, e:
+ print e
+
+
+ # Execute the extra SQL, which doesn't need an ALTER TABLE statement
+ for sql in sqls_extra:
+ self.execute(sql)
+
+ if not ignore_constraints:
+ # Add back FK constraints if needed
+ if field.rel and self.supports_foreign_keys:
+ self.execute(
+ self.foreign_key_sql(
+ table_name,
+ field.column,
+ field.rel.to._meta.db_table,
+ field.rel.to._meta.get_field(field.rel.field_name).column
+ )
+ )
+
+ @generic.copy_column_constraints
+ @generic.delete_column_constraints
+ def rename_column(self, table_name, old, new):
+ if old == new:
+ # Short-circuit out
+ return []
+
+ self.execute('ALTER TABLE %s ALTER %s TO %s;' % (
+ self.quote_name(table_name),
+ self.quote_name(old),
+ self.quote_name(new),
+ ))
1,139 vendor-local/lib/python/south/db/generic.py
@@ -0,0 +1,1139 @@
+import re
+import sys
+
+from django.core.management.color import no_style
+from django.db import transaction, models
+from django.db.utils import DatabaseError
+from django.db.backends.util import truncate_name
+from django.db.backends.creation import BaseDatabaseCreation
+from django.db.models.fields import NOT_PROVIDED
+from django.dispatch import dispatcher
+from django.conf import settings
+from django.utils.datastructures import SortedDict
+try:
+ from django.utils.functional import cached_property
+except ImportError:
+ class cached_property(object):
+ """
+ Decorator that converts a method with a single
+ self argument into a property cached on the instance.
+ """
+ def __init__(self, func):
+ self.func = func
+
+ def __get__(self, instance, type):
+ res = instance.__dict__[self.func.__name__] = self.func(instance)
+ return res
+
+from south.logger import get_logger
+
+
+def alias(attrname):
+ """
+ Returns a function which calls 'attrname' - for function aliasing.
+ We can't just use foo = bar, as this breaks subclassing.
+ """
+ def func(self, *args, **kwds):
+ return getattr(self, attrname)(*args, **kwds)
+ return func
+
+
+def invalidate_table_constraints(func):
+ def _cache_clear(self, table, *args, **opts):
+ self._set_cache(table, value=INVALID)
+ return func(self, table, *args, **opts)
+ return _cache_clear
+
+
+def delete_column_constraints(func):
+ def _column_rm(self, table, column, *args, **opts):
+ self._set_cache(table, column, value=[])
+ return func(self, table, column, *args, **opts)
+ return _column_rm
+
+
+def copy_column_constraints(func):
+ def _column_cp(self, table, column_old, column_new, *args, **opts):
+ db_name = self._get_setting('NAME')
+ self._set_cache(table, column_new, value=self.lookup_constraint(db_name, table, column_old))
+ return func(self, table, column_old, column_new, *args, **opts)
+ return _column_cp
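+ # These three decorators keep the per-table constraint cache consistent with
+ # schema changes; for example, the Firebird backend above wraps rename_column
+ # with @copy_column_constraints and @delete_column_constraints, while
+ # create_table and alter_column use @invalidate_table_constraints.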
+
+
+class INVALID(Exception):
+ def __repr__(self):
+ return 'INVALID'
+
+
+class DryRunError(ValueError):
+ pass
+
+
+class DatabaseOperations(object):
+ """
+ Generic SQL implementation of the DatabaseOperations.
+ Some of this code comes from Django Evolution.
+ """
+
+ alter_string_set_type = 'ALTER COLUMN %(column)s TYPE %(type)s'
+ alter_string_set_null = 'ALTER COLUMN %(column)s DROP NOT NULL'
+ alter_string_drop_null = 'ALTER COLUMN %(column)s SET NOT NULL'
+ delete_check_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
+ add_column_string = 'ALTER TABLE %s ADD COLUMN %s;'
+ delete_unique_sql = "ALTER TABLE %s DROP CONSTRAINT %s"
+ delete_foreign_key_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
+ max_index_name_length = 63
+ drop_index_string = 'DROP INDEX %(index_name)s'
+ delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;'
+ create_primary_key_string = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)"
+ delete_primary_key_sql = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s"
+ add_check_constraint_fragment = "ADD CONSTRAINT %(constraint)s CHECK (%(check)s)"
+ rename_table_sql = "ALTER TABLE %s RENAME TO %s;"
+ backend_name = None
+ default_schema_name = "public"
+
+ # Features
+ allows_combined_alters = True
+ supports_foreign_keys = True
+ has_check_constraints = True
+ has_booleans = True
+
+ @cached_property
+ def has_ddl_transactions(self):
+ """
+ Tests the database using feature detection to see if it has
+ transactional DDL support.
+ """
+ self._possibly_initialise()
+ connection = self._get_connection()
+ if hasattr(connection.features, "confirm") and not connection.features._confirmed:
+ connection.features.confirm()
+ # Django 1.3's MySQLdb backend doesn't raise DatabaseError
+ exceptions = (DatabaseError, )
+ try:
+ from MySQLdb import OperationalError
+ exceptions += (OperationalError, )
+ except ImportError:
+ pass
+ # Now do the test
+ if getattr(connection.features, 'supports_transactions', True):
+ cursor = connection.cursor()
+ self.start_transaction()
+ cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
+ self.rollback_transaction()
+ try:
+ try:
+ cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
+ except exceptions:
+ return False
+ else:
+ return True
+ finally:
+ cursor.execute('DROP TABLE DDL_TRANSACTION_TEST')
+ else:
+ return False
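+ # Explanatory note (not from the original source): with transactional DDL
+ # (e.g. PostgreSQL) the first CREATE TABLE is rolled back, so the second one
+ # succeeds and this returns True; on MySQL the DDL auto-commits, the second
+ # CREATE raises, and this returns False.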
+
+ def __init__(self, db_alias):
+ self.debug = False
+ self.deferred_sql = []
+ self.dry_run = False
+ self.pending_transactions = 0
+ self.pending_create_signals = []
+ self.db_alias = db_alias
+ self._constraint_cache = {}
+ self._initialised = False
+
+ def lookup_constraint(self, db_name, table_name, column_name=None):
+ """ return a set() of constraints for db_name.table_name.column_name """
+ def _lookup():
+ table = self._constraint_cache[db_name][table_name]
+ if table is INVALID:
+ raise INVALID
+ elif column_name is None:
+ return table.items()
+ else:
+ return table[column_name]
+
+ try:
+ ret = _lookup()
+ return ret
+ except INVALID:
+ del self._constraint_cache[db_name][table_name]
+ self._fill_constraint_cache(db_name, table_name)
+ except KeyError:
+ if self._is_valid_cache(db_name, table_name):
+ return []
+ self._fill_constraint_cache(db_name, table_name)
+
+ return self.lookup_constraint(db_name, table_name, column_name)
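+ # Illustrative cache shape (the constraint names here are hypothetical): for
+ # a single column this returns a set of (kind, constraint_name) pairs, e.g.
+ #     set([('UNIQUE', 'app_entry_slug_key'), ('FOREIGN KEY', 'app_entry_author_id_fk')])
+ # while column_name=None returns the (column, set) items for the whole table.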
+
+ def _set_cache(self, table_name, column_name=None, value=INVALID):
+ db_name = self._get_setting('NAME')
+ try:
+ if column_name is not None:
+ self._constraint_cache[db_name][table_name][column_name] = value
+ else:
+ self._constraint_cache[db_name][table_name] = value
+ except (LookupError, TypeError):
+ pass
+
+ def _is_valid_cache(self, db_name, table_name):
+ # we cache per-table so if the table is there it is valid
+ try:
+ return self._constraint_cache[db_name][table_name] is not INVALID
+ except KeyError:
+ return False
+
+ def _is_multidb(self):
+ try:
+ from django.db import connections
+ connections # Prevents "unused import" warning
+ except ImportError:
+ return False
+ else:
+ return True
+
+ def _get_connection(self):
+ """
+ Returns a django connection for a given DB Alias
+ """
+ if self._is_multidb():
+ from django.db import connections
+ return connections[self.db_alias]
+ else:
+ from django.db import connection
+ return connection
+
+ def _get_setting(self, setting_name):
+ """
+ Allows code to get a setting (like, for example, STORAGE_ENGINE)
+ """
+ setting_name = setting_name.upper()
+ connection = self._get_connection()
+ if self._is_multidb():
+ # Django 1.2 and above
+ return connection.settings_dict[setting_name]
+ else:
+ # Django 1.1 and below
+ return getattr(settings, "DATABASE_%s" % setting_name)
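+ # For example (assuming the usual 'default' alias): _get_setting('ENGINE')
+ # reads settings.DATABASES['default']['ENGINE'] on Django 1.2+, and falls
+ # back to settings.DATABASE_ENGINE on Django 1.1 and below.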
+
+ def _has_setting(self, setting_name):
+ """
+ Existence-checking version of _get_setting.
+ """
+ try:
+ self._get_setting(setting_name)
+ except (KeyError, AttributeError):
+ return False
+ else:
+ return True
+
+ def _get_schema_name(self):
+ try:
+ return self._get_setting('schema')
+ except (KeyError, AttributeError):
+ return self.default_schema_name
+
+ def _possibly_initialise(self):
+ if not self._initialised:
+ self.connection_init()
+ self._initialised = True
+
+ def connection_init(self):
+ """
+ Run before any SQL to let database-specific config be sent as a command,
+ e.g. which storage engine to use (MySQL) or the transaction serialisability level.
+ """
+ pass
+
+ def quote_name(self, name):
+ """
+ Uses the database backend to quote the given table/column name.
+ """
+ return self._get_connection().ops.quote_name(name)
+
+ def execute(self, sql, params=[]):
+ """
+ Executes the given SQL statement, with optional parameters.
+ If the instance's debug attribute is True, prints out what it executes.
+ """
+
+ self._possibly_initialise()
+
+ cursor = self._get_connection().cursor()
+ if self.debug:
+ print " = %s" % sql, params
+
+ if self.dry_run:
+ return []
+
+ get_logger().debug('execute "%s" with params "%s"' % (sql, params))
+
+ try:
+ cursor.execute(sql, params)
+ except DatabaseError, e:
+ print >> sys.stderr, 'FATAL ERROR - The following SQL query failed: %s' % sql
+ print >> sys.stderr, 'The error was: %s' % e
+ raise
+
+ try:
+ return cursor.fetchall()
+ except:
+ return []
+
+ def execute_many(self, sql, regex=r"(?mx) ([^';]* (?:'[^']*'[^';]*)*)", comment_regex=r"(?mx) (?:^\s*$)|(?:--.*$)"):
+ """
+ Takes a string of SQL (e.g. the contents of a SQL file) and executes it as many separate statements.
+ (Some backends, such as Postgres, don't work otherwise.)
+ """
+ # Be warned: This function is full of dark magic. Make sure you really
+ # know regexes before trying to edit it.
+ # First, strip comments
+ sql = "\n".join([x.strip().replace("%", "%%") for x in re.split(comment_regex, sql) if x.strip()])
+ # Now execute each statement
+ for st in re.split(regex, sql)[1:][::2]:
+ self.execute(st)
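+ # Sketch of the intended behaviour (as described by the docstring): given
+ # something like "CREATE TABLE foo (id int); INSERT INTO foo VALUES (1);",
+ # each semicolon-terminated statement is extracted by the regex and passed
+ # to self.execute() on its own.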
+
+ def add_deferred_sql(self, sql):
+ """
+ Adds a SQL statement to the deferred list; it won't be executed until
+ this instance's execute_deferred_sql method is run.
+ """
+ self.deferred_sql.append(sql)
+
+ def execute_deferred_sql(self):
+ """
+ Executes all deferred SQL, resetting the deferred_sql list
+ """
+ for sql in self.deferred_sql:
+ self.execute(sql)
+
+ self.deferred_sql = []
+
+ def clear_deferred_sql(self):
+ """
+ Resets the deferred_sql list to empty.
+ """
+ self.deferred_sql = []
+
+ def clear_run_data(self, pending_creates = None):
+ """
+ Resets variables to how they should be before a run. Used for dry runs.
+ If you want, pass in an old pending_creates to reset to.
+ """
+ self.clear_deferred_sql()
+ self.pending_create_signals = pending_creates or []
+
+ def get_pending_creates(self):
+ return self.pending_create_signals
+
+ @invalidate_table_constraints
+ def create_table(self, table_name, fields):
+ """
+ Creates the table 'table_name'. 'fields' is a tuple of fields,
+ each represented by a 2-part tuple of field name and a
+ django.db.models.fields.Field object
+ """
+
+ if len(table_name) > 63:
+ print " ! WARNING: You have a table name longer than 63 characters; this will not fully work on PostgreSQL or MySQL."
+
+ # avoid default values in CREATE TABLE statements (#925)
+ for field_name, field in fields:
+ field._suppress_default = True
+
+ columns = [
+ self.column_sql(table_name, field_name, field)
+ for field_name, field in fields
+ ]
+
+ self.execute('CREATE TABLE %s (%s);' % (
+ self.quote_name(table_name),
+ ', '.join([col for col in columns if col]),
+ ))
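+ # Typical call, as emitted by a South schemamigration (illustrative app and
+ # field names):
+ #     db.create_table('app_entry', [
+ #         ('id', models.AutoField(primary_key=True)),
+ #         ('title', models.CharField(max_length=100)),
+ #     ])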
+
+ add_table = alias('create_table') # Alias for consistency's sake
+
+ @invalidate_table_constraints
+ def rename_table(self, old_table_name, table_name):
+ """
+ Renames the table 'old_table_name' to 'table_name'.
+ """
+ if old_table_name == table_name:
+ # Short-circuit out.
+ return
+ params = (self.quote_name(old_table_name), self.quote_name(table_name))
+ self.execute(self.rename_table_sql % params)
+ # Invalidate the not-yet-indexed table
+ self._set_cache(table_name, value=INVALID)
+
+ @invalidate_table_constraints
+ def delete_table(self, table_name, cascade=True):
+ """
+ Deletes the table 'table_name'.
+ """
+ params = (self.quote_name(table_name), )
+ if cascade:
+ self.execute('DROP TABLE %s CASCADE;' % params)
+ else:
+ self.execute('DROP TABLE %s;' % params)
+
+ drop_table = alias('delete_table')
+
+ @invalidate_table_constraints
+ def clear_table(self, table_name):
+ """
+ Deletes all rows from 'table_name'.
+ """
+ params = (self.quote_name(table_name), )
+ self.execute('DELETE FROM %s;' % params)
+
+ @invalidate_table_constraints
+ def add_column(self, table_name, name, field, keep_default=True):
+ """
+ Adds the column 'name' to the table 'table_name'.
+ Uses the 'field' parameter, a django.db.models.fields.Field instance,
+ to generate the necessary sql
+
+ @param table_name: The name of the table to add the column to
+ @param name: The name of the column to add
+ @param field: The field to use
+ """
+ sql = self.column_sql(table_name, name, field)
+ if sql:
+ params = (
+ self.quote_name(table_name),
+ sql,
+ )
+ sql = self.add_column_string % params
+ self.execute(sql)
+
+ # Now, drop the default if we need to
+ if not keep_default and field.default is not None:
+ field.default = NOT_PROVIDED
+ self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)
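+ # Typical migration usage (illustrative names): add a NOT NULL column to a
+ # populated table by supplying a one-off default, e.g.
+ #     db.add_column('app_entry', 'rating',
+ #                   models.IntegerField(default=0), keep_default=False)
+ # keep_default=False makes the block above drop the database-level default
+ # again once the column exists.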
+
+ def _db_type_for_alter_column(self, field):
+ """
+ Returns a field's type suitable for ALTER COLUMN.
+ By default it just returns field.db_type().
+ To be overridden by backend-specific subclasses
+ @param field: The field to generate type for
+ """
+ try:
+ return field.db_type(connection=self._get_connection())
+ except TypeError:
+ return field.db_type()
+
+ def _alter_add_column_mods(self, field, name, params, sqls):
+ """
+ Subcommand of alter_column that modifies column definitions beyond
+ the type string -- e.g. adding constraints where they cannot be specified
+ as part of the type (overrideable)
+ """
+ pass
+
+ def _alter_set_defaults(self, field, name, params, sqls):
+ "Subcommand of alter_column that sets default values (overrideable)"
+ # Next, set any default
+ if not field.null and field.has_default():
+ default = field.get_default()
+ sqls.append(('ALTER COLUMN %s SET DEFAULT %%s ' % (self.quote_name(name),), [default]))
+ else:
+ sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), []))
+
+ @invalidate_table_constraints
+ def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
+ """
+ Alters the given column name so it will match the given field.
+ Note that conversion between the two by the database must be possible.
+ Will not automatically add _id by default; to have this behaviour, pass
+ explicit_name=False.
+
+ @param table_name: The name of the table to add the column to
+ @param name: The name of the column to alter
+ @param field: The new field definition to use
+ """
+
+ if self.dry_run:
+ if self.debug:
+ print ' - no dry run output for alter_column() due to dynamic DDL, sorry'
+ return
+
+ # hook for the field to do any resolution prior to its attributes being queried
+ if hasattr(field, 'south_init'):
+ field.south_init()
+
+ # Add _id or whatever if we need to
+ field.set_attributes_from_name(name)
+ if not explicit_name:
+ name = field.column
+ else:
+ field.column = name
+
+ if not ignore_constraints:
+ # Drop all check constraints. Note that constraints will be added back
+ # with self.alter_string_set_type and self.alter_string_drop_null.
+ if self.has_check_constraints:
+ check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
+ for constraint in check_constraints:
+ self.execute(self.delete_check_sql % {
+ 'table': self.quote_name(table_name),
+ 'constraint': self.quote_name(constraint),
+ })
+
+ # Drop all foreign key constraints
+ try:
+ self.delete_foreign_key(table_name, name)
+ except ValueError:
+ # There weren't any
+ pass
+
+ # First, change the type
+ params = {
+ "column": self.quote_name(name),
+ "type": self._db_type_for_alter_column(field),
+ "table_name": table_name
+ }
+
+ # SQLs is a list of (SQL, values) pairs.
+ sqls = []
+
+ # Only alter the column if it has a type (Geometry ones sometimes don't)
+ if params["type"] is not None:
+ sqls.append((self.alter_string_set_type % params, []))
+
+