summaryrefslogtreecommitdiff
path: root/lib/python2.7/site-packages/south
diff options
context:
space:
mode:
Diffstat (limited to 'lib/python2.7/site-packages/south')
-rw-r--r--lib/python2.7/site-packages/south/__init__.py9
-rw-r--r--lib/python2.7/site-packages/south/creator/__init__.py5
-rw-r--r--lib/python2.7/site-packages/south/creator/actions.py559
-rw-r--r--lib/python2.7/site-packages/south/creator/changes.py506
-rw-r--r--lib/python2.7/site-packages/south/creator/freezer.py192
-rw-r--r--lib/python2.7/site-packages/south/db/__init__.py83
-rw-r--r--lib/python2.7/site-packages/south/db/firebird.py362
-rw-r--r--lib/python2.7/site-packages/south/db/generic.py1164
-rw-r--r--lib/python2.7/site-packages/south/db/mysql.py290
-rw-r--r--lib/python2.7/site-packages/south/db/oracle.py345
-rw-r--r--lib/python2.7/site-packages/south/db/postgresql_psycopg2.py96
-rw-r--r--lib/python2.7/site-packages/south/db/sql_server/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/db/sql_server/pyodbc.py444
-rw-r--r--lib/python2.7/site-packages/south/db/sqlite3.py272
-rw-r--r--lib/python2.7/site-packages/south/exceptions.py160
-rw-r--r--lib/python2.7/site-packages/south/hacks/__init__.py10
-rw-r--r--lib/python2.7/site-packages/south/hacks/django_1_0.py110
-rw-r--r--lib/python2.7/site-packages/south/introspection_plugins/__init__.py11
-rw-r--r--lib/python2.7/site-packages/south/introspection_plugins/annoying_autoonetoone.py11
-rw-r--r--lib/python2.7/site-packages/south/introspection_plugins/django_audit_log.py30
-rw-r--r--lib/python2.7/site-packages/south/introspection_plugins/django_objectpermissions.py16
-rw-r--r--lib/python2.7/site-packages/south/introspection_plugins/django_tagging.py24
-rw-r--r--lib/python2.7/site-packages/south/introspection_plugins/django_taggit.py14
-rw-r--r--lib/python2.7/site-packages/south/introspection_plugins/django_timezones.py21
-rw-r--r--lib/python2.7/site-packages/south/introspection_plugins/geodjango.py45
-rw-r--r--lib/python2.7/site-packages/south/logger.py38
-rw-r--r--lib/python2.7/site-packages/south/management/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/management/commands/__init__.py40
-rw-r--r--lib/python2.7/site-packages/south/management/commands/convert_to_south.py95
-rw-r--r--lib/python2.7/site-packages/south/management/commands/datamigration.py139
-rw-r--r--lib/python2.7/site-packages/south/management/commands/graphmigrations.py63
-rw-r--r--lib/python2.7/site-packages/south/management/commands/migrate.py264
-rw-r--r--lib/python2.7/site-packages/south/management/commands/migrationcheck.py67
-rw-r--r--lib/python2.7/site-packages/south/management/commands/schemamigration.py229
-rw-r--r--lib/python2.7/site-packages/south/management/commands/startmigration.py33
-rw-r--r--lib/python2.7/site-packages/south/management/commands/syncdb.py115
-rw-r--r--lib/python2.7/site-packages/south/management/commands/test.py8
-rw-r--r--lib/python2.7/site-packages/south/management/commands/testserver.py8
-rw-r--r--lib/python2.7/site-packages/south/migration/__init__.py235
-rw-r--r--lib/python2.7/site-packages/south/migration/base.py440
-rw-r--r--lib/python2.7/site-packages/south/migration/migrators.py379
-rw-r--r--lib/python2.7/site-packages/south/migration/utils.py94
-rw-r--r--lib/python2.7/site-packages/south/models.py37
-rw-r--r--lib/python2.7/site-packages/south/modelsinspector.py464
-rw-r--r--lib/python2.7/site-packages/south/orm.py407
-rw-r--r--lib/python2.7/site-packages/south/signals.py24
-rw-r--r--lib/python2.7/site-packages/south/test_shim.py6
-rw-r--r--lib/python2.7/site-packages/south/tests/__init__.py109
-rw-r--r--lib/python2.7/site-packages/south/tests/autodetection.py360
-rw-r--r--lib/python2.7/site-packages/south/tests/brokenapp/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/brokenapp/migrations/0001_depends_on_unmigrated.py13
-rw-r--r--lib/python2.7/site-packages/south/tests/brokenapp/migrations/0002_depends_on_unknown.py13
-rw-r--r--lib/python2.7/site-packages/south/tests/brokenapp/migrations/0003_depends_on_higher.py13
-rw-r--r--lib/python2.7/site-packages/south/tests/brokenapp/migrations/0004_higher.py11
-rw-r--r--lib/python2.7/site-packages/south/tests/brokenapp/migrations/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/brokenapp/models.py55
-rw-r--r--lib/python2.7/site-packages/south/tests/circular_a/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/circular_a/migrations/0001_first.py13
-rw-r--r--lib/python2.7/site-packages/south/tests/circular_a/migrations/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/circular_a/models.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/circular_b/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/circular_b/migrations/0001_first.py13
-rw-r--r--lib/python2.7/site-packages/south/tests/circular_b/migrations/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/circular_b/models.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/db.py1060
-rw-r--r--lib/python2.7/site-packages/south/tests/db_firebird.py39
-rw-r--r--lib/python2.7/site-packages/south/tests/db_mysql.py164
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_a/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_a/migrations/0001_a.py11
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_a/migrations/0002_a.py11
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_a/migrations/0003_a.py11
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_a/migrations/0004_a.py13
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_a/migrations/0005_a.py11
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_a/migrations/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_a/models.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_b/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_b/migrations/0001_b.py11
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_b/migrations/0002_b.py13
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_b/migrations/0003_b.py13
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_b/migrations/0004_b.py11
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_b/migrations/0005_b.py11
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_b/migrations/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_b/models.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_c/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_c/migrations/0001_c.py11
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_c/migrations/0002_c.py11
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_c/migrations/0003_c.py11
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_c/migrations/0004_c.py11
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_c/migrations/0005_c.py13
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_c/migrations/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/deps_c/models.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/emptyapp/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/emptyapp/migrations/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/emptyapp/models.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/fakeapp/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/fakeapp/migrations/0001_spam.py17
-rw-r--r--lib/python2.7/site-packages/south/tests/fakeapp/migrations/0002_eggs.py20
-rw-r--r--lib/python2.7/site-packages/south/tests/fakeapp/migrations/0003_alter_spam.py18
-rw-r--r--lib/python2.7/site-packages/south/tests/fakeapp/migrations/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/fakeapp/models.py111
-rw-r--r--lib/python2.7/site-packages/south/tests/freezer.py15
-rw-r--r--lib/python2.7/site-packages/south/tests/inspector.py109
-rw-r--r--lib/python2.7/site-packages/south/tests/logger.py82
-rw-r--r--lib/python2.7/site-packages/south/tests/logic.py902
-rw-r--r--lib/python2.7/site-packages/south/tests/non_managed/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/non_managed/migrations/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/non_managed/models.py16
-rw-r--r--lib/python2.7/site-packages/south/tests/otherfakeapp/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/0001_first.py15
-rw-r--r--lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/0002_second.py11
-rw-r--r--lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/0003_third.py14
-rw-r--r--lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/tests/otherfakeapp/models.py1
-rw-r--r--lib/python2.7/site-packages/south/utils/__init__.py73
-rw-r--r--lib/python2.7/site-packages/south/utils/datetime_utils.py28
-rw-r--r--lib/python2.7/site-packages/south/utils/py3.py28
-rw-r--r--lib/python2.7/site-packages/south/v2.py25
117 files changed, 11485 insertions, 0 deletions
diff --git a/lib/python2.7/site-packages/south/__init__.py b/lib/python2.7/site-packages/south/__init__.py
new file mode 100644
index 0000000..86642c6
--- /dev/null
+++ b/lib/python2.7/site-packages/south/__init__.py
@@ -0,0 +1,9 @@
+"""
+South - Useable migrations for Django apps
+"""
+
+__version__ = "0.8.4"
+__authors__ = [
+ "Andrew Godwin <andrew@aeracode.org>",
+ "Andy McCurdy <andy@andymccurdy.com>"
+]
diff --git a/lib/python2.7/site-packages/south/creator/__init__.py b/lib/python2.7/site-packages/south/creator/__init__.py
new file mode 100644
index 0000000..96a1a80
--- /dev/null
+++ b/lib/python2.7/site-packages/south/creator/__init__.py
@@ -0,0 +1,5 @@
+"""
+The creator module is responsible for making new migration files, either
+as blank templates or autodetecting changes. It contains code that used to
+all be in startmigration.py.
+"""
diff --git a/lib/python2.7/site-packages/south/creator/actions.py b/lib/python2.7/site-packages/south/creator/actions.py
new file mode 100644
index 0000000..2ffc8ca
--- /dev/null
+++ b/lib/python2.7/site-packages/south/creator/actions.py
@@ -0,0 +1,559 @@
+"""
+Actions - things like 'a model was removed' or 'a field was changed'.
+Each one has a class, which can take the action description and insert code
+blocks into the forwards() and backwards() methods, in the right place.
+"""
+
+from __future__ import print_function
+
+import sys
+
+from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT
+from django.db.models.fields import FieldDoesNotExist, NOT_PROVIDED, CharField, TextField
+
+from south.modelsinspector import value_clean
+from south.creator.freezer import remove_useless_attributes, model_key
+from south.utils import datetime_utils
+from south.utils.py3 import raw_input
+
+
+class Action(object):
+ """
+ Generic base Action class. Contains utility methods for inserting into
+ the forwards() and backwards() method lists.
+ """
+
+ prepend_forwards = False
+ prepend_backwards = False
+
+ def forwards_code(self):
+ raise NotImplementedError
+
+ def backwards_code(self):
+ raise NotImplementedError
+
+ def add_forwards(self, forwards):
+ if self.prepend_forwards:
+ forwards.insert(0, self.forwards_code())
+ else:
+ forwards.append(self.forwards_code())
+
+ def add_backwards(self, backwards):
+ if self.prepend_backwards:
+ backwards.insert(0, self.backwards_code())
+ else:
+ backwards.append(self.backwards_code())
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ raise NotImplementedError
+
+ @classmethod
+ def triples_to_defs(cls, fields):
+ # Turn the (class, args, kwargs) format into a string
+ for field, triple in fields.items():
+ fields[field] = cls.triple_to_def(triple)
+ return fields
+
+ @classmethod
+ def triple_to_def(cls, triple):
+ "Turns a single triple into a definition."
+ return "self.gf(%r)(%s)" % (
+ triple[0], # Field full path
+ ", ".join(triple[1] + ["%s=%s" % (kwd, val) for kwd, val in triple[2].items()]), # args and kwds
+ )
+
+
+class AddModel(Action):
+ """
+ Addition of a model. Takes the Model subclass that is being created.
+ """
+
+ FORWARDS_TEMPLATE = '''
+ # Adding model '%(model_name)s'
+ db.create_table(%(table_name)r, (
+ %(field_defs)s
+ ))
+ db.send_create_signal(%(app_label)r, [%(model_name)r])'''[1:] + "\n"
+
+ BACKWARDS_TEMPLATE = '''
+ # Deleting model '%(model_name)s'
+ db.delete_table(%(table_name)r)'''[1:] + "\n"
+
+ def __init__(self, model, model_def):
+ self.model = model
+ self.model_def = model_def
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " + Added model %s.%s" % (
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+ "Produces the code snippet that gets put into forwards()"
+ field_defs = ",\n ".join([
+ "(%r, %s)" % (name, defn) for name, defn
+ in self.triples_to_defs(self.model_def).items()
+ ]) + ","
+
+ return self.FORWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "app_label": self.model._meta.app_label,
+ "field_defs": field_defs,
+ }
+
+ def backwards_code(self):
+ "Produces the code snippet that gets put into backwards()"
+ return self.BACKWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ }
+
+
+class DeleteModel(AddModel):
+ """
+ Deletion of a model. Takes the Model subclass that is being created.
+ """
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " - Deleted model %s.%s" % (
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+ return AddModel.backwards_code(self)
+
+ def backwards_code(self):
+ return AddModel.forwards_code(self)
+
+
+class _NullIssuesField(object):
+ """
+ A field that might need to ask a question about rogue NULL values.
+ """
+
+ issue_with_backward_migration = False
+ irreversible = False
+
+ IRREVERSIBLE_TEMPLATE = '''
+ # User chose to not deal with backwards NULL issues for '%(model_name)s.%(field_name)s'
+ raise RuntimeError("Cannot reverse this migration. '%(model_name)s.%(field_name)s' and its values cannot be restored.")
+
+ # The following code is provided here to aid in writing a correct migration'''
+
+ def deal_with_not_null_no_default(self, field, field_def):
+ # If it's a CharField or TextField that's blank, skip this step.
+ if isinstance(field, (CharField, TextField)) and field.blank:
+ field_def[2]['default'] = repr("")
+ return
+ # Oh dear. Ask them what to do.
+ print(" ? The field '%s.%s' does not have a default specified, yet is NOT NULL." % (
+ self.model._meta.object_name,
+ field.name,
+ ))
+ print(" ? Since you are %s, you MUST specify a default" % self.null_reason)
+ print(" ? value to use for existing rows. Would you like to:")
+ print(" ? 1. Quit now"+("." if self.issue_with_backward_migration else ", and add a default to the field in models.py" ))
+ print(" ? 2. Specify a one-off value to use for existing columns now")
+ if self.issue_with_backward_migration:
+ print(" ? 3. Disable the backwards migration by raising an exception; you can edit the migration to fix it later")
+ while True:
+ choice = raw_input(" ? Please select a choice: ")
+ if choice == "1":
+ sys.exit(1)
+ elif choice == "2":
+ break
+ elif choice == "3" and self.issue_with_backward_migration:
+ break
+ else:
+ print(" ! Invalid choice.")
+ if choice == "2":
+ self.add_one_time_default(field, field_def)
+ elif choice == "3":
+ self.irreversible = True
+
+ def add_one_time_default(self, field, field_def):
+ # OK, they want to pick their own one-time default. Who are we to refuse?
+ print(" ? Please enter Python code for your one-off default value.")
+ print(" ? The datetime module is available, so you can do e.g. datetime.date.today()")
+ while True:
+ code = raw_input(" >>> ")
+ if not code:
+ print(" ! Please enter some code, or 'exit' (with no quotes) to exit.")
+ elif code == "exit":
+ sys.exit(1)
+ else:
+ try:
+ result = eval(code, {}, {"datetime": datetime_utils})
+ except (SyntaxError, NameError) as e:
+ print(" ! Invalid input: %s" % e)
+ else:
+ break
+ # Right, add the default in.
+ field_def[2]['default'] = value_clean(result)
+
+ def irreversable_code(self, field):
+ return self.IRREVERSIBLE_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "field_name": field.name,
+ "field_column": field.column,
+ }
+
+
+class AddField(Action, _NullIssuesField):
+ """
+ Adds a field to a model. Takes a Model class and the field name.
+ """
+
+ null_reason = "adding this field"
+
+ FORWARDS_TEMPLATE = '''
+ # Adding field '%(model_name)s.%(field_name)s'
+ db.add_column(%(table_name)r, %(field_name)r,
+ %(field_def)s,
+ keep_default=False)'''[1:] + "\n"
+
+ BACKWARDS_TEMPLATE = '''
+ # Deleting field '%(model_name)s.%(field_name)s'
+ db.delete_column(%(table_name)r, %(field_column)r)'''[1:] + "\n"
+
+ def __init__(self, model, field, field_def):
+ self.model = model
+ self.field = field
+ self.field_def = field_def
+
+ # See if they've made a NOT NULL column but also have no default (far too common)
+ is_null = self.field.null
+ default = (self.field.default is not None) and (self.field.default is not NOT_PROVIDED)
+
+ if not is_null and not default:
+ self.deal_with_not_null_no_default(self.field, self.field_def)
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " + Added field %s on %s.%s" % (
+ self.field.name,
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+
+ return self.FORWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "field_name": self.field.name,
+ "field_column": self.field.column,
+ "field_def": self.triple_to_def(self.field_def),
+ }
+
+ def backwards_code(self):
+ return self.BACKWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "field_name": self.field.name,
+ "field_column": self.field.column,
+ }
+
+
+class DeleteField(AddField):
+ """
+ Removes a field from a model. Takes a Model class and the field name.
+ """
+
+ null_reason = "removing this field"
+ issue_with_backward_migration = True
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " - Deleted field %s on %s.%s" % (
+ self.field.name,
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+ return AddField.backwards_code(self)
+
+ def backwards_code(self):
+ if not self.irreversible:
+ return AddField.forwards_code(self)
+ else:
+ return self.irreversable_code(self.field) + AddField.forwards_code(self)
+
+
+class ChangeField(Action, _NullIssuesField):
+ """
+ Changes a field's type/options on a model.
+ """
+
+ null_reason = "making this field non-nullable"
+
+ FORWARDS_TEMPLATE = BACKWARDS_TEMPLATE = '''
+ # Changing field '%(model_name)s.%(field_name)s'
+ db.alter_column(%(table_name)r, %(field_column)r, %(field_def)s)'''
+
+ RENAME_TEMPLATE = '''
+ # Renaming column for '%(model_name)s.%(field_name)s' to match new field type.
+ db.rename_column(%(table_name)r, %(old_column)r, %(new_column)r)'''
+
+ def __init__(self, model, old_field, new_field, old_def, new_def):
+ self.model = model
+ self.old_field = old_field
+ self.new_field = new_field
+ self.old_def = old_def
+ self.new_def = new_def
+
+ # See if they've changed a not-null field to be null
+ new_default = (self.new_field.default is not None) and (self.new_field.default is not NOT_PROVIDED)
+ old_default = (self.old_field.default is not None) and (self.old_field.default is not NOT_PROVIDED)
+ if self.old_field.null and not self.new_field.null and not new_default:
+ self.deal_with_not_null_no_default(self.new_field, self.new_def)
+ if not self.old_field.null and self.new_field.null and not old_default:
+ self.null_reason = "making this field nullable"
+ self.issue_with_backward_migration = True
+ self.deal_with_not_null_no_default(self.old_field, self.old_def)
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " ~ Changed field %s on %s.%s" % (
+ self.new_field.name,
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def _code(self, old_field, new_field, new_def):
+
+ output = ""
+
+ if self.old_field.column != self.new_field.column:
+ output += self.RENAME_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "field_name": new_field.name,
+ "old_column": old_field.column,
+ "new_column": new_field.column,
+ }
+
+ output += self.FORWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "field_name": new_field.name,
+ "field_column": new_field.column,
+ "field_def": self.triple_to_def(new_def),
+ }
+
+ return output
+
+ def forwards_code(self):
+ return self._code(self.old_field, self.new_field, self.new_def)
+
+ def backwards_code(self):
+ change_code = self._code(self.new_field, self.old_field, self.old_def)
+ if not self.irreversible:
+ return change_code
+ else:
+ return self.irreversable_code(self.old_field) + change_code
+
+
+class AddUnique(Action):
+ """
+ Adds a unique constraint to a model. Takes a Model class and the field names.
+ """
+
+ FORWARDS_TEMPLATE = '''
+ # Adding unique constraint on '%(model_name)s', fields %(field_names)s
+ db.create_unique(%(table_name)r, %(fields)r)'''[1:] + "\n"
+
+ BACKWARDS_TEMPLATE = '''
+ # Removing unique constraint on '%(model_name)s', fields %(field_names)s
+ db.delete_unique(%(table_name)r, %(fields)r)'''[1:] + "\n"
+
+ prepend_backwards = True
+
+ def __init__(self, model, fields):
+ self.model = model
+ self.fields = fields
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " + Added unique constraint for %s on %s.%s" % (
+ [x.name for x in self.fields],
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+
+ return self.FORWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "fields": [field.column for field in self.fields],
+ "field_names": [field.name for field in self.fields],
+ }
+
+ def backwards_code(self):
+ return self.BACKWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "table_name": self.model._meta.db_table,
+ "fields": [field.column for field in self.fields],
+ "field_names": [field.name for field in self.fields],
+ }
+
+
+class DeleteUnique(AddUnique):
+ """
+ Removes a unique constraint from a model. Takes a Model class and the field names.
+ """
+
+ prepend_forwards = True
+ prepend_backwards = False
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " - Deleted unique constraint for %s on %s.%s" % (
+ [x.name for x in self.fields],
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+ return AddUnique.backwards_code(self)
+
+ def backwards_code(self):
+ return AddUnique.forwards_code(self)
+
+
+class AddIndex(AddUnique):
+ """
+ Adds an index to a model field[s]. Takes a Model class and the field names.
+ """
+
+ FORWARDS_TEMPLATE = '''
+ # Adding index on '%(model_name)s', fields %(field_names)s
+ db.create_index(%(table_name)r, %(fields)r)'''[1:] + "\n"
+
+ BACKWARDS_TEMPLATE = '''
+ # Removing index on '%(model_name)s', fields %(field_names)s
+ db.delete_index(%(table_name)r, %(fields)r)'''[1:] + "\n"
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " + Added index for %s on %s.%s" % (
+ [x.name for x in self.fields],
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+
+class DeleteIndex(AddIndex):
+ """
+ Deletes an index off a model field[s]. Takes a Model class and the field names.
+ """
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " + Deleted index for %s on %s.%s" % (
+ [x.name for x in self.fields],
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+ return AddIndex.backwards_code(self)
+
+ def backwards_code(self):
+ return AddIndex.forwards_code(self)
+
+
+class AddM2M(Action):
+ """
+ Adds a unique constraint to a model. Takes a Model class and the field names.
+ """
+
+ FORWARDS_TEMPLATE = '''
+ # Adding M2M table for field %(field_name)s on '%(model_name)s'
+ m2m_table_name = %(table_name)s
+ db.create_table(m2m_table_name, (
+ ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
+ (%(left_field)r, models.ForeignKey(orm[%(left_model_key)r], null=False)),
+ (%(right_field)r, models.ForeignKey(orm[%(right_model_key)r], null=False))
+ ))
+ db.create_unique(m2m_table_name, [%(left_column)r, %(right_column)r])'''[1:] + "\n"
+
+ BACKWARDS_TEMPLATE = '''
+ # Removing M2M table for field %(field_name)s on '%(model_name)s'
+ db.delete_table(%(table_name)s)'''[1:] + "\n"
+
+ def __init__(self, model, field):
+ self.model = model
+ self.field = field
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " + Added M2M table for %s on %s.%s" % (
+ self.field.name,
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def table_name(self):
+ # This is part of a workaround for the fact that Django uses
+ # different shortening for automatically generated m2m table names
+ # (as opposed to any explicitly specified table name)
+ f = self.field
+ explicit = f.db_table
+ if explicit:
+ return "%r" % explicit
+ else:
+ auto = "%s_%s" % (self.model._meta.db_table, f.name)
+ return 'db.shorten_name(%r)' % auto
+
+ def forwards_code(self):
+
+ return self.FORWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "field_name": self.field.name,
+ "table_name": self.table_name(),
+ "left_field": self.field.m2m_column_name()[:-3], # Remove the _id part
+ "left_column": self.field.m2m_column_name(),
+ "left_model_key": model_key(self.model),
+ "right_field": self.field.m2m_reverse_name()[:-3], # Remove the _id part
+ "right_column": self.field.m2m_reverse_name(),
+ "right_model_key": model_key(self.field.rel.to),
+ }
+
+ def backwards_code(self):
+
+ return self.BACKWARDS_TEMPLATE % {
+ "model_name": self.model._meta.object_name,
+ "field_name": self.field.name,
+ "table_name": self.table_name(),
+ }
+
+
+class DeleteM2M(AddM2M):
+ """
+ Adds a unique constraint to a model. Takes a Model class and the field names.
+ """
+
+ def console_line(self):
+ "Returns the string to print on the console, e.g. ' + Added field foo'"
+ return " - Deleted M2M table for %s on %s.%s" % (
+ self.field.name,
+ self.model._meta.app_label,
+ self.model._meta.object_name,
+ )
+
+ def forwards_code(self):
+ return AddM2M.backwards_code(self)
+
+ def backwards_code(self):
+ return AddM2M.forwards_code(self)
+
diff --git a/lib/python2.7/site-packages/south/creator/changes.py b/lib/python2.7/site-packages/south/creator/changes.py
new file mode 100644
index 0000000..6cdbd19
--- /dev/null
+++ b/lib/python2.7/site-packages/south/creator/changes.py
@@ -0,0 +1,506 @@
+"""
+Contains things to detect changes - either using options passed in on the
+commandline, or by using autodetection, etc.
+"""
+
+from __future__ import print_function
+
+from django.db import models
+from django.contrib.contenttypes.generic import GenericRelation
+from django.utils.datastructures import SortedDict
+
+from south.creator.freezer import remove_useless_attributes, freeze_apps, model_key
+from south.utils import auto_through
+from south.utils.py3 import string_types
+
+class BaseChanges(object):
+ """
+ Base changes class.
+ """
+ def suggest_name(self):
+ return ''
+
+ def split_model_def(self, model, model_def):
+ """
+ Given a model and its model def (a dict of field: triple), returns three
+ items: the real fields dict, the Meta dict, and the M2M fields dict.
+ """
+ real_fields = SortedDict()
+ meta = SortedDict()
+ m2m_fields = SortedDict()
+ for name, triple in model_def.items():
+ if name == "Meta":
+ meta = triple
+ elif isinstance(model._meta.get_field_by_name(name)[0], models.ManyToManyField):
+ m2m_fields[name] = triple
+ else:
+ real_fields[name] = triple
+ return real_fields, meta, m2m_fields
+
+ def current_model_from_key(self, key):
+ app_label, model_name = key.split(".")
+ return models.get_model(app_label, model_name)
+
+ def current_field_from_key(self, key, fieldname):
+ app_label, model_name = key.split(".")
+ # Special, for the magical field from order_with_respect_to
+ if fieldname == "_order":
+ field = models.IntegerField()
+ field.name = "_order"
+ field.attname = "_order"
+ field.column = "_order"
+ field.default = 0
+ return field
+ # Otherwise, normal.
+ return models.get_model(app_label, model_name)._meta.get_field_by_name(fieldname)[0]
+
+
+class AutoChanges(BaseChanges):
+ """
+ Detects changes by 'diffing' two sets of frozen model definitions.
+ """
+
+ # Field types we don't generate add/remove field changes for.
+ IGNORED_FIELD_TYPES = [
+ GenericRelation,
+ ]
+
+ def __init__(self, migrations, old_defs, old_orm, new_defs):
+ self.migrations = migrations
+ self.old_defs = old_defs
+ self.old_orm = old_orm
+ self.new_defs = new_defs
+
+ def suggest_name(self):
+ parts = ["auto"]
+ for change_name, params in self.get_changes():
+ if change_name == "AddModel":
+ parts.append("add_%s" % params['model']._meta.object_name.lower())
+ elif change_name == "DeleteModel":
+ parts.append("del_%s" % params['model']._meta.object_name.lower())
+ elif change_name == "AddField":
+ parts.append("add_field_%s_%s" % (
+ params['model']._meta.object_name.lower(),
+ params['field'].name,
+ ))
+ elif change_name == "DeleteField":
+ parts.append("del_field_%s_%s" % (
+ params['model']._meta.object_name.lower(),
+ params['field'].name,
+ ))
+ elif change_name == "ChangeField":
+ parts.append("chg_field_%s_%s" % (
+ params['model']._meta.object_name.lower(),
+ params['new_field'].name,
+ ))
+ elif change_name == "AddUnique":
+ parts.append("add_unique_%s_%s" % (
+ params['model']._meta.object_name.lower(),
+ "_".join([x.name for x in params['fields']]),
+ ))
+ elif change_name == "DeleteUnique":
+ parts.append("del_unique_%s_%s" % (
+ params['model']._meta.object_name.lower(),
+ "_".join([x.name for x in params['fields']]),
+ ))
+ elif change_name == "AddIndex":
+ parts.append("add_index_%s_%s" % (
+ params['model']._meta.object_name.lower(),
+ "_".join([x.name for x in params['fields']]),
+ ))
+ elif change_name == "DeleteIndex":
+ parts.append("del_index_%s_%s" % (
+ params['model']._meta.object_name.lower(),
+ "_".join([x.name for x in params['fields']]),
+ ))
+ return ("__".join(parts))[:70]
+
+ def get_changes(self):
+ """
+        Generates the differences between the old and new sets of models as a
+        stream of (action_name, params_dict) pairs, e.g. ("AddModel", {...}).
+ """
+
+ deleted_models = set()
+
+ # See if anything's vanished
+ for key in self.old_defs:
+ if key not in self.new_defs:
+ # We shouldn't delete it if it was managed=False
+ old_fields, old_meta, old_m2ms = self.split_model_def(self.old_orm[key], self.old_defs[key])
+ if old_meta.get("managed", "True") != "False":
+ # Alright, delete it.
+ yield ("DeleteModel", {
+ "model": self.old_orm[key],
+ "model_def": old_fields,
+ })
+ # Also make sure we delete any M2Ms it had.
+ for fieldname in old_m2ms:
+ # Only delete its stuff if it wasn't a through=.
+ field = self.old_orm[key + ":" + fieldname]
+ if auto_through(field):
+ yield ("DeleteM2M", {"model": self.old_orm[key], "field": field})
+ # And any index/uniqueness constraints it had
+ for attr, operation in (("unique_together", "DeleteUnique"), ("index_together", "DeleteIndex")):
+ together = eval(old_meta.get(attr, "[]"))
+ if together:
+ # If it's only a single tuple, make it into the longer one
+ if isinstance(together[0], string_types):
+ together = [together]
+ # For each combination, make an action for it
+ for fields in together:
+ yield (operation, {
+ "model": self.old_orm[key],
+ "fields": [self.old_orm[key]._meta.get_field_by_name(x)[0] for x in fields],
+ })
+ # We always add it in here so we ignore it later
+ deleted_models.add(key)
+
+ # Or appeared
+ for key in self.new_defs:
+ if key not in self.old_defs:
+ # We shouldn't add it if it's managed=False
+ new_fields, new_meta, new_m2ms = self.split_model_def(self.current_model_from_key(key), self.new_defs[key])
+ if new_meta.get("managed", "True") != "False":
+ yield ("AddModel", {
+ "model": self.current_model_from_key(key),
+ "model_def": new_fields,
+ })
+ # Also make sure we add any M2Ms it has.
+ for fieldname in new_m2ms:
+ # Only create its stuff if it wasn't a through=.
+ field = self.current_field_from_key(key, fieldname)
+ if auto_through(field):
+ yield ("AddM2M", {"model": self.current_model_from_key(key), "field": field})
+ # And any index/uniqueness constraints it has
+ for attr, operation in (("unique_together", "AddUnique"), ("index_together", "AddIndex")):
+ together = eval(new_meta.get(attr, "[]"))
+ if together:
+ # If it's only a single tuple, make it into the longer one
+ if isinstance(together[0], string_types):
+ together = [together]
+ # For each combination, make an action for it
+ for fields in together:
+ yield (operation, {
+ "model": self.current_model_from_key(key),
+ "fields": [self.current_model_from_key(key)._meta.get_field_by_name(x)[0] for x in fields],
+ })
+
+ # Now, for every model that's stayed the same, check its fields.
+ for key in self.old_defs:
+ if key not in deleted_models:
+
+ old_fields, old_meta, old_m2ms = self.split_model_def(self.old_orm[key], self.old_defs[key])
+ new_fields, new_meta, new_m2ms = self.split_model_def(self.current_model_from_key(key), self.new_defs[key])
+
+ # Do nothing for models which are now not managed.
+ if new_meta.get("managed", "True") == "False":
+ continue
+
+ # Find fields that have vanished.
+ for fieldname in old_fields:
+ if fieldname not in new_fields:
+ # Don't do it for any fields we're ignoring
+ field = self.old_orm[key + ":" + fieldname]
+ field_allowed = True
+ for field_type in self.IGNORED_FIELD_TYPES:
+ if isinstance(field, field_type):
+ field_allowed = False
+ if field_allowed:
+ # Looks alright.
+ yield ("DeleteField", {
+ "model": self.old_orm[key],
+ "field": field,
+ "field_def": old_fields[fieldname],
+ })
+
+ # And ones that have appeared
+ for fieldname in new_fields:
+ if fieldname not in old_fields:
+ # Don't do it for any fields we're ignoring
+ field = self.current_field_from_key(key, fieldname)
+ field_allowed = True
+ for field_type in self.IGNORED_FIELD_TYPES:
+ if isinstance(field, field_type):
+ field_allowed = False
+ if field_allowed:
+ # Looks alright.
+ yield ("AddField", {
+ "model": self.current_model_from_key(key),
+ "field": field,
+ "field_def": new_fields[fieldname],
+ })
+
+ # Find M2Ms that have vanished
+ for fieldname in old_m2ms:
+ if fieldname not in new_m2ms:
+ # Only delete its stuff if it wasn't a through=.
+ field = self.old_orm[key + ":" + fieldname]
+ if auto_through(field):
+ yield ("DeleteM2M", {"model": self.old_orm[key], "field": field})
+
+ # Find M2Ms that have appeared
+ for fieldname in new_m2ms:
+ if fieldname not in old_m2ms:
+ # Only create its stuff if it wasn't a through=.
+ field = self.current_field_from_key(key, fieldname)
+ if auto_through(field):
+ yield ("AddM2M", {"model": self.current_model_from_key(key), "field": field})
+
+ # For the ones that exist in both models, see if they were changed
+ for fieldname in set(old_fields).intersection(set(new_fields)):
+ # Non-index changes
+ if self.different_attributes(
+ remove_useless_attributes(old_fields[fieldname], True, True),
+ remove_useless_attributes(new_fields[fieldname], True, True)):
+ yield ("ChangeField", {
+ "model": self.current_model_from_key(key),
+ "old_field": self.old_orm[key + ":" + fieldname],
+ "new_field": self.current_field_from_key(key, fieldname),
+ "old_def": old_fields[fieldname],
+ "new_def": new_fields[fieldname],
+ })
+ # Index changes
+ old_field = self.old_orm[key + ":" + fieldname]
+ new_field = self.current_field_from_key(key, fieldname)
+ if not old_field.db_index and new_field.db_index:
+ # They've added an index.
+ yield ("AddIndex", {
+ "model": self.current_model_from_key(key),
+ "fields": [new_field],
+ })
+ if old_field.db_index and not new_field.db_index:
+ # They've removed an index.
+ yield ("DeleteIndex", {
+ "model": self.old_orm[key],
+ "fields": [old_field],
+ })
+ # See if their uniques have changed
+ if old_field.unique != new_field.unique:
+ # Make sure we look at the one explicitly given to see what happened
+ if new_field.unique:
+ yield ("AddUnique", {
+ "model": self.current_model_from_key(key),
+ "fields": [new_field],
+ })
+ else:
+ yield ("DeleteUnique", {
+ "model": self.old_orm[key],
+ "fields": [old_field],
+ })
+
+ # See if there's any M2Ms that have changed.
+ for fieldname in set(old_m2ms).intersection(set(new_m2ms)):
+ old_field = self.old_orm[key + ":" + fieldname]
+ new_field = self.current_field_from_key(key, fieldname)
+ # Have they _added_ a through= ?
+ if auto_through(old_field) and not auto_through(new_field):
+ yield ("DeleteM2M", {"model": self.old_orm[key], "field": old_field})
+ # Have they _removed_ a through= ?
+ if not auto_through(old_field) and auto_through(new_field):
+ yield ("AddM2M", {"model": self.current_model_from_key(key), "field": new_field})
+
+ ## See if the {index,unique}_togethers have changed
+ for attr, add_operation, del_operation in (("unique_together", "AddUnique", "DeleteUnique"), ("index_together", "AddIndex", "DeleteIndex")):
+ # First, normalise them into lists of sets.
+ old_together = eval(old_meta.get(attr, "[]"))
+ new_together = eval(new_meta.get(attr, "[]"))
+ if old_together and isinstance(old_together[0], string_types):
+ old_together = [old_together]
+ if new_together and isinstance(new_together[0], string_types):
+ new_together = [new_together]
+ old_together = frozenset(tuple(o) for o in old_together)
+ new_together = frozenset(tuple(n) for n in new_together)
+ # See if any appeared or disappeared
+ disappeared = old_together.difference(new_together)
+ appeared = new_together.difference(old_together)
+ for item in disappeared:
+ yield (del_operation, {
+ "model": self.old_orm[key],
+ "fields": [self.old_orm[key + ":" + x] for x in item],
+ })
+ for item in appeared:
+ yield (add_operation, {
+ "model": self.current_model_from_key(key),
+ "fields": [self.current_field_from_key(key, x) for x in item],
+ })
+
+ @classmethod
+ def is_triple(cls, triple):
+ "Returns whether the argument is a triple."
+ return isinstance(triple, (list, tuple)) and len(triple) == 3 and \
+ isinstance(triple[0], string_types) and \
+ isinstance(triple[1], (list, tuple)) and \
+ isinstance(triple[2], dict)
+
+ @classmethod
+ def different_attributes(cls, old, new):
+ """
+ Backwards-compat comparison that ignores orm. on the RHS and not the left
+ and which knows django.db.models.fields.CharField = models.CharField.
+ Has a whole load of tests in tests/autodetection.py.
+ """
+
+ # If they're not triples, just do normal comparison
+ if not cls.is_triple(old) or not cls.is_triple(new):
+ return old != new
+
+ # Expand them out into parts
+ old_field, old_pos, old_kwd = old
+ new_field, new_pos, new_kwd = new
+
+ # Copy the positional and keyword arguments so we can compare them and pop off things
+ old_pos, new_pos = old_pos[:], new_pos[:]
+ old_kwd = dict(old_kwd.items())
+ new_kwd = dict(new_kwd.items())
+
+ # Remove comparison of the existence of 'unique', that's done elsewhere.
+ # TODO: Make this work for custom fields where unique= means something else?
+ if "unique" in old_kwd:
+ del old_kwd['unique']
+ if "unique" in new_kwd:
+ del new_kwd['unique']
+
+ # If the first bit is different, check it's not by dj.db.models...
+ if old_field != new_field:
+ if old_field.startswith("models.") and (new_field.startswith("django.db.models") \
+ or new_field.startswith("django.contrib.gis")):
+ if old_field.split(".")[-1] != new_field.split(".")[-1]:
+ return True
+ else:
+ # Remove those fields from the final comparison
+ old_field = new_field = ""
+
+ # If there's a positional argument in the first, and a 'to' in the second,
+ # see if they're actually comparable.
+ if (old_pos and "to" in new_kwd) and ("orm" in new_kwd['to'] and "orm" not in old_pos[0]):
+ # Do special comparison to fix #153
+ try:
+ if old_pos[0] != new_kwd['to'].split("'")[1].split(".")[1]:
+ return True
+ except IndexError:
+ pass # Fall back to next comparison
+ # Remove those attrs from the final comparison
+ old_pos = old_pos[1:]
+ del new_kwd['to']
+
+ return old_field != new_field or old_pos != new_pos or old_kwd != new_kwd
+
+
+class ManualChanges(BaseChanges):
+ """
+ Detects changes by reading the command line.
+ """
+
+ def __init__(self, migrations, added_models, added_fields, added_indexes):
+ self.migrations = migrations
+ self.added_models = added_models
+ self.added_fields = added_fields
+ self.added_indexes = added_indexes
+
+ def suggest_name(self):
+ bits = []
+ for model_name in self.added_models:
+ bits.append('add_model_%s' % model_name)
+ for field_name in self.added_fields:
+ bits.append('add_field_%s' % field_name)
+ for index_name in self.added_indexes:
+ bits.append('add_index_%s' % index_name)
+ return '_'.join(bits).replace('.', '_')
+
+ def get_changes(self):
+ # Get the model defs so we can use them for the yield later
+ model_defs = freeze_apps([self.migrations.app_label()])
+ # Make the model changes
+ for model_name in self.added_models:
+ model = models.get_model(self.migrations.app_label(), model_name)
+ real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)])
+ yield ("AddModel", {
+ "model": model,
+ "model_def": real_fields,
+ })
+ # And the field changes
+ for field_desc in self.added_fields:
+ try:
+ model_name, field_name = field_desc.split(".")
+ except (TypeError, ValueError):
+ raise ValueError("%r is not a valid field description." % field_desc)
+ model = models.get_model(self.migrations.app_label(), model_name)
+ real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)])
+ yield ("AddField", {
+ "model": model,
+ "field": model._meta.get_field_by_name(field_name)[0],
+ "field_def": real_fields[field_name],
+ })
+ # And the indexes
+ for field_desc in self.added_indexes:
+ try:
+ model_name, field_name = field_desc.split(".")
+ except (TypeError, ValueError):
+ print("%r is not a valid field description." % field_desc)
+ model = models.get_model(self.migrations.app_label(), model_name)
+ yield ("AddIndex", {
+ "model": model,
+ "fields": [model._meta.get_field_by_name(field_name)[0]],
+ })
+
+
+class InitialChanges(BaseChanges):
+ """
+ Creates all models; handles --initial.
+ """
+ def suggest_name(self):
+ return 'initial'
+
+ def __init__(self, migrations):
+ self.migrations = migrations
+
+ def get_changes(self):
+ # Get the frozen models for this app
+ model_defs = freeze_apps([self.migrations.app_label()])
+
+ for model in models.get_models(models.get_app(self.migrations.app_label())):
+
+ # Don't do anything for unmanaged, abstract or proxy models
+ if model._meta.abstract or getattr(model._meta, "proxy", False) or not getattr(model._meta, "managed", True):
+ continue
+
+ real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)])
+
+ # Firstly, add the main table and fields
+ yield ("AddModel", {
+ "model": model,
+ "model_def": real_fields,
+ })
+
+ # Then, add any indexing/uniqueness that's around
+ if meta:
+ for attr, operation in (("unique_together", "AddUnique"), ("index_together", "AddIndex")):
+ together = eval(meta.get(attr, "[]"))
+ if together:
+ # If it's only a single tuple, make it into the longer one
+ if isinstance(together[0], string_types):
+ together = [together]
+ # For each combination, make an action for it
+ for fields in together:
+ yield (operation, {
+ "model": model,
+ "fields": [model._meta.get_field_by_name(x)[0] for x in fields],
+ })
+
+ # Finally, see if there's some M2M action
+ for name, triple in m2m_fields.items():
+ field = model._meta.get_field_by_name(name)[0]
+ # But only if it's not through=foo (#120)
+ if field.rel.through:
+ try:
+ # Django 1.1 and below
+ through_model = field.rel.through_model
+ except AttributeError:
+ # Django 1.2
+ through_model = field.rel.through
+ if (not field.rel.through) or getattr(through_model._meta, "auto_created", False):
+ yield ("AddM2M", {
+ "model": model,
+ "field": field,
+ })
diff --git a/lib/python2.7/site-packages/south/creator/freezer.py b/lib/python2.7/site-packages/south/creator/freezer.py
new file mode 100644
index 0000000..0f98cea
--- /dev/null
+++ b/lib/python2.7/site-packages/south/creator/freezer.py
@@ -0,0 +1,192 @@
+"""
+Handles freezing of models into FakeORMs.
+"""
+
+from __future__ import print_function
+
+import sys
+
+from django.db import models
+from django.db.models.base import ModelBase, Model
+from django.contrib.contenttypes.generic import GenericRelation
+
+from south.utils import get_attribute, auto_through
+from south import modelsinspector
+from south.utils.py3 import string_types
+
+def freeze_apps(apps):
+ """
+ Takes a list of app labels, and returns a string of their frozen form.
+ """
+ if isinstance(apps, string_types):
+ apps = [apps]
+ frozen_models = set()
+ # For each app, add in all its models
+ for app in apps:
+ for model in models.get_models(models.get_app(app)):
+ # Only add if it's not abstract or proxy
+ if not model._meta.abstract and not getattr(model._meta, "proxy", False):
+ frozen_models.add(model)
+ # Now, add all the dependencies
+ for model in list(frozen_models):
+ frozen_models.update(model_dependencies(model))
+ # Serialise!
+ model_defs = {}
+ model_classes = {}
+ for model in frozen_models:
+ model_defs[model_key(model)] = prep_for_freeze(model)
+ model_classes[model_key(model)] = model
+ # Check for any custom fields that failed to freeze.
+ missing_fields = False
+ for key, fields in model_defs.items():
+ for field_name, value in fields.items():
+ if value is None:
+ missing_fields = True
+ model_class = model_classes[key]
+ field_class = model_class._meta.get_field_by_name(field_name)[0]
+ print(" ! Cannot freeze field '%s.%s'" % (key, field_name))
+ print(" ! (this field has class %s.%s)" % (field_class.__class__.__module__, field_class.__class__.__name__))
+ if missing_fields:
+ print("")
+ print(" ! South cannot introspect some fields; this is probably because they are custom")
+ print(" ! fields. If they worked in 0.6 or below, this is because we have removed the")
+ print(" ! models parser (it often broke things).")
+ print(" ! To fix this, read http://south.aeracode.org/wiki/MyFieldsDontWork")
+ sys.exit(1)
+
+ return model_defs
+
+def freeze_apps_to_string(apps):
+ return pprint_frozen_models(freeze_apps(apps))
+
+###
+
+def model_key(model):
+ "For a given model, return 'appname.modelname'."
+ return "%s.%s" % (model._meta.app_label, model._meta.object_name.lower())
+
+def prep_for_freeze(model):
+ """
+ Takes a model and returns the ready-to-serialise dict (all you need
+ to do is just pretty-print it).
+ """
+ fields = modelsinspector.get_model_fields(model, m2m=True)
+ # Remove useless attributes (like 'choices')
+ for name, field in fields.items():
+ fields[name] = remove_useless_attributes(field)
+ # See if there's a Meta
+ fields['Meta'] = remove_useless_meta(modelsinspector.get_model_meta(model))
+ # Add in our own special items to track the object name and managed
+ fields['Meta']['object_name'] = model._meta.object_name # Special: not eval'able.
+ if not getattr(model._meta, "managed", True):
+ fields['Meta']['managed'] = repr(model._meta.managed)
+ return fields
+
+### Dependency resolvers
+
+def model_dependencies(model, checked_models=None):
+ """
+ Returns a set of models this one depends on to be defined; things like
+ OneToOneFields as ID, ForeignKeys everywhere, etc.
+ """
+ depends = set()
+ checked_models = checked_models or set()
+ # Get deps for each field
+ for field in model._meta.fields + model._meta.many_to_many:
+ depends.update(field_dependencies(field, checked_models))
+ # Add in any non-abstract bases
+ for base in model.__bases__:
+ if issubclass(base, models.Model) and hasattr(base, '_meta') and not base._meta.abstract:
+ depends.add(base)
+ # Now recurse
+ new_to_check = depends - checked_models
+ while new_to_check:
+ checked_model = new_to_check.pop()
+ if checked_model == model or checked_model in checked_models:
+ continue
+ checked_models.add(checked_model)
+ deps = model_dependencies(checked_model, checked_models)
+ # Loop through dependencies...
+ for dep in deps:
+ # If the new dep is not already checked, add to the queue
+ if (dep not in depends) and (dep not in new_to_check) and (dep not in checked_models):
+ new_to_check.add(dep)
+ depends.add(dep)
+ return depends
+
+def field_dependencies(field, checked_models=None):
+ checked_models = checked_models or set()
+ depends = set()
+ arg_defs, kwarg_defs = modelsinspector.matching_details(field)
+ for attrname, options in arg_defs + list(kwarg_defs.values()):
+ if options.get("ignore_if_auto_through", False) and auto_through(field):
+ continue
+ if options.get("is_value", False):
+ value = attrname
+ elif attrname == 'rel.through' and hasattr(getattr(field, 'rel', None), 'through_model'):
+ # Hack for django 1.1 and below, where the through model is stored
+ # in rel.through_model while rel.through stores only the model name.
+ value = field.rel.through_model
+ else:
+ try:
+ value = get_attribute(field, attrname)
+ except AttributeError:
+ if options.get("ignore_missing", False):
+ continue
+ raise
+ if isinstance(value, Model):
+ value = value.__class__
+ if not isinstance(value, ModelBase):
+ continue
+ if getattr(value._meta, "proxy", False):
+ value = value._meta.proxy_for_model
+ if value in checked_models:
+ continue
+ checked_models.add(value)
+ depends.add(value)
+ depends.update(model_dependencies(value, checked_models))
+
+ return depends
+
+### Prettyprinters
+
+def pprint_frozen_models(models):
+ return "{\n %s\n }" % ",\n ".join([
+ "%r: %s" % (name, pprint_fields(fields))
+ for name, fields in sorted(models.items())
+ ])
+
+def pprint_fields(fields):
+ return "{\n %s\n }" % ",\n ".join([
+ "%r: %r" % (name, defn)
+ for name, defn in sorted(fields.items())
+ ])
+
+### Output sanitisers
+
+USELESS_KEYWORDS = ["choices", "help_text", "verbose_name"]
+USELESS_DB_KEYWORDS = ["related_name", "default", "blank"] # Important for ORM, not for DB.
+INDEX_KEYWORDS = ["db_index"]
+
+def remove_useless_attributes(field, db=False, indexes=False):
+ "Removes useless (for database) attributes from the field's defn."
+ # Work out what to remove, and remove it.
+ keywords = USELESS_KEYWORDS[:]
+ if db:
+ keywords += USELESS_DB_KEYWORDS[:]
+ if indexes:
+ keywords += INDEX_KEYWORDS[:]
+ if field:
+ for name in keywords:
+ if name in field[2]:
+ del field[2][name]
+ return field
+
+USELESS_META = ["verbose_name", "verbose_name_plural"]
+def remove_useless_meta(meta):
+ "Removes useless (for database) attributes from the table's meta."
+ if meta:
+ for name in USELESS_META:
+ if name in meta:
+ del meta[name]
+ return meta
diff --git a/lib/python2.7/site-packages/south/db/__init__.py b/lib/python2.7/site-packages/south/db/__init__.py
new file mode 100644
index 0000000..b9b7168
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/__init__.py
@@ -0,0 +1,83 @@
+
+# Establish the common DatabaseOperations instance, which we call 'db'.
+# Much thanks to cmkmrr for a lot of the code base here
+
+from django.conf import settings
+import sys
+
+# A few aliases, because there's FQMNs now
+engine_modules = {
+ 'django.db.backends.postgresql_psycopg2': 'postgresql_psycopg2',
+ 'django.db.backends.sqlite3': 'sqlite3',
+ 'django.db.backends.mysql': 'mysql',
+ 'mysql_oursql.standard': 'mysql',
+ 'django.db.backends.oracle': 'oracle',
+ 'sql_server.pyodbc': 'sql_server.pyodbc', #django-pyodbc-azure
+ 'django_pyodbc': 'sql_server.pyodbc', #django-pyodbc
+ 'sqlserver_ado': 'sql_server.pyodbc', #django-mssql
+ 'firebird': 'firebird', #django-firebird
+ 'django.contrib.gis.db.backends.postgis': 'postgresql_psycopg2',
+ 'django.contrib.gis.db.backends.spatialite': 'sqlite3',
+ 'django.contrib.gis.db.backends.mysql': 'mysql',
+ 'django.contrib.gis.db.backends.oracle': 'oracle',
+ 'doj.backends.zxjdbc.postgresql': 'postgresql_psycopg2', #django-jython
+ 'doj.backends.zxjdbc.mysql': 'mysql', #django-jython
+ 'doj.backends.zxjdbc.oracle': 'oracle', #django-jython
+}
+
+# First, work out if we're multi-db or not, and which databases we have
+try:
+ from django.db import DEFAULT_DB_ALIAS
+except ImportError:
+ #### 1.1 or below ####
+ # We'll 'fake' multi-db; set the default alias
+ DEFAULT_DB_ALIAS = 'default'
+ # SOUTH_DATABASE_ADAPTER is an optional override if you have a different module
+ engine = getattr(settings, "SOUTH_DATABASE_ADAPTER", "south.db.%s" % settings.DATABASE_ENGINE)
+ # And then, we have one database with one engine
+ db_engines = {DEFAULT_DB_ALIAS: engine}
+else:
+ #### 1.2 or above ####
+ # Loop over the defined databases, gathering up their engines
+ db_engines = dict([
+ # Note we check to see if contrib.gis has overridden us.
+ (alias, "south.db.%s" % engine_modules[db_settings['ENGINE']])
+ for alias, db_settings in settings.DATABASES.items()
+ if db_settings['ENGINE'] in engine_modules
+ ])
+ # Update with any overrides
+ db_engines.update(getattr(settings, "SOUTH_DATABASE_ADAPTERS", {}))
+ # Check there's no None engines, or...
+ for alias, engine in db_engines.items():
+ if engine is None:
+ # They've used a backend we don't support
+ sys.stderr.write(
+ (
+ "There is no South database module for your database backend '%s'. " + \
+ "Please either choose a supported database, check for " + \
+ "SOUTH_DATABASE_ADAPTER[S] settings, " + \
+ "or remove South from INSTALLED_APPS.\n"
+ ) % (settings.DATABASES[alias]['ENGINE'],)
+ )
+ sys.exit(1)
+
+# Now, turn that into a dict of <alias: south db module>
+dbs = {}
+try:
+ for alias, module_name in db_engines.items():
+ module = __import__(module_name, {}, {}, [''])
+ dbs[alias] = module.DatabaseOperations(alias)
+except ImportError:
+ # This error should only be triggered on 1.1 and below.
+ sys.stderr.write(
+ (
+ "There is no South database module '%s' for your database. " + \
+ "Please either choose a supported database, check for " + \
+ "SOUTH_DATABASE_ADAPTER[S] settings, " + \
+ "or remove South from INSTALLED_APPS.\n"
+ ) % (module_name,)
+ )
+ sys.exit(1)
+
+# Finally, to make old migrations work, keep 'db' around as the default database
+db = dbs[DEFAULT_DB_ALIAS]
diff --git a/lib/python2.7/site-packages/south/db/firebird.py b/lib/python2.7/site-packages/south/db/firebird.py
new file mode 100644
index 0000000..a079819
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/firebird.py
@@ -0,0 +1,362 @@
+# firebird
+
+from __future__ import print_function
+
+import datetime
+
+from django.db import connection, models
+from django.core.management.color import no_style
+from django.db.utils import DatabaseError
+
+from south.db import generic
+from south.utils.py3 import string_types
+
+class DatabaseOperations(generic.DatabaseOperations):
+ backend_name = 'firebird'
+ alter_string_set_type = 'ALTER %(column)s TYPE %(type)s'
+ alter_string_set_default = 'ALTER %(column)s SET DEFAULT %(default)s;'
+ alter_string_drop_null = ''
+ add_column_string = 'ALTER TABLE %s ADD %s;'
+ delete_column_string = 'ALTER TABLE %s DROP %s;'
+ rename_table_sql = ''
+
+ # Features
+ allows_combined_alters = False
+ has_booleans = False
+
+ def _fill_constraint_cache(self, db_name, table_name):
+ self._constraint_cache.setdefault(db_name, {})
+ self._constraint_cache[db_name][table_name] = {}
+
+ rows = self.execute("""
+ SELECT
+ rc.RDB$CONSTRAINT_NAME,
+ rc.RDB$CONSTRAINT_TYPE,
+ cc.RDB$TRIGGER_NAME
+ FROM rdb$relation_constraints rc
+ JOIN rdb$check_constraints cc
+ ON rc.rdb$constraint_name = cc.rdb$constraint_name
+ WHERE rc.rdb$constraint_type = 'NOT NULL'
+ AND rc.rdb$relation_name = '%s'
+ """ % table_name)
+
+ for constraint, kind, column in rows:
+ self._constraint_cache[db_name][table_name].setdefault(column, set())
+ self._constraint_cache[db_name][table_name][column].add((kind, constraint))
+ return
+
+ def _alter_column_set_null(self, table_name, column_name, is_null):
+ sql = """
+ UPDATE RDB$RELATION_FIELDS SET RDB$NULL_FLAG = %(null_flag)s
+ WHERE RDB$FIELD_NAME = '%(column)s'
+ AND RDB$RELATION_NAME = '%(table_name)s'
+ """
+ null_flag = 'NULL' if is_null else '1'
+ return sql % {
+ 'null_flag': null_flag,
+ 'column': column_name.upper(),
+ 'table_name': table_name.upper()
+ }
+
+ def _column_has_default(self, params):
+ sql = """
+ SELECT a.RDB$DEFAULT_VALUE
+ FROM RDB$RELATION_FIELDS a
+ WHERE a.RDB$FIELD_NAME = '%(column)s'
+ AND a.RDB$RELATION_NAME = '%(table_name)s'
+ """
+ value = self.execute(sql % params)
+ return True if value else False
+
+
+ def _alter_set_defaults(self, field, name, params, sqls):
+ "Subcommand of alter_column that sets default values (overrideable)"
+ # Historically, we used to set defaults here.
+ # But since South 0.8, we don't ever set defaults on alter-column -- we only
+ # use database-level defaults as scaffolding when adding columns.
+ # However, we still sometimes need to remove defaults in alter-column.
+ if self._column_has_default(params):
+ sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), []))
+
+
+ @generic.invalidate_table_constraints
+ def create_table(self, table_name, fields):
+ columns = []
+ autoinc_sql = ''
+
+ for field_name, field in fields:
+ # avoid default values in CREATE TABLE statements (#925)
+ field._suppress_default = True
+
+ col = self.column_sql(table_name, field_name, field)
+ if not col:
+ continue
+
+ columns.append(col)
+ if isinstance(field, models.AutoField):
+ field_name = field.db_column or field.column
+ autoinc_sql = connection.ops.autoinc_sql(table_name, field_name)
+
+ self.execute(self.create_table_sql % {
+ "table": self.quote_name(table_name),
+ "columns": ', '.join([col for col in columns if col]),
+ })
+
+ if autoinc_sql:
+ self.execute(autoinc_sql[0])
+ self.execute(autoinc_sql[1])
+
+ def rename_table(self, old_table_name, table_name):
+ """
+ Renames table is not supported by firebird.
+ This involve recreate all related objects (store procedure, views, triggers, etc)
+ """
+ pass
+
+ @generic.invalidate_table_constraints
+ def delete_table(self, table_name, cascade=False):
+ """
+ Deletes the table 'table_name'.
+ Firebird will also delete any triggers associated with the table.
+ """
+ super(DatabaseOperations, self).delete_table(table_name, cascade=False)
+
+ # Also, drop sequence if exists
+ sql = connection.ops.drop_sequence_sql(table_name)
+ if sql:
+ try:
+ self.execute(sql)
+ except:
+ pass
+
+ def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
+ """
+ Creates the SQL snippet for a column. Used by add_column and add_table.
+ """
+
+ # If the field hasn't already been told its attribute name, do so.
+ if not field_prepared:
+ field.set_attributes_from_name(field_name)
+
+ # hook for the field to do any resolution prior to it's attributes being queried
+ if hasattr(field, 'south_init'):
+ field.south_init()
+
+ # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
+ field = self._field_sanity(field)
+
+ try:
+ sql = field.db_type(connection=self._get_connection())
+ except TypeError:
+ sql = field.db_type()
+
+ if sql:
+ # Some callers, like the sqlite stuff, just want the extended type.
+ if with_name:
+ field_output = [self.quote_name(field.column), sql]
+ else:
+ field_output = [sql]
+
+ if field.primary_key:
+ field_output.append('NOT NULL PRIMARY KEY')
+ elif field.unique:
+ # Just use UNIQUE (no indexes any more, we have delete_unique)
+ field_output.append('UNIQUE')
+
+ sql = ' '.join(field_output)
+ sqlparams = ()
+
+ # if the field is "NOT NULL" and a default value is provided, create the column with it
+ # this allows the addition of a NOT NULL field to a table with existing rows
+ if not getattr(field, '_suppress_default', False):
+ if field.has_default():
+ default = field.get_default()
+ # If the default is actually None, don't add a default term
+ if default is not None:
+ # If the default is a callable, then call it!
+ if callable(default):
+ default = default()
+ # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
+ if isinstance(default, string_types):
+ default = "'%s'" % default.replace("'", "''")
+ elif isinstance(default, (datetime.date, datetime.time, datetime.datetime)):
+ default = "'%s'" % default
+ elif isinstance(default, bool):
+ default = int(default)
+ # Escape any % signs in the output (bug #317)
+ if isinstance(default, string_types):
+ default = default.replace("%", "%%")
+ # Add it in
+ sql += " DEFAULT %s"
+ sqlparams = (default)
+ elif (not field.null and field.blank) or (field.get_default() == ''):
+ if field.empty_strings_allowed and self._get_connection().features.interprets_empty_strings_as_nulls:
+ sql += " DEFAULT ''"
+ # Error here would be nice, but doesn't seem to play fair.
+ #else:
+ # raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")
+
+ # Firebird need set not null after of default value keyword
+ if not field.primary_key and not field.null:
+ sql += ' NOT NULL'
+
+ if field.rel and self.supports_foreign_keys:
+ self.add_deferred_sql(
+ self.foreign_key_sql(
+ table_name,
+ field.column,
+ field.rel.to._meta.db_table,
+ field.rel.to._meta.get_field(field.rel.field_name).column
+ )
+ )
+
+ # Things like the contrib.gis module fields have this in 1.1 and below
+ if hasattr(field, 'post_create_sql'):
+ for stmt in field.post_create_sql(no_style(), table_name):
+ self.add_deferred_sql(stmt)
+
+ # Avoid double index creation (#1317)
+ # Firebird creates an index implicity for each foreign key field
+ # sql_indexes_for_field tries to create an index for that field too
+ if not field.rel:
+ # In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
+ # This also creates normal indexes in 1.1.
+ if hasattr(self._get_connection().creation, "sql_indexes_for_field"):
+ # Make a fake model to pass in, with only db_table
+ model = self.mock_model("FakeModelForGISCreation", table_name)
+ for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
+ self.add_deferred_sql(stmt)
+
+ if sql:
+ return sql % sqlparams
+ else:
+ return None
+
+
+ def _drop_constraints(self, table_name, name, field):
+ if self.has_check_constraints:
+ check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
+ for constraint in check_constraints:
+ self.execute(self.delete_check_sql % {
+ 'table': self.quote_name(table_name),
+ 'constraint': self.quote_name(constraint),
+ })
+
+ # Drop or add UNIQUE constraint
+ unique_constraint = list(self._constraints_affecting_columns(table_name, [name], "UNIQUE"))
+ if field.unique and not unique_constraint:
+ self.create_unique(table_name, [name])
+ elif not field.unique and unique_constraint:
+ self.delete_unique(table_name, [name])
+
+ # Drop all foreign key constraints
+ try:
+ self.delete_foreign_key(table_name, name)
+ except ValueError:
+ # There weren't any
+ pass
+
+
+ @generic.invalidate_table_constraints
+ def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
+ """
+ Alters the given column name so it will match the given field.
+ Note that conversion between the two by the database must be possible.
+ Will not automatically add _id by default; to have this behavour, pass
+ explicit_name=False.
+
+ @param table_name: The name of the table to add the column to
+ @param name: The name of the column to alter
+ @param field: The new field definition to use
+ """
+
+ if self.dry_run:
+ if self.debug:
+ print(' - no dry run output for alter_column() due to dynamic DDL, sorry')
+ return
+
+
+ # hook for the field to do any resolution prior to it's attributes being queried
+ if hasattr(field, 'south_init'):
+ field.south_init()
+
+ # Add _id or whatever if we need to
+ field.set_attributes_from_name(name)
+ if not explicit_name:
+ name = field.column
+ else:
+ field.column = name
+
+ if not ignore_constraints:
+ # Drop all check constraints. Note that constraints will be added back
+ # with self.alter_string_set_type and self.alter_string_drop_null.
+ self._drop_constraints(table_name, name, field)
+
+ # First, change the type
+ params = {
+ "column": self.quote_name(name),
+ "type": self._db_type_for_alter_column(field),
+ "table_name": table_name
+ }
+
+ # SQLs is a list of (SQL, values) pairs.
+ sqls = []
+ sqls_extra = []
+
+ # Only alter the column if it has a type (Geometry ones sometimes don't)
+ if params["type"] is not None:
+ sqls.append((self.alter_string_set_type % params, []))
+
+ # Add any field- and backend- specific modifications
+ self._alter_add_column_mods(field, name, params, sqls)
+
+ # Next, nullity: modified, firebird doesn't support DROP NOT NULL
+ sqls_extra.append(self._alter_column_set_null(table_name, name, field.null))
+
+ # Next, set any default
+ self._alter_set_defaults(field, name, params, sqls)
+
+ # Finally, actually change the column
+ if self.allows_combined_alters:
+ sqls, values = list(zip(*sqls))
+ self.execute(
+ "ALTER TABLE %s %s;" % (self.quote_name(table_name), ", ".join(sqls)),
+ generic.flatten(values),
+ )
+ else:
+ # Databases like e.g. MySQL don't like more than one alter at once.
+ for sql, values in sqls:
+ try:
+ self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), sql), values)
+ except DatabaseError as e:
+ print(e)
+
+
+ # Execute extra sql, which don't need ALTER TABLE statement
+ for sql in sqls_extra:
+ self.execute(sql)
+
+ if not ignore_constraints:
+ # Add back FK constraints if needed
+ if field.rel and self.supports_foreign_keys:
+ self.execute(
+ self.foreign_key_sql(
+ table_name,
+ field.column,
+ field.rel.to._meta.db_table,
+ field.rel.to._meta.get_field(field.rel.field_name).column
+ )
+ )
+
+ @generic.copy_column_constraints
+ @generic.delete_column_constraints
+ def rename_column(self, table_name, old, new):
+ if old == new:
+ # Short-circuit out
+ return []
+
+ self.execute('ALTER TABLE %s ALTER %s TO %s;' % (
+ self.quote_name(table_name),
+ self.quote_name(old),
+ self.quote_name(new),
+ ))
diff --git a/lib/python2.7/site-packages/south/db/generic.py b/lib/python2.7/site-packages/south/db/generic.py
new file mode 100644
index 0000000..5c19354
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/generic.py
@@ -0,0 +1,1164 @@
+from __future__ import print_function
+
+import re
+import sys
+
+from django.core.management.color import no_style
+from django.db import transaction, models
+from django.db.utils import DatabaseError
+from django.db.backends.util import truncate_name
+from django.db.backends.creation import BaseDatabaseCreation
+from django.db.models.fields import NOT_PROVIDED
+from django.dispatch import dispatcher
+from django.conf import settings
+from django.utils.datastructures import SortedDict
+try:
+ from django.utils.functional import cached_property
+except ImportError:
+ class cached_property(object):
+ """
+ Decorator that creates converts a method with a single
+ self argument into a property cached on the instance.
+ """
+ def __init__(self, func):
+ self.func = func
+
+ def __get__(self, instance, type):
+ res = instance.__dict__[self.func.__name__] = self.func(instance)
+ return res
+
+from south.logger import get_logger
+from south.utils.py3 import string_types, text_type
+
+
+def alias(attrname):
+ """
+ Returns a function which calls 'attrname' - for function aliasing.
+ We can't just use foo = bar, as this breaks subclassing.
+ """
+ def func(self, *args, **kwds):
+ return getattr(self, attrname)(*args, **kwds)
+ return func
+
+
+def invalidate_table_constraints(func):
+ def _cache_clear(self, table, *args, **opts):
+ self._set_cache(table, value=INVALID)
+ return func(self, table, *args, **opts)
+ return _cache_clear
+
+
+def delete_column_constraints(func):
+ def _column_rm(self, table, column, *args, **opts):
+ self._set_cache(table, column, value=[])
+ return func(self, table, column, *args, **opts)
+ return _column_rm
+
+
+def copy_column_constraints(func):
+ def _column_cp(self, table, column_old, column_new, *args, **opts):
+ db_name = self._get_setting('NAME')
+ self._set_cache(table, column_new, value=self.lookup_constraint(db_name, table, column_old))
+ return func(self, table, column_old, column_new, *args, **opts)
+ return _column_cp
+
+
+class INVALID(Exception):
+ def __repr__(self):
+ return 'INVALID'
+
+
+class DryRunError(ValueError):
+ pass
+
+
+class DatabaseOperations(object):
+ """
+ Generic SQL implementation of the DatabaseOperations.
+ Some of this code comes from Django Evolution.
+ """
+
+ alter_string_set_type = 'ALTER COLUMN %(column)s TYPE %(type)s'
+ alter_string_set_null = 'ALTER COLUMN %(column)s DROP NOT NULL'
+ alter_string_drop_null = 'ALTER COLUMN %(column)s SET NOT NULL'
+ delete_check_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
+ add_column_string = 'ALTER TABLE %s ADD COLUMN %s;'
+ delete_unique_sql = "ALTER TABLE %s DROP CONSTRAINT %s"
+ delete_foreign_key_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
+ create_table_sql = 'CREATE TABLE %(table)s (%(columns)s)'
+ max_index_name_length = 63
+ drop_index_string = 'DROP INDEX %(index_name)s'
+ delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;'
+ create_primary_key_string = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)"
+ delete_primary_key_sql = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s"
+ add_check_constraint_fragment = "ADD CONSTRAINT %(constraint)s CHECK (%(check)s)"
+ rename_table_sql = "ALTER TABLE %s RENAME TO %s;"
+ backend_name = None
+ default_schema_name = "public"
+
+ # Features
+ allows_combined_alters = True
+ supports_foreign_keys = True
+ has_check_constraints = True
+ has_booleans = True
+ raises_default_errors = True
+
+ @cached_property
+ def has_ddl_transactions(self):
+ """
+ Tests the database using feature detection to see if it has
+ transactional DDL support.
+ """
+ self._possibly_initialise()
+ connection = self._get_connection()
+ if hasattr(connection.features, "confirm") and not connection.features._confirmed:
+ connection.features.confirm()
+ # Django 1.3's MySQLdb backend doesn't raise DatabaseError
+ exceptions = (DatabaseError, )
+ try:
+ from MySQLdb import OperationalError
+ exceptions += (OperationalError, )
+ except ImportError:
+ pass
+ # Now do the test
+ if getattr(connection.features, 'supports_transactions', True):
+ cursor = connection.cursor()
+ self.start_transaction()
+ cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
+ self.rollback_transaction()
+ try:
+ try:
+ cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
+ except exceptions:
+ return False
+ else:
+ return True
+ finally:
+ cursor.execute('DROP TABLE DDL_TRANSACTION_TEST')
+ else:
+ return False
+
+ def __init__(self, db_alias):
+ self.debug = False
+ self.deferred_sql = []
+ self.dry_run = False
+ self.pending_transactions = 0
+ self.pending_create_signals = []
+ self.db_alias = db_alias
+ self._constraint_cache = {}
+ self._initialised = False
+
+ def lookup_constraint(self, db_name, table_name, column_name=None):
+ """ return a set() of constraints for db_name.table_name.column_name """
+ def _lookup():
+ table = self._constraint_cache[db_name][table_name]
+ if table is INVALID:
+ raise INVALID
+ elif column_name is None:
+ return list(table.items())
+ else:
+ return table[column_name]
+
+ try:
+ ret = _lookup()
+ return ret
+ except INVALID:
+ del self._constraint_cache[db_name][table_name]
+ self._fill_constraint_cache(db_name, table_name)
+ except KeyError:
+ if self._is_valid_cache(db_name, table_name):
+ return []
+ self._fill_constraint_cache(db_name, table_name)
+
+ return self.lookup_constraint(db_name, table_name, column_name)
+
+ def _set_cache(self, table_name, column_name=None, value=INVALID):
+ db_name = self._get_setting('NAME')
+ try:
+ if column_name is not None:
+ self._constraint_cache[db_name][table_name][column_name] = value
+ else:
+ self._constraint_cache[db_name][table_name] = value
+ except (LookupError, TypeError):
+ pass
+
+ def _is_valid_cache(self, db_name, table_name):
+ # we cache per-table so if the table is there it is valid
+ try:
+ return self._constraint_cache[db_name][table_name] is not INVALID
+ except KeyError:
+ return False
+
+ def _is_multidb(self):
+ try:
+ from django.db import connections
+ connections # Prevents "unused import" warning
+ except ImportError:
+ return False
+ else:
+ return True
+
+ def _get_connection(self):
+ """
+ Returns a django connection for a given DB Alias
+ """
+ if self._is_multidb():
+ from django.db import connections
+ return connections[self.db_alias]
+ else:
+ from django.db import connection
+ return connection
+
+ def _get_setting(self, setting_name):
+ """
+ Allows code to get a setting (like, for example, STORAGE_ENGINE)
+ """
+ setting_name = setting_name.upper()
+ connection = self._get_connection()
+ if self._is_multidb():
+ # Django 1.2 and above
+ return connection.settings_dict[setting_name]
+ else:
+ # Django 1.1 and below
+ return getattr(settings, "DATABASE_%s" % setting_name)
+
+ def _has_setting(self, setting_name):
+ """
+ Existence-checking version of _get_setting.
+ """
+ try:
+ self._get_setting(setting_name)
+ except (KeyError, AttributeError):
+ return False
+ else:
+ return True
+
+ def _get_schema_name(self):
+ try:
+ return self._get_setting('schema')
+ except (KeyError, AttributeError):
+ return self.default_schema_name
+
+ def _possibly_initialise(self):
+ if not self._initialised:
+ self.connection_init()
+ self._initialised = True
+
+ def connection_init(self):
+ """
+ Run before any SQL to let database-specific config be sent as a command,
+ e.g. which storage engine (MySQL) or transaction serialisability level.
+ """
+ pass
+
+ def quote_name(self, name):
+ """
+ Uses the database backend to quote the given table/column name.
+ """
+ return self._get_connection().ops.quote_name(name)
+
+ def _print_sql_error(self, e, sql, params=[]):
+ print('FATAL ERROR - The following SQL query failed: %s' % sql, file=sys.stderr)
+ print('The error was: %s' % e, file=sys.stderr)
+
+ def execute(self, sql, params=[], print_all_errors=True):
+ """
+ Executes the given SQL statement, with optional parameters.
+ If the instance's debug attribute is True, prints out what it executes.
+ """
+
+ self._possibly_initialise()
+
+ cursor = self._get_connection().cursor()
+ if self.debug:
+ print(" = %s" % sql, params)
+
+ if self.dry_run:
+ return []
+
+ get_logger().debug(text_type('execute "%s" with params "%s"' % (sql, params)))
+
+ try:
+ cursor.execute(sql, params)
+ except DatabaseError as e:
+ if print_all_errors:
+ self._print_sql_error(e, sql, params)
+ raise
+
+ try:
+ return cursor.fetchall()
+ except:
+ return []
+
+ def execute_many(self, sql, regex=r"(?mx) ([^';]* (?:'[^']*'[^';]*)*)", comment_regex=r"(?mx) (?:^\s*$)|(?:--.*$)"):
+ """
+ Takes a SQL file and executes it as many separate statements.
+ (Some backends, such as Postgres, don't work otherwise.)
+ """
+ # Be warned: This function is full of dark magic. Make sure you really
+ # know regexes before trying to edit it.
+ # First, strip comments
+ sql = "\n".join([x.strip().replace("%", "%%") for x in re.split(comment_regex, sql) if x.strip()])
+ # Now execute each statement
+ for st in re.split(regex, sql)[1:][::2]:
+ self.execute(st)
+
+ def add_deferred_sql(self, sql):
+ """
+ Add a SQL statement to the deferred list, that won't be executed until
+ this instance's execute_deferred_sql method is run.
+ """
+ self.deferred_sql.append(sql)
+
+ def execute_deferred_sql(self):
+ """
+ Executes all deferred SQL, resetting the deferred_sql list
+ """
+ for sql in self.deferred_sql:
+ self.execute(sql)
+
+ self.deferred_sql = []
+
+ def clear_deferred_sql(self):
+ """
+ Resets the deferred_sql list to empty.
+ """
+ self.deferred_sql = []
+
+ def clear_run_data(self, pending_creates = None):
+ """
+ Resets variables to how they should be before a run. Used for dry runs.
+ If you want, pass in an old panding_creates to reset to.
+ """
+ self.clear_deferred_sql()
+ self.pending_create_signals = pending_creates or []
+
+ def get_pending_creates(self):
+ return self.pending_create_signals
+
+ @invalidate_table_constraints
+ def create_table(self, table_name, fields):
+ """
+ Creates the table 'table_name'. 'fields' is a tuple of fields,
+ each repsented by a 2-part tuple of field name and a
+ django.db.models.fields.Field object
+ """
+
+ if len(table_name) > 63:
+ print(" ! WARNING: You have a table name longer than 63 characters; this will not fully work on PostgreSQL or MySQL.")
+
+ # avoid default values in CREATE TABLE statements (#925)
+ for field_name, field in fields:
+ field._suppress_default = True
+
+ columns = [
+ self.column_sql(table_name, field_name, field)
+ for field_name, field in fields
+ ]
+
+ self.execute(self.create_table_sql % {
+ "table": self.quote_name(table_name),
+ "columns": ', '.join([col for col in columns if col]),
+ })
+
+ add_table = alias('create_table') # Alias for consistency's sake
+
+ @invalidate_table_constraints
+ def rename_table(self, old_table_name, table_name):
+ """
+ Renames the table 'old_table_name' to 'table_name'.
+ """
+ if old_table_name == table_name:
+ # Short-circuit out.
+ return
+ params = (self.quote_name(old_table_name), self.quote_name(table_name))
+ self.execute(self.rename_table_sql % params)
+ # Invalidate the not-yet-indexed table
+ self._set_cache(table_name, value=INVALID)
+
+ @invalidate_table_constraints
+ def delete_table(self, table_name, cascade=True):
+ """
+ Deletes the table 'table_name'.
+ """
+ params = (self.quote_name(table_name), )
+ if cascade:
+ self.execute('DROP TABLE %s CASCADE;' % params)
+ else:
+ self.execute('DROP TABLE %s;' % params)
+
+ drop_table = alias('delete_table')
+
+ @invalidate_table_constraints
+ def clear_table(self, table_name):
+ """
+ Deletes all rows from 'table_name'.
+ """
+ params = (self.quote_name(table_name), )
+ self.execute('DELETE FROM %s;' % params)
+
+ @invalidate_table_constraints
+ def add_column(self, table_name, name, field, keep_default=True):
+ """
+ Adds the column 'name' to the table 'table_name'.
+ Uses the 'field' paramater, a django.db.models.fields.Field instance,
+ to generate the necessary sql
+
+ @param table_name: The name of the table to add the column to
+ @param name: The name of the column to add
+ @param field: The field to use
+ """
+ sql = self.column_sql(table_name, name, field)
+ if sql:
+ params = (
+ self.quote_name(table_name),
+ sql,
+ )
+ sql = self.add_column_string % params
+ self.execute(sql)
+
+ # Now, drop the default if we need to
+ if field.default is not None:
+ field.default = NOT_PROVIDED
+ self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)
+
+ def _db_type_for_alter_column(self, field):
+ """
+ Returns a field's type suitable for ALTER COLUMN.
+ By default it just returns field.db_type().
+ To be overriden by backend specific subclasses
+ @param field: The field to generate type for
+ """
+ try:
+ return field.db_type(connection=self._get_connection())
+ except TypeError:
+ return field.db_type()
+
+ def _alter_add_column_mods(self, field, name, params, sqls):
+ """
+ Subcommand of alter_column that modifies column definitions beyond
+ the type string -- e.g. adding constraints where they cannot be specified
+ as part of the type (overrideable)
+ """
+ pass
+
+ def _alter_set_defaults(self, field, name, params, sqls):
+ "Subcommand of alter_column that sets default values (overrideable)"
+ # Historically, we used to set defaults here.
+ # But since South 0.8, we don't ever set defaults on alter-column -- we only
+ # use database-level defaults as scaffolding when adding columns.
+ # However, we still sometimes need to remove defaults in alter-column.
+ sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), []))
+
+ def _update_nulls_to_default(self, params, field):
+ "Subcommand of alter_column that updates nulls to default value (overrideable)"
+ default = field.get_db_prep_save(field.get_default(), connection=self._get_connection())
+ self.execute('UPDATE %(table_name)s SET %(column)s=%%s WHERE %(column)s IS NULL' % params, [default])
+
+ @invalidate_table_constraints
+ def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
+ """
+ Alters the given column name so it will match the given field.
+ Note that conversion between the two by the database must be possible.
+ Will not automatically add _id by default; to have this behavour, pass
+ explicit_name=False.
+
+ @param table_name: The name of the table to add the column to
+ @param name: The name of the column to alter
+ @param field: The new field definition to use
+ """
+
+ if self.dry_run:
+ if self.debug:
+ print(' - no dry run output for alter_column() due to dynamic DDL, sorry')
+ return
+
+ # hook for the field to do any resolution prior to it's attributes being queried
+ if hasattr(field, 'south_init'):
+ field.south_init()
+
+ # Add _id or whatever if we need to
+ field.set_attributes_from_name(name)
+ if not explicit_name:
+ name = field.column
+ else:
+ field.column = name
+
+ if not ignore_constraints:
+ # Drop all check constraints. Note that constraints will be added back
+ # with self.alter_string_set_type and self.alter_string_drop_null.
+ if self.has_check_constraints:
+ check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
+ for constraint in check_constraints:
+ self.execute(self.delete_check_sql % {
+ 'table': self.quote_name(table_name),
+ 'constraint': self.quote_name(constraint),
+ })
+
+ # Drop all foreign key constraints
+ try:
+ self.delete_foreign_key(table_name, name)
+ except ValueError:
+ # There weren't any
+ pass
+
+ # First, change the type
+ params = {
+ "column": self.quote_name(name),
+ "type": self._db_type_for_alter_column(field),
+ "table_name": self.quote_name(table_name)
+ }
+
+ # SQLs is a list of (SQL, values) pairs.
+ sqls = []
+
+ # Only alter the column if it has a type (Geometry ones sometimes don't)
+ if params["type"] is not None:
+ sqls.append((self.alter_string_set_type % params, []))
+
+ # Add any field- and backend- specific modifications
+ self._alter_add_column_mods(field, name, params, sqls)
+ # Next, nullity
+ if field.null or field.has_default():
+ sqls.append((self.alter_string_set_null % params, []))
+ else:
+ sqls.append((self.alter_string_drop_null % params, []))
+
+ # Do defaults
+ self._alter_set_defaults(field, name, params, sqls)
+
+ # Actually change the column (step 1 -- Nullity may need to be fixed)
+ if self.allows_combined_alters:
+ sqls, values = zip(*sqls)
+ self.execute(
+ "ALTER TABLE %s %s;" % (self.quote_name(table_name), ", ".join(sqls)),
+ flatten(values),
+ )
+ else:
+ # Databases like e.g. MySQL don't like more than one alter at once.
+ for sql, values in sqls:
+ self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), sql), values)
+
+ if not field.null and field.has_default():
+ # Final fixes
+ self._update_nulls_to_default(params, field)
+ self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), self.alter_string_drop_null % params), [])
+
+ if not ignore_constraints:
+ # Add back FK constraints if needed
+ if field.rel and self.supports_foreign_keys:
+ self.execute(
+ self.foreign_key_sql(
+ table_name,
+ field.column,
+ field.rel.to._meta.db_table,
+ field.rel.to._meta.get_field(field.rel.field_name).column
+ )
+ )
+
+ def _fill_constraint_cache(self, db_name, table_name):
+
+ schema = self._get_schema_name()
+ ifsc_tables = ["constraint_column_usage", "key_column_usage"]
+
+ self._constraint_cache.setdefault(db_name, {})
+ self._constraint_cache[db_name][table_name] = {}
+
+ for ifsc_table in ifsc_tables:
+ rows = self.execute("""
+ SELECT kc.constraint_name, kc.column_name, c.constraint_type
+ FROM information_schema.%s AS kc
+ JOIN information_schema.table_constraints AS c ON
+ kc.table_schema = c.table_schema AND
+ kc.table_name = c.table_name AND
+ kc.constraint_name = c.constraint_name
+ WHERE
+ kc.table_schema = %%s AND
+ kc.table_name = %%s
+ """ % ifsc_table, [schema, table_name])
+ for constraint, column, kind in rows:
+ self._constraint_cache[db_name][table_name].setdefault(column, set())
+ self._constraint_cache[db_name][table_name][column].add((kind, constraint))
+ return
+
+ def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"):
+ """
+ Gets the names of the constraints affecting the given columns.
+ If columns is None, returns all constraints of the type on the table.
+ """
+ if self.dry_run:
+ raise DryRunError("Cannot get constraints for columns.")
+
+ if columns is not None:
+ columns = set(map(lambda s: s.lower(), columns))
+
+ db_name = self._get_setting('NAME')
+
+ cnames = {}
+ for col, constraints in self.lookup_constraint(db_name, table_name):
+ for kind, cname in constraints:
+ if kind == type:
+ cnames.setdefault(cname, set())
+ cnames[cname].add(col.lower())
+
+ for cname, cols in cnames.items():
+ if cols == columns or columns is None:
+ yield cname
+
+ @invalidate_table_constraints
+ def create_unique(self, table_name, columns):
+ """
+ Creates a UNIQUE constraint on the columns on the given table.
+ """
+
+ if not isinstance(columns, (list, tuple)):
+ columns = [columns]
+
+ name = self.create_index_name(table_name, columns, suffix="_uniq")
+
+ cols = ", ".join(map(self.quote_name, columns))
+ self.execute("ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)" % (
+ self.quote_name(table_name),
+ self.quote_name(name),
+ cols,
+ ))
+ return name
+
+ @invalidate_table_constraints
+ def delete_unique(self, table_name, columns):
+ """
+ Deletes a UNIQUE constraint on precisely the columns on the given table.
+ """
+
+ if not isinstance(columns, (list, tuple)):
+ columns = [columns]
+
+ # Dry runs mean we can't do anything.
+ if self.dry_run:
+ if self.debug:
+ print(' - no dry run output for delete_unique_column() due to dynamic DDL, sorry')
+ return
+
+ constraints = list(self._constraints_affecting_columns(table_name, columns))
+ if not constraints:
+ raise ValueError("Cannot find a UNIQUE constraint on table %s, columns %r" % (table_name, columns))
+ for constraint in constraints:
+ self.execute(self.delete_unique_sql % (
+ self.quote_name(table_name),
+ self.quote_name(constraint),
+ ))
+
+ def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
+ """
+ Creates the SQL snippet for a column. Used by add_column and add_table.
+ """
+
+ # If the field hasn't already been told its attribute name, do so.
+ if not field_prepared:
+ field.set_attributes_from_name(field_name)
+
+ # hook for the field to do any resolution prior to its attributes being queried
+ if hasattr(field, 'south_init'):
+ field.south_init()
+
+ # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
+ field = self._field_sanity(field)
+
+ try:
+ sql = field.db_type(connection=self._get_connection())
+ except TypeError:
+ sql = field.db_type()
+
+ if sql:
+
+ # Some callers, like the sqlite stuff, just want the extended type.
+ if with_name:
+ field_output = [self.quote_name(field.column), sql]
+ else:
+ field_output = [sql]
+
+ field_output.append('%sNULL' % (not field.null and 'NOT ' or ''))
+ if field.primary_key:
+ field_output.append('PRIMARY KEY')
+ elif field.unique:
+ # Just use UNIQUE (no indexes any more, we have delete_unique)
+ field_output.append('UNIQUE')
+
+ tablespace = field.db_tablespace or tablespace
+ if tablespace and getattr(self._get_connection().features, "supports_tablespaces", False) and field.unique:
+ # We must specify the index tablespace inline, because we
+ # won't be generating a CREATE INDEX statement for this field.
+ field_output.append(self._get_connection().ops.tablespace_sql(tablespace, inline=True))
+
+ sql = ' '.join(field_output)
+ sqlparams = ()
+ # if the field is "NOT NULL" and a default value is provided, create the column with it
+ # this allows the addition of a NOT NULL field to a table with existing rows
+ if not getattr(field, '_suppress_default', False):
+ if field.has_default():
+ default = field.get_default()
+ # If the default is actually None, don't add a default term
+ if default is not None:
+ # If the default is a callable, then call it!
+ if callable(default):
+ default = default()
+
+ default = field.get_db_prep_save(default, connection=self._get_connection())
+ default = self._default_value_workaround(default)
+ # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
+ if isinstance(default, string_types):
+ default = "'%s'" % default.replace("'", "''")
+ # Escape any % signs in the output (bug #317)
+ if isinstance(default, string_types):
+ default = default.replace("%", "%%")
+ # Add it in
+ sql += " DEFAULT %s"
+ sqlparams = (default)
+ elif (not field.null and field.blank) or (field.get_default() == ''):
+ if field.empty_strings_allowed and self._get_connection().features.interprets_empty_strings_as_nulls:
+ sql += " DEFAULT ''"
+ # Error here would be nice, but doesn't seem to play fair.
+ #else:
+ # raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")
+
+ if field.rel and self.supports_foreign_keys:
+ self.add_deferred_sql(
+ self.foreign_key_sql(
+ table_name,
+ field.column,
+ field.rel.to._meta.db_table,
+ field.rel.to._meta.get_field(field.rel.field_name).column
+ )
+ )
+
+ # Things like the contrib.gis module fields have this in 1.1 and below
+ if hasattr(field, 'post_create_sql'):
+ for stmt in field.post_create_sql(no_style(), table_name):
+ self.add_deferred_sql(stmt)
+
+ # In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
+ # This also creates normal indexes in 1.1.
+ if hasattr(self._get_connection().creation, "sql_indexes_for_field"):
+ # Make a fake model to pass in, with only db_table
+ model = self.mock_model("FakeModelForGISCreation", table_name)
+ for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
+ self.add_deferred_sql(stmt)
+
+ if sql:
+ return sql % sqlparams
+ else:
+ return None
+
+ def _field_sanity(self, field):
+ """
+ Placeholder for DBMS-specific field alterations (some combos aren't valid,
+ e.g. DEFAULT and TEXT on MySQL)
+ """
+ return field
+
+ def _default_value_workaround(self, value):
+ """
+ DBMS-specific value alterations (this really works around
+ missing functionality in Django backends)
+ """
+ if isinstance(value, bool) and not self.has_booleans:
+ return int(value)
+ else:
+ return value
+
+ def foreign_key_sql(self, from_table_name, from_column_name, to_table_name, to_column_name):
+ """
+ Generates a full SQL statement to add a foreign key constraint
+ """
+ constraint_name = '%s_refs_%s_%s' % (from_column_name, to_column_name, self._digest(from_table_name, to_table_name))
+ return 'ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % (
+ self.quote_name(from_table_name),
+ self.quote_name(self.shorten_name(constraint_name)),
+ self.quote_name(from_column_name),
+ self.quote_name(to_table_name),
+ self.quote_name(to_column_name),
+ self._get_connection().ops.deferrable_sql() # Django knows this
+ )
+
+ @invalidate_table_constraints
+ def delete_foreign_key(self, table_name, column):
+ """
+ Drop a foreign key constraint
+ """
+ if self.dry_run:
+ if self.debug:
+ print(' - no dry run output for delete_foreign_key() due to dynamic DDL, sorry')
+ return # We can't look at the DB to get the constraints
+ constraints = self._find_foreign_constraints(table_name, column)
+ if not constraints:
+ raise ValueError("Cannot find a FOREIGN KEY constraint on table %s, column %s" % (table_name, column))
+ for constraint_name in constraints:
+ self.execute(self.delete_foreign_key_sql % {
+ "table": self.quote_name(table_name),
+ "constraint": self.quote_name(constraint_name),
+ })
+
+ drop_foreign_key = alias('delete_foreign_key')
+
+ def _find_foreign_constraints(self, table_name, column_name=None):
+ constraints = self._constraints_affecting_columns(
+ table_name, [column_name], "FOREIGN KEY")
+
+ primary_key_columns = self._find_primary_key_columns(table_name)
+
+ if len(primary_key_columns) > 1:
+ # Composite primary keys cannot be referenced by a foreign key
+ return list(constraints)
+ else:
+ primary_key_columns.add(column_name)
+ recursive_constraints = set(self._constraints_affecting_columns(
+ table_name, primary_key_columns, "FOREIGN KEY"))
+ return list(recursive_constraints.union(constraints))
+
+ def _digest(self, *args):
+ """
+ Use django.db.backends.creation.BaseDatabaseCreation._digest
+ to create index name in Django style. An evil hack :(
+ """
+ if not hasattr(self, '_django_db_creation'):
+ self._django_db_creation = BaseDatabaseCreation(self._get_connection())
+ return self._django_db_creation._digest(*args)
+
+ def shorten_name(self, name):
+ return truncate_name(name, self._get_connection().ops.max_name_length())
+
+ def create_index_name(self, table_name, column_names, suffix=""):
+ """
+ Generate a unique name for the index
+ """
+
+ # If there is just one column in the index, use a default algorithm from Django
+ if len(column_names) == 1 and not suffix:
+ try:
+ _hash = self._digest([column_names[0]])
+ except TypeError:
+ # Django < 1.5 backward compatibility.
+ _hash = self._digest(column_names[0])
+ return self.shorten_name(
+ '%s_%s' % (table_name, _hash),
+ )
+
+ # Else generate the name for the index by South
+ table_name = table_name.replace('"', '').replace('.', '_')
+ index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names))))
+
+ # If the index name is too long, truncate it
+ index_name = ('%s_%s%s%s' % (table_name, column_names[0], index_unique_name, suffix)).replace('"', '').replace('.', '_')
+ if len(index_name) > self.max_index_name_length:
+ part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix))
+ index_name = '%s%s' % (table_name[:(self.max_index_name_length - len(part))], part)
+
+ return index_name
+
+ def create_index_sql(self, table_name, column_names, unique=False, db_tablespace=''):
+ """
+ Generates a create index statement on 'table_name' for a list of 'column_names'
+ """
+ if not column_names:
+ print("No column names supplied on which to create an index")
+ return ''
+
+ connection = self._get_connection()
+ if db_tablespace and connection.features.supports_tablespaces:
+ tablespace_sql = ' ' + connection.ops.tablespace_sql(db_tablespace)
+ else:
+ tablespace_sql = ''
+
+ index_name = self.create_index_name(table_name, column_names)
+ return 'CREATE %sINDEX %s ON %s (%s)%s;' % (
+ unique and 'UNIQUE ' or '',
+ self.quote_name(index_name),
+ self.quote_name(table_name),
+ ','.join([self.quote_name(field) for field in column_names]),
+ tablespace_sql
+ )
+
+ @invalidate_table_constraints
+ def create_index(self, table_name, column_names, unique=False, db_tablespace=''):
+ """ Executes a create index statement """
+ sql = self.create_index_sql(table_name, column_names, unique, db_tablespace)
+ self.execute(sql)
+
+ @invalidate_table_constraints
+ def delete_index(self, table_name, column_names, db_tablespace=''):
+ """
+ Deletes an index created with create_index.
+ This is possible using only columns due to the deterministic
+ index naming function which relies on column names.
+ """
+ if isinstance(column_names, string_types):
+ column_names = [column_names]
+ name = self.create_index_name(table_name, column_names)
+ sql = self.drop_index_string % {
+ "index_name": self.quote_name(name),
+ "table_name": self.quote_name(table_name),
+ }
+ self.execute(sql)
+
+ drop_index = alias('delete_index')
+
+ @delete_column_constraints
+ def delete_column(self, table_name, name):
+ """
+ Deletes the column 'column_name' from the table 'table_name'.
+ """
+ params = (self.quote_name(table_name), self.quote_name(name))
+ self.execute(self.delete_column_string % params, [])
+
+ drop_column = alias('delete_column')
+
+ def rename_column(self, table_name, old, new):
+ """
+ Renames the column 'old' from the table 'table_name' to 'new'.
+ """
+ raise NotImplementedError("rename_column has no generic SQL syntax")
+
+ @invalidate_table_constraints
+ def delete_primary_key(self, table_name):
+ """
+ Drops the old primary key.
+ """
+ # Dry runs mean we can't do anything.
+ if self.dry_run:
+ if self.debug:
+ print(' - no dry run output for delete_primary_key() due to dynamic DDL, sorry')
+ return
+
+ constraints = list(self._constraints_affecting_columns(table_name, None, type="PRIMARY KEY"))
+ if not constraints:
+ raise ValueError("Cannot find a PRIMARY KEY constraint on table %s" % (table_name,))
+
+ for constraint in constraints:
+ self.execute(self.delete_primary_key_sql % {
+ "table": self.quote_name(table_name),
+ "constraint": self.quote_name(constraint),
+ })
+
+ drop_primary_key = alias('delete_primary_key')
+
+ @invalidate_table_constraints
+ def create_primary_key(self, table_name, columns):
+ """
+ Creates a new primary key on the specified columns.
+ """
+ if not isinstance(columns, (list, tuple)):
+ columns = [columns]
+ self.execute(self.create_primary_key_string % {
+ "table": self.quote_name(table_name),
+ "constraint": self.quote_name(table_name + "_pkey"),
+ "columns": ", ".join(map(self.quote_name, columns)),
+ })
+
+ def _find_primary_key_columns(self, table_name):
+ """
+ Find all columns of the primary key of the specified table
+ """
+ db_name = self._get_setting('NAME')
+
+ primary_key_columns = set()
+ for col, constraints in self.lookup_constraint(db_name, table_name):
+ for kind, cname in constraints:
+ if kind == 'PRIMARY KEY':
+ primary_key_columns.add(col.lower())
+
+ return primary_key_columns
+
+ def start_transaction(self):
+ """
+ Makes sure the following commands are inside a transaction.
+ Must be followed by a (commit|rollback)_transaction call.
+ """
+ if self.dry_run:
+ self.pending_transactions += 1
+ transaction.commit_unless_managed(using=self.db_alias)
+ transaction.enter_transaction_management(using=self.db_alias)
+ transaction.managed(True, using=self.db_alias)
+
+ def commit_transaction(self):
+ """
+ Commits the current transaction.
+ Must be preceded by a start_transaction call.
+ """
+ if self.dry_run:
+ return
+ transaction.commit(using=self.db_alias)
+ transaction.leave_transaction_management(using=self.db_alias)
+
+ def rollback_transaction(self):
+ """
+ Rolls back the current transaction.
+ Must be preceded by a start_transaction call.
+ """
+ if self.dry_run:
+ self.pending_transactions -= 1
+ transaction.rollback(using=self.db_alias)
+ transaction.leave_transaction_management(using=self.db_alias)
+
+ def rollback_transactions_dry_run(self):
+ """
+ Rolls back all pending_transactions during this dry run.
+ """
+ if not self.dry_run:
+ return
+ while self.pending_transactions > 0:
+ self.rollback_transaction()
+ if transaction.is_dirty(using=self.db_alias):
+ # Force an exception, if we're still in a dirty transaction.
+ # This means we are missing a COMMIT/ROLLBACK.
+ transaction.leave_transaction_management(using=self.db_alias)
+
+ def send_create_signal(self, app_label, model_names):
+ self.pending_create_signals.append((app_label, model_names))
+
+ def send_pending_create_signals(self, verbosity=0, interactive=False):
+ # Group app_labels together
+ signals = SortedDict()
+ for (app_label, model_names) in self.pending_create_signals:
+ try:
+ signals[app_label].extend(model_names)
+ except KeyError:
+ signals[app_label] = list(model_names)
+ # Send only one signal per app.
+ for (app_label, model_names) in signals.items():
+ self.really_send_create_signal(app_label, list(set(model_names)),
+ verbosity=verbosity,
+ interactive=interactive)
+ self.pending_create_signals = []
+
+ def really_send_create_signal(self, app_label, model_names,
+ verbosity=0, interactive=False):
+ """
+ Sends a post_syncdb signal for the model specified.
+
+ If the model is not found (perhaps it's been deleted?),
+ no signal is sent.
+
+ TODO: The behavior of django.contrib.* apps seems flawed in that
+ they don't respect created_models. Rather, they blindly execute
+ over all models within the app sending the signal. This is a
+ patch we should push Django to make. For now, this should work.
+ """
+
+ if self.debug:
+ print(" - Sending post_syncdb signal for %s: %s" % (app_label, model_names))
+
+ app = models.get_app(app_label)
+ if not app:
+ return
+
+ created_models = []
+ for model_name in model_names:
+ model = models.get_model(app_label, model_name)
+ if model:
+ created_models.append(model)
+
+ if created_models:
+
+ if hasattr(dispatcher, "send"):
+ # Older djangos
+ dispatcher.send(signal=models.signals.post_syncdb, sender=app,
+ app=app, created_models=created_models,
+ verbosity=verbosity, interactive=interactive)
+ else:
+ if self._is_multidb():
+ # Django 1.2+
+ models.signals.post_syncdb.send(
+ sender=app,
+ app=app,
+ created_models=created_models,
+ verbosity=verbosity,
+ interactive=interactive,
+ db=self.db_alias,
+ )
+ else:
+ # Django 1.1 - 1.0
+ models.signals.post_syncdb.send(
+ sender=app,
+ app=app,
+ created_models=created_models,
+ verbosity=verbosity,
+ interactive=interactive,
+ )
+
+ def mock_model(self, model_name, db_table, db_tablespace='',
+ pk_field_name='id', pk_field_type=models.AutoField,
+ pk_field_args=[], pk_field_kwargs={}):
+ """
+ Generates a MockModel class that provides enough information
+ to be used by a foreign key/many-to-many relationship.
+
+ Migrations should prefer to use these rather than actual models
+ as models could get deleted over time, but these can remain in
+ migration files forever.
+
+ Deprecated.
+ """
+ class MockOptions(object):
+ def __init__(self):
+ self.db_table = db_table
+ self.db_tablespace = db_tablespace or settings.DEFAULT_TABLESPACE
+ self.object_name = model_name
+ self.module_name = model_name.lower()
+
+ if pk_field_type == models.AutoField:
+ pk_field_kwargs['primary_key'] = True
+
+ self.pk = pk_field_type(*pk_field_args, **pk_field_kwargs)
+ self.pk.set_attributes_from_name(pk_field_name)
+ self.abstract = False
+
+ def get_field_by_name(self, field_name):
+ # we only care about the pk field
+ return (self.pk, self.model, True, False)
+
+ def get_field(self, name):
+ # we only care about the pk field
+ return self.pk
+
+ class MockModel(object):
+ _meta = None
+
+ # We need to return an actual class object here, not an instance
+ MockModel._meta = MockOptions()
+ MockModel._meta.model = MockModel
+ return MockModel
+
+ def _db_positive_type_for_alter_column(self, klass, field):
+ """
+ A helper for subclasses overriding _db_type_for_alter_column:
+ Remove the check constraint from the type string for PositiveInteger
+ and PositiveSmallInteger fields.
+ @param klass: The type of the child (required to allow this to be used when it is subclassed)
+ @param field: The field to generate type for
+ """
+ super_result = super(klass, self)._db_type_for_alter_column(field)
+ if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)):
+ return super_result.split(" ", 1)[0]
+ return super_result
+
+ def _alter_add_positive_check(self, klass, field, name, params, sqls):
+ """
+ A helper for subclasses overriding _alter_add_column_mods:
+ Add a check constraint verifying positivity to PositiveInteger and
+ PositiveSmallInteger fields.
+ """
+ super(klass, self)._alter_add_column_mods(field, name, params, sqls)
+ if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)):
+ uniq_hash = abs(hash(tuple(params.values())))
+ d = dict(
+ constraint = "CK_%s_PSTV_%s" % (name, hex(uniq_hash)[2:]),
+ check = "%s >= 0" % self.quote_name(name))
+ sqls.append((self.add_check_constraint_fragment % d, []))
+
+
+# Single-level flattening of lists
+def flatten(ls):
+ nl = []
+ for l in ls:
+ nl += l
+ return nl
diff --git a/lib/python2.7/site-packages/south/db/mysql.py b/lib/python2.7/site-packages/south/db/mysql.py
new file mode 100644
index 0000000..3e87464
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/mysql.py
@@ -0,0 +1,290 @@
+# MySQL-specific implementations for south
+# Original author: Andrew Godwin
+# Patches by: F. Gabriel Gosselin <gabrielNOSPAM@evidens.ca>
+
+from south.db import generic
+from south.db.generic import DryRunError, INVALID
+from south.logger import get_logger
+
+
+def delete_column_constraints(func):
+ """
+ Decorates column operation functions for MySQL.
+ Deletes the constraints from the database and clears local cache.
+ """
+ def _column_rm(self, table_name, column_name, *args, **opts):
+ # Delete foreign key constraints
+ try:
+ self.delete_foreign_key(table_name, column_name)
+ except ValueError:
+ pass # If no foreign key on column, OK because it checks first
+ # Delete constraints referring to this column
+ try:
+ reverse = self._lookup_reverse_constraint(table_name, column_name)
+ for cname, rtable, rcolumn in reverse:
+ self.delete_foreign_key(rtable, rcolumn)
+ except DryRunError:
+ pass
+ return func(self, table_name, column_name, *args, **opts)
+ return _column_rm
+
+
+def copy_column_constraints(func):
+ """
+ Decorates column operation functions for MySQL.
+ Determines existing constraints and copies them to a new column
+ """
+ def _column_cp(self, table_name, column_old, column_new, *args, **opts):
+ # Copy foreign key constraint
+ try:
+ constraint = self._find_foreign_constraints(
+ table_name, column_old)[0]
+ refs = self._lookup_constraint_references(table_name, constraint)
+ if refs is not None:
+ (ftable, fcolumn) = refs
+ if ftable and fcolumn:
+ fk_sql = self.foreign_key_sql(
+ table_name, column_new, ftable, fcolumn)
+ get_logger().debug("Foreign key SQL: " + fk_sql)
+ self.add_deferred_sql(fk_sql)
+ except IndexError:
+ pass # No constraint exists so ignore
+ except DryRunError:
+ pass
+ # Copy constraints referring to this column
+ try:
+ reverse = self._lookup_reverse_constraint(table_name, column_old)
+ for cname, rtable, rcolumn in reverse:
+ fk_sql = self.foreign_key_sql(
+ rtable, rcolumn, table_name, column_new)
+ self.add_deferred_sql(fk_sql)
+ except DryRunError:
+ pass
+ return func(self, table_name, column_old, column_new, *args, **opts)
+ return _column_cp
+
+
+def invalidate_table_constraints(func):
+ """
+ For MySQL we grab all table constraints simultaneously, so this is
+ effective.
+ It further solves the issues of invalidating referred table constraints.
+ """
+ def _cache_clear(self, table, *args, **opts):
+ db_name = self._get_setting('NAME')
+ if db_name in self._constraint_cache:
+ del self._constraint_cache[db_name]
+ if db_name in self._reverse_cache:
+ del self._reverse_cache[db_name]
+ if db_name in self._constraint_references:
+ del self._constraint_references[db_name]
+ return func(self, table, *args, **opts)
+ return _cache_clear
+
+
+class DatabaseOperations(generic.DatabaseOperations):
+ """
+ MySQL implementation of database operations.
+
+ MySQL has no DDL transaction support. This can confuse people when they ask
+ how to roll back - hence the dry runs, etc., found in the migration code.
+ """
+
+ backend_name = "mysql"
+ alter_string_set_type = ''
+ alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
+ alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
+ drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
+ delete_primary_key_sql = "ALTER TABLE %(table)s DROP PRIMARY KEY"
+ delete_foreign_key_sql = "ALTER TABLE %(table)s DROP FOREIGN KEY %(constraint)s"
+ delete_unique_sql = "ALTER TABLE %s DROP INDEX %s"
+ rename_table_sql = "RENAME TABLE %s TO %s;"
+
+ allows_combined_alters = False
+ has_check_constraints = False
+ raises_default_errors = False
+
+ geom_types = ['geometry', 'point', 'linestring', 'polygon']
+ text_types = ['text', 'blob']
+
+ def __init__(self, db_alias):
+ self._constraint_references = {}
+ self._reverse_cache = {}
+ super(DatabaseOperations, self).__init__(db_alias)
+ if self._has_setting('STORAGE_ENGINE') and self._get_setting('STORAGE_ENGINE'):
+ self.create_table_sql = self.create_table_sql + ' ENGINE=%s' % self._get_setting('STORAGE_ENGINE')
+
+ def _is_valid_cache(self, db_name, table_name):
+ cache = self._constraint_cache
+ # we cache the whole db so if there are any tables table_name is valid
+ return db_name in cache and cache[db_name].get(table_name, None) is not INVALID
+
+ def _fill_constraint_cache(self, db_name, table_name):
+ # for MySQL grab all constraints for this database. It's just as cheap as a single column.
+ self._constraint_cache[db_name] = {}
+ self._constraint_cache[db_name][table_name] = {}
+ self._reverse_cache[db_name] = {}
+ self._constraint_references[db_name] = {}
+
+ name_query = """
+ SELECT kc.`constraint_name`, kc.`column_name`, kc.`table_name`,
+ kc.`referenced_table_name`, kc.`referenced_column_name`
+ FROM information_schema.key_column_usage AS kc
+ WHERE
+ kc.table_schema = %s
+ """
+ rows = self.execute(name_query, [db_name])
+ if not rows:
+ return
+ cnames = {}
+ for constraint, column, table, ref_table, ref_column in rows:
+ key = (table, constraint)
+ cnames.setdefault(key, set())
+ cnames[key].add((column, ref_table, ref_column))
+
+ type_query = """
+ SELECT c.constraint_name, c.table_name, c.constraint_type
+ FROM information_schema.table_constraints AS c
+ WHERE
+ c.table_schema = %s
+ """
+ rows = self.execute(type_query, [db_name])
+ for constraint, table, kind in rows:
+ key = (table, constraint)
+ self._constraint_cache[db_name].setdefault(table, {})
+ try:
+ cols = cnames[key]
+ except KeyError:
+ cols = set()
+ for column_set in cols:
+ (column, ref_table, ref_column) = column_set
+ self._constraint_cache[db_name][table].setdefault(column, set())
+ if kind == 'FOREIGN KEY':
+ self._constraint_cache[db_name][table][column].add((kind,
+ constraint))
+ # Create constraint lookup, see constraint_references
+ self._constraint_references[db_name][(table,
+ constraint)] = (ref_table, ref_column)
+ # Create reverse table lookup, reverse_lookup
+ self._reverse_cache[db_name].setdefault(ref_table, {})
+ self._reverse_cache[db_name][ref_table].setdefault(ref_column,
+ set())
+ self._reverse_cache[db_name][ref_table][ref_column].add(
+ (constraint, table, column))
+ else:
+ self._constraint_cache[db_name][table][column].add((kind,
+ constraint))
+
+ def connection_init(self):
+ """
+ Run before any SQL to let database-specific config be sent as a command,
+ e.g. which storage engine (MySQL) or transaction serialisability level.
+ """
+ cursor = self._get_connection().cursor()
+ if cursor.execute("SHOW variables WHERE Variable_Name='default_storage_engine';"):
+ engine_var = 'default_storage_engine'
+ else:
+ engine_var = 'storage_engine'
+ if self._has_setting('STORAGE_ENGINE') and self._get_setting('STORAGE_ENGINE'):
+ cursor.execute("SET %s=%s;" % (engine_var, self._get_setting('STORAGE_ENGINE')))
+
+ def start_transaction(self):
+ super(DatabaseOperations, self).start_transaction()
+ self.execute("SET FOREIGN_KEY_CHECKS=0;")
+
+ @copy_column_constraints
+ @delete_column_constraints
+ @invalidate_table_constraints
+ def rename_column(self, table_name, old, new):
+ if old == new or self.dry_run:
+ return []
+
+ rows = [x for x in self.execute('DESCRIBE %s' % (self.quote_name(table_name),)) if x[0] == old]
+
+ if not rows:
+ raise ValueError("No column '%s' in '%s'." % (old, table_name))
+
+ params = (
+ self.quote_name(table_name),
+ self.quote_name(old),
+ self.quote_name(new),
+ rows[0][1],
+ rows[0][2] == "YES" and "NULL" or "NOT NULL",
+ rows[0][4] and "DEFAULT " or "",
+ rows[0][4] and "%s" or "",
+ rows[0][5] or "",
+ )
+
+ sql = 'ALTER TABLE %s CHANGE COLUMN %s %s %s %s %s %s %s;' % params
+
+ if rows[0][4]:
+ self.execute(sql, (rows[0][4],))
+ else:
+ self.execute(sql)
+
+ @delete_column_constraints
+ def delete_column(self, table_name, name):
+ super(DatabaseOperations, self).delete_column(table_name, name)
+
+ @invalidate_table_constraints
+ def rename_table(self, old_table_name, table_name):
+ super(DatabaseOperations, self).rename_table(old_table_name,
+ table_name)
+
+ @invalidate_table_constraints
+ def delete_table(self, table_name):
+ super(DatabaseOperations, self).delete_table(table_name)
+
+ def _lookup_constraint_references(self, table_name, cname):
+ """
+ Provided an existing table and constraint, returns tuple of (foreign
+ table, column)
+ """
+ db_name = self._get_setting('NAME')
+ try:
+ return self._constraint_references[db_name][(table_name, cname)]
+ except KeyError:
+ return None
+
+ def _lookup_reverse_constraint(self, table_name, column_name=None):
+ """Look for the column referenced by a foreign constraint"""
+ db_name = self._get_setting('NAME')
+ if self.dry_run:
+ raise DryRunError("Cannot get constraints for columns.")
+
+ if not self._is_valid_cache(db_name, table_name):
+ # Piggy-back on lookup_constraint, ensures cache exists
+ self.lookup_constraint(db_name, table_name)
+
+ try:
+ table = self._reverse_cache[db_name][table_name]
+ if column_name == None:
+ return [(y, tuple(y)) for x, y in table.items()]
+ else:
+ return tuple(table[column_name])
+ except KeyError:
+ return []
+
+ def _field_sanity(self, field):
+ """
+ This particular override stops us sending DEFAULTs for BLOB/TEXT columns.
+ """
+ # MySQL does not support defaults for geometry columns also
+ type = self._db_type_for_alter_column(field).lower()
+ is_geom = True in [type.find(t) > -1 for t in self.geom_types]
+ is_text = True in [type.find(t) > -1 for t in self.text_types]
+
+ if is_geom or is_text:
+ field._suppress_default = True
+ return field
+
+ def _alter_set_defaults(self, field, name, params, sqls):
+ """
+ MySQL does not support defaults on text or blob columns.
+ """
+ type = params['type']
+ # MySQL does not support defaults for geometry columns also
+ is_geom = True in [type.find(t) > -1 for t in self.geom_types]
+ is_text = True in [type.find(t) > -1 for t in self.text_types]
+ if not is_geom and not is_text:
+ super(DatabaseOperations, self)._alter_set_defaults(field, name, params, sqls)
diff --git a/lib/python2.7/site-packages/south/db/oracle.py b/lib/python2.7/site-packages/south/db/oracle.py
new file mode 100644
index 0000000..79496d0
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/oracle.py
@@ -0,0 +1,345 @@
+from __future__ import print_function
+
+import os.path
+import sys
+import re
+import warnings
+import cx_Oracle
+
+
+from django.db import connection, models
+from django.db.backends.util import truncate_name
+from django.core.management.color import no_style
+from django.db.models.fields import NOT_PROVIDED
+from django.db.utils import DatabaseError
+
+# In revision r16016 function get_sequence_name has been transformed into
+# method of DatabaseOperations class. To make code backward-compatible we
+# need to handle both situations.
+try:
+ from django.db.backends.oracle.base import get_sequence_name\
+ as original_get_sequence_name
+except ImportError:
+ original_get_sequence_name = None
+
+from south.db import generic
+
class DatabaseOperations(generic.DatabaseOperations):
    """
    Oracle implementation of database operations.
    """
    backend_name = 'oracle'

    # DDL templates. Oracle uses MODIFY rather than ALTER COLUMN, and the
    # nullity clause must ride along with the type change.
    alter_string_set_type = 'ALTER TABLE %(table_name)s MODIFY %(column)s %(type)s %(nullity)s;'
    alter_string_set_default = 'ALTER TABLE %(table_name)s MODIFY %(column)s DEFAULT %(default)s;'
    alter_string_update_nulls_to_default = \
        'UPDATE %(table_name)s SET %(column)s = %(default)s WHERE %(column)s IS NULL;'
    add_column_string = 'ALTER TABLE %s ADD %s;'
    delete_column_string = 'ALTER TABLE %s DROP COLUMN %s;'
    add_constraint_string = 'ALTER TABLE %(table_name)s ADD CONSTRAINT %(constraint)s %(clause)s'

    # Oracle cannot combine several column alterations in one statement.
    allows_combined_alters = False
    has_booleans = False

    # Maps the single-letter CONSTRAINT_TYPE codes returned by
    # USER_CONSTRAINTS to the generic names used in the constraint cache.
    constraints_dict = {
        'P': 'PRIMARY KEY',
        'U': 'UNIQUE',
        'C': 'CHECK',
        'R': 'FOREIGN KEY'
    }

    def get_sequence_name(self, table_name):
        """
        Return the name of the sequence backing table_name's AutoField.

        Handles both Django variants (see module comment about r16016):
        the module-level function when available, otherwise the private
        method on the connection's ops object.
        """
        if original_get_sequence_name is None:
            return self._get_connection().ops._get_sequence_name(table_name)
        else:
            return original_get_sequence_name(table_name)

    #TODO: This will cause very obscure bugs if anyone uses a column name or string value
    # that looks like a column definition (with 'CHECK', 'DEFAULT' and/or 'NULL' in it)
    # e.g. "CHECK MATE" varchar(10) DEFAULT 'NULL'
    def adj_column_sql(self, col):
        """Reorder clauses in a column definition to satisfy Oracle."""
        # Syntax fixes -- Oracle is picky about clause order
        col = re.sub('(?P<constr>CHECK \(.*\))(?P<any>.*)(?P<default>DEFAULT \d+)',
                     lambda mo: '%s %s%s'%(mo.group('default'), mo.group('constr'), mo.group('any')), col) #syntax fix for boolean/integer field only
        col = re.sub('(?P<not_null>(NOT )?NULL) (?P<misc>(.* )?)(?P<default>DEFAULT.+)',
                     lambda mo: '%s %s %s'%(mo.group('default'),mo.group('not_null'),mo.group('misc') or ''), col) #fix order of NULL/NOT NULL and DEFAULT
        return col

    def check_meta(self, table_name):
        """True when table_name belongs to a model Django knows about."""
        return table_name in [ m._meta.db_table for m in models.get_models() ] #caching provided by Django

    def normalize_name(self, name):
        """
        Get the properly shortened and uppercased identifier as returned by quote_name(), but without the actual quotes.
        """
        nn = self.quote_name(name)
        if nn[0] == '"' and nn[-1] == '"':
            nn = nn[1:-1]
        return nn

    @generic.invalidate_table_constraints
    def create_table(self, table_name, fields):
        """
        Create a table; also creates the sequence/trigger pair for an
        AutoField via connection.ops.autoinc_sql.
        """
        qn = self.quote_name(table_name)
        columns = []
        autoinc_sql = ''

        for field_name, field in fields:

            field = self._field_sanity(field)

            # avoid default values in CREATE TABLE statements (#925)
            field._suppress_default = True

            col = self.column_sql(table_name, field_name, field)
            if not col:
                continue
            col = self.adj_column_sql(col)

            columns.append(col)
            if isinstance(field, models.AutoField):
                autoinc_sql = connection.ops.autoinc_sql(table_name, field_name)

        sql = 'CREATE TABLE %s (%s);' % (qn, ', '.join([col for col in columns]))
        self.execute(sql)
        if autoinc_sql:
            # autoinc_sql is (sequence DDL, trigger DDL) per Django's ops.
            self.execute(autoinc_sql[0])
            self.execute(autoinc_sql[1])

    @generic.invalidate_table_constraints
    def delete_table(self, table_name, cascade=True):
        """Drop a table and, if present, its backing sequence."""
        qn = self.quote_name(table_name)

        # Note: PURGE is not valid syntax for Oracle 9i (it was added in 10)
        if cascade:
            self.execute('DROP TABLE %s CASCADE CONSTRAINTS;' % qn)
        else:
            self.execute('DROP TABLE %s;' % qn)

        # If the table has an AutoField a sequence was created.
        # The PL/SQL block drops it only when it actually exists.
        sequence_sql = """
DECLARE
    i INTEGER;
BEGIN
    SELECT COUNT(*) INTO i FROM USER_CATALOG
        WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
    IF i = 1 THEN
        EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"';
    END IF;
END;
/""" % {'sq_name': self.get_sequence_name(table_name)}
        self.execute(sequence_sql)

    @generic.invalidate_table_constraints
    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
        """
        Alter a column to match `field`.

        Drops CHECK/FK constraints on the column first (the ALTER statements
        recreate CHECKs), runs the MODIFY statements, and retries with known
        ORA error codes worked around (nullity no-ops, LOB conversions).
        """
        if self.dry_run:
            if self.debug:
                print(' - no dry run output for alter_column() due to dynamic DDL, sorry')
            return

        qn = self.quote_name(table_name)

        # hook for the field to do any resolution prior to it's attributes being queried
        if hasattr(field, 'south_init'):
            field.south_init()
        field = self._field_sanity(field)

        # Add _id or whatever if we need to
        field.set_attributes_from_name(name)
        if not explicit_name:
            name = field.column
        qn_col = self.quote_name(name)

        # First, change the type
        # This will actually also add any CHECK constraints needed,
        # since e.g. 'type' for a BooleanField is 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))'
        params = {
            'table_name':qn,
            'column': qn_col,
            'type': self._db_type_for_alter_column(field),
            'nullity': 'NOT NULL',
            'default': 'NULL'
        }
        if field.null:
            params['nullity'] = 'NULL'

        sql_templates = [
            (self.alter_string_set_type, params, []),
            (self.alter_string_set_default, params, []),
        ]
        if not field.null and field.has_default():
            # Use default for rows that had nulls. To support the case where
            # the new default does not fit the old type, we need to first change
            # the column type to the new type, but null=True; then set the default;
            # then complete the type change.
            def change_params(**kw):
                "A little helper for non-destructively changing the params"
                p = params.copy()
                p.update(kw)
                return p
            sql_templates[:0] = [
                (self.alter_string_set_type, change_params(nullity='NULL'),[]),
                (self.alter_string_update_nulls_to_default, change_params(default="%s"), [field.get_default()]),
            ]

        if not ignore_constraints:
            # drop CHECK constraints. Make sure this is executed before the ALTER TABLE statements
            # generated above, since those statements recreate the constraints we delete here.
            check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
            for constraint in check_constraints:
                self.execute(self.delete_check_sql % {
                    'table': self.quote_name(table_name),
                    'constraint': self.quote_name(constraint),
                })
            # Drop foreign constraints
            try:
                self.delete_foreign_key(qn, qn_col)
            except ValueError:
                # There weren't any
                pass

        for sql_template, params, args in sql_templates:
            try:
                self.execute(sql_template % params, args, print_all_errors=False)
            except DatabaseError as exc:
                description = str(exc)
                # Oracle complains if a column is already NULL/NOT NULL
                if 'ORA-01442' in description or 'ORA-01451' in description:
                    # so we just drop NULL/NOT NULL part from target sql and retry
                    params['nullity'] = ''
                    sql = sql_template % params
                    self.execute(sql)
                # Oracle also has issues if we try to change a regular column
                # to a LOB or vice versa (also REF, object, VARRAY or nested
                # table, but these don't come up much in Django apps)
                elif 'ORA-22858' in description or 'ORA-22859' in description:
                    self._alter_column_lob_workaround(table_name, name, field)
                else:
                    self._print_sql_error(exc, sql_template % params)
                    raise

        if not ignore_constraints:
            # Add back FK constraints if needed
            if field.rel: #and self.supports_foreign_keys:
                self.add_deferred_sql(
                    self.foreign_key_sql(
                        qn[1:-1], # foreign_key_sql uses this as part of constraint name
                        qn_col[1:-1], # foreign_key_sql uses this as part of constraint name
                        field.rel.to._meta.db_table,
                        field.rel.to._meta.get_field(field.rel.field_name).column
                    )
                )

    def _alter_column_lob_workaround(self, table_name, name, field):
        """
        Oracle refuses to change a column type from/to LOB to/from a regular
        column. In Django, this shows up when the field is changed from/to
        a TextField.
        What we need to do instead is:
        - Rename the original column
        - Add the desired field as new
        - Update the table to transfer values from old to new
        - Drop old column
        """
        renamed = self._generate_temp_name(name)
        self.rename_column(table_name, name, renamed)
        self.add_column(table_name, name, field, keep_default=False)
        self.execute("UPDATE %s set %s=%s" % (
            self.quote_name(table_name),
            self.quote_name(name),
            self.quote_name(renamed),
        ))
        self.delete_column(table_name, renamed)

    def _generate_temp_name(self, for_name):
        """Derive a deterministic temporary column name from for_name."""
        suffix = hex(hash(for_name)).upper()[1:]
        return self.normalize_name(for_name + "_" + suffix)

    @generic.copy_column_constraints #TODO: Appears to be nulled by the delete decorator below...
    @generic.delete_column_constraints
    def rename_column(self, table_name, old, new):
        """Rename a column in place; no-op when old == new."""
        if old == new:
            # Short-circuit out
            return []
        self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % (
            self.quote_name(table_name),
            self.quote_name(old),
            self.quote_name(new),
        ))

    @generic.invalidate_table_constraints
    def add_column(self, table_name, name, field, keep_default=False):
        """
        Add a column, then strip its database-level default again via
        alter_column (the default is only scaffolding for existing rows).
        """
        # NOTE(review): keep_default is accepted but never consulted below —
        # the default is dropped whenever field.default is not None.
        field = self._field_sanity(field)
        sql = self.column_sql(table_name, name, field)
        sql = self.adj_column_sql(sql)

        if sql:
            params = (
                self.quote_name(table_name),
                sql
            )
            sql = self.add_column_string % params
            self.execute(sql)

            # Now, drop the default if we need to
            if field.default is not None:
                field.default = NOT_PROVIDED
                self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)

    def delete_column(self, table_name, name):
        # Oracle path quotes the table name before delegating to generic.
        return super(DatabaseOperations, self).delete_column(self.quote_name(table_name), name)

    def lookup_constraint(self, db_name, table_name, column_name=None):
        if column_name:
            # Column names in the constraint cache come from the database,
            # make sure we use the properly shortened/uppercased version
            # for lookup.
            column_name = self.normalize_name(column_name)
        return super(DatabaseOperations, self).lookup_constraint(db_name, table_name, column_name)

    def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"):
        # Normalize column names the same way the cache does before lookup.
        if columns:
            columns = [self.normalize_name(c) for c in columns]
        return super(DatabaseOperations, self)._constraints_affecting_columns(table_name, columns, type)

    def _field_sanity(self, field):
        """
        This particular override stops us sending DEFAULTs for BooleanField.
        """
        if isinstance(field, models.BooleanField) and field.has_default():
            field.default = int(field.to_python(field.get_default()))
        # On Oracle, empty strings are null
        if isinstance(field, (models.CharField, models.TextField)):
            field.null = field.empty_strings_allowed
        return field

    def _default_value_workaround(self, value):
        # Oracle wants date/time defaults as quoted literals.
        from datetime import date,time,datetime
        if isinstance(value, (date,time,datetime)):
            return "'%s'" % value
        else:
            return super(DatabaseOperations, self)._default_value_workaround(value)

    def _fill_constraint_cache(self, db_name, table_name):
        """Populate the constraint cache for table_name from USER_CONSTRAINTS."""
        self._constraint_cache.setdefault(db_name, {})
        self._constraint_cache[db_name][table_name] = {}

        rows = self.execute("""
            SELECT user_cons_columns.constraint_name,
                user_cons_columns.column_name,
                user_constraints.constraint_type
            FROM user_constraints
            JOIN user_cons_columns ON
                user_constraints.table_name = user_cons_columns.table_name AND
                user_constraints.constraint_name = user_cons_columns.constraint_name
            WHERE user_constraints.table_name = '%s'
        """ % self.normalize_name(table_name))

        for constraint, column, kind in rows:
            self._constraint_cache[db_name][table_name].setdefault(column, set())
            self._constraint_cache[db_name][table_name][column].add((self.constraints_dict[kind], constraint))
        return
diff --git a/lib/python2.7/site-packages/south/db/postgresql_psycopg2.py b/lib/python2.7/site-packages/south/db/postgresql_psycopg2.py
new file mode 100644
index 0000000..d6c63c4
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/postgresql_psycopg2.py
@@ -0,0 +1,96 @@
+from __future__ import print_function
+
+import uuid
+from django.db.backends.util import truncate_name
+from south.db import generic
+
+
class DatabaseOperations(generic.DatabaseOperations):

    """
    PsycoPG2 implementation of database operations.
    """

    backend_name = "postgres"

    def create_index_name(self, table_name, column_names, suffix=""):
        """
        Generate a unique name for the index

        Django's logic for naming field indexes is different in the
        postgresql_psycopg2 backend, so we follow that for single-column
        indexes.
        """

        if len(column_names) == 1:
            return truncate_name(
                '%s_%s%s' % (table_name, column_names[0], suffix),
                self._get_connection().ops.max_name_length()
            )
        return super(DatabaseOperations, self).create_index_name(table_name, column_names, suffix)

    @generic.copy_column_constraints
    @generic.delete_column_constraints
    def rename_column(self, table_name, old, new):
        """Rename a column; constraint bookkeeping is done by the decorators."""
        if old == new:
            # Short-circuit out
            return []
        self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % (
            self.quote_name(table_name),
            self.quote_name(old),
            self.quote_name(new),
        ))

    @generic.invalidate_table_constraints
    def rename_table(self, old_table_name, table_name):
        "will rename the table and an associated ID sequence and primary key index"
        # First, rename the table
        generic.DatabaseOperations.rename_table(self, old_table_name, table_name)
        # Then, try renaming the ID sequence
        # (if you're using other AutoFields... your problem, unfortunately)

        if self.execute(
            """
            SELECT 1
            FROM information_schema.sequences
            WHERE sequence_name = %s
            """,
            [old_table_name + '_id_seq']
        ):
            generic.DatabaseOperations.rename_table(self, old_table_name + "_id_seq", table_name + "_id_seq")

        # Rename primary key index, will not rename other indices on
        # the table that are used by django (e.g. foreign keys). Until
        # figure out how, you need to do this yourself.

        pkey_index_names = self.execute(
            """
            SELECT pg_index.indexrelid::regclass
            FROM pg_index, pg_attribute
            WHERE
                indrelid = %s::regclass AND
                pg_attribute.attrelid = indrelid AND
                pg_attribute.attnum = any(pg_index.indkey)
                AND indisprimary
            """,
            [table_name]
        )
        # NOTE(review): this membership test assumes execute() yields values
        # directly comparable to the plain string — verify the row shape
        # (execute commonly returns tuples per row).
        if old_table_name + "_pkey" in pkey_index_names:
            generic.DatabaseOperations.rename_table(self, old_table_name + "_pkey", table_name + "_pkey")

    def rename_index(self, old_index_name, index_name):
        "Rename an index individually"
        # Postgres renames indexes with the same ALTER TABLE ... RENAME form.
        generic.DatabaseOperations.rename_table(self, old_index_name, index_name)

    def _default_value_workaround(self, value):
        "Support for UUIDs on psql"
        if isinstance(value, uuid.UUID):
            return str(value)
        else:
            return super(DatabaseOperations, self)._default_value_workaround(value)

    def _db_type_for_alter_column(self, field):
        # Delegate to the generic positive-integer-aware type resolution.
        return self._db_positive_type_for_alter_column(DatabaseOperations, field)

    def _alter_add_column_mods(self, field, name, params, sqls):
        # Adds the CHECK (>= 0) clause for positive integer fields.
        return self._alter_add_positive_check(DatabaseOperations, field, name, params, sqls)
diff --git a/lib/python2.7/site-packages/south/db/sql_server/__init__.py b/lib/python2.7/site-packages/south/db/sql_server/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/sql_server/__init__.py
diff --git a/lib/python2.7/site-packages/south/db/sql_server/pyodbc.py b/lib/python2.7/site-packages/south/db/sql_server/pyodbc.py
new file mode 100644
index 0000000..b725ec0
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/sql_server/pyodbc.py
@@ -0,0 +1,444 @@
+from datetime import date, datetime, time
+from warnings import warn
+from django.db import models
+from django.db.models import fields
+from south.db import generic
+from south.db.generic import delete_column_constraints, invalidate_table_constraints, copy_column_constraints
+from south.exceptions import ConstraintDropped
+from south.utils.py3 import string_types
+try:
+ from django.utils.encoding import smart_text # Django >= 1.5
+except ImportError:
+ from django.utils.encoding import smart_unicode as smart_text # Django < 1.5
+from django.core.management.color import no_style
+
class DatabaseOperations(generic.DatabaseOperations):
    """
    django-pyodbc (sql_server.pyodbc) implementation of database operations.
    """

    backend_name = "pyodbc"

    add_column_string = 'ALTER TABLE %s ADD %s;'
    alter_string_set_type = 'ALTER COLUMN %(column)s %(type)s'
    alter_string_set_null = 'ALTER COLUMN %(column)s %(type)s NULL'
    alter_string_drop_null = 'ALTER COLUMN %(column)s %(type)s NOT NULL'

    # MSSQL cannot combine several column alterations in one statement.
    allows_combined_alters = False

    drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
    drop_constraint_string = 'ALTER TABLE %(table_name)s DROP CONSTRAINT %(constraint_name)s'
    delete_column_string = 'ALTER TABLE %s DROP COLUMN %s'

    #create_check_constraint_sql = "ALTER TABLE %(table)s " + \
    #        generic.DatabaseOperations.add_check_constraint_fragment
    create_foreign_key_sql = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s " + \
                             "FOREIGN KEY (%(column)s) REFERENCES %(target)s"
    create_unique_sql = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s UNIQUE (%(columns)s)"

    default_schema_name = "dbo"

    has_booleans = False

    @delete_column_constraints
    def delete_column(self, table_name, name):
        """
        Drop a column, first dropping every constraint, index and default
        that references it (MSSQL refuses to drop the column otherwise).
        """
        q_table_name, q_name = (self.quote_name(table_name), self.quote_name(name))

        # Zap the constraints
        for const in self._find_constraints_for_column(table_name,name):
            params = {'table_name':q_table_name, 'constraint_name': const}
            sql = self.drop_constraint_string % params
            self.execute(sql, [])

        # Zap the indexes
        for ind in self._find_indexes_for_column(table_name,name):
            params = {'table_name':q_table_name, 'index_name': ind}
            sql = self.drop_index_string % params
            self.execute(sql, [])

        # Zap default if exists
        drop_default = self.drop_column_default_sql(table_name, name)
        if drop_default:
            sql = "ALTER TABLE [%s] %s" % (table_name, drop_default)
            self.execute(sql, [])

        # Finally zap the column itself
        self.execute(self.delete_column_string % (q_table_name, q_name), [])

    def _find_indexes_for_column(self, table_name, name):
        "Find the indexes that apply to a column, needed when deleting"

        sql = """
        SELECT si.name, si.id, sik.colid, sc.name
        FROM dbo.sysindexes si WITH (NOLOCK)
        INNER JOIN dbo.sysindexkeys sik WITH (NOLOCK)
            ON  sik.id = si.id
            AND sik.indid = si.indid
        INNER JOIN dbo.syscolumns sc WITH (NOLOCK)
            ON  si.id = sc.id
            AND sik.colid = sc.colid
        WHERE si.indid !=0
            AND si.id = OBJECT_ID('%s')
            AND sc.name = '%s'
        """
        idx = self.execute(sql % (table_name, name), [])
        return [i[0] for i in idx]

    def _find_constraints_for_column(self, table_name, name, just_names=True):
        """
        Find the constraints that apply to a column, needed when deleting. Defaults not included.
        This is more general than the parent _constraints_affecting_columns, as on MSSQL this
        includes PK and FK constraints.
        """

        sql = """
        SELECT CC.[CONSTRAINT_NAME]
            ,TC.[CONSTRAINT_TYPE]
            ,CHK.[CHECK_CLAUSE]
            ,RFD.TABLE_SCHEMA
            ,RFD.TABLE_NAME
            ,RFD.COLUMN_NAME
            -- used for normalized names
            ,CC.TABLE_NAME
            ,CC.COLUMN_NAME
        FROM [INFORMATION_SCHEMA].[TABLE_CONSTRAINTS] TC
        JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE CC
            ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG
            AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA
            AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
        LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS CHK
            ON CHK.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG
            AND CHK.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA
            AND CHK.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
            AND 'CHECK' = TC.CONSTRAINT_TYPE
        LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS REF
            ON REF.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG
            AND REF.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA
            AND REF.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
            AND 'FOREIGN KEY' = TC.CONSTRAINT_TYPE
        LEFT JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE RFD
            ON RFD.CONSTRAINT_CATALOG = REF.UNIQUE_CONSTRAINT_CATALOG
            AND RFD.CONSTRAINT_SCHEMA = REF.UNIQUE_CONSTRAINT_SCHEMA
            AND RFD.CONSTRAINT_NAME = REF.UNIQUE_CONSTRAINT_NAME
        WHERE CC.CONSTRAINT_CATALOG = CC.TABLE_CATALOG
            AND CC.CONSTRAINT_SCHEMA = CC.TABLE_SCHEMA
            AND CC.TABLE_CATALOG = %s
            AND CC.TABLE_SCHEMA = %s
            AND CC.TABLE_NAME = %s
            AND CC.COLUMN_NAME = %s
        """
        db_name = self._get_setting('name')
        schema_name = self._get_schema_name()
        table = self.execute(sql, [db_name, schema_name, table_name, name])

        if just_names:
            return [r[0] for r in table]

        # Build {constraint_name: (type, details)}; details are columns for
        # PK/UNIQUE, the check clause for CHECK, (schema, table, column)
        # for FOREIGN KEY.
        all = {}
        for r in table:
            cons_name, type = r[:2]
            if type=='PRIMARY KEY' or type=='UNIQUE':
                cons = all.setdefault(cons_name, (type,[]))
                sql = '''
                SELECT COLUMN_NAME
                FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE RFD
                WHERE RFD.CONSTRAINT_CATALOG = %s
                AND RFD.CONSTRAINT_SCHEMA = %s
                AND RFD.TABLE_NAME = %s
                AND RFD.CONSTRAINT_NAME = %s
                '''
                columns = self.execute(sql, [db_name, schema_name, table_name, cons_name])
                cons[1].extend(col for col, in columns)
            elif type=='CHECK':
                cons = (type, r[2])
            elif type=='FOREIGN KEY':
                if cons_name in all:
                    raise NotImplementedError("Multiple-column foreign keys are not supported")
                else:
                    cons = (type, r[3:6])
            else:
                raise NotImplementedError("Don't know how to handle constraints of type "+ type)
            all[cons_name] = cons
        return all

    @invalidate_table_constraints
    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
        """
        Alters the given column name so it will match the given field.
        Note that conversion between the two by the database must be possible.
        Will not automatically add _id by default; to have this behavour, pass
        explicit_name=False.

        @param table_name: The name of the table to add the column to
        @param name: The name of the column to alter
        @param field: The new field definition to use
        """
        self._fix_field_definition(field)

        if not ignore_constraints:
            # Drop FK and other constraints on the column first; they are
            # restored (where possible) after the generic alteration runs.
            qn = self.quote_name
            sch = qn(self._get_schema_name())
            tab = qn(table_name)
            table = ".".join([sch, tab])
            try:
                self.delete_foreign_key(table_name, name)
            except ValueError:
                # no FK constraint on this field. That's OK.
                pass
            constraints = self._find_constraints_for_column(table_name, name, False)
            for constraint in constraints.keys():
                params = dict(table_name = table,
                              constraint_name = qn(constraint))
                sql = self.drop_constraint_string % params
                self.execute(sql, [])

        ret_val = super(DatabaseOperations, self).alter_column(table_name, name, field, explicit_name, ignore_constraints=True)

        if not ignore_constraints:
            for cname, (ctype,args) in constraints.items():
                params = dict(table = table,
                              constraint = qn(cname))
                if ctype=='UNIQUE':
                    params['columns'] = ", ".join(map(qn,args))
                    sql = self.create_unique_sql % params
                elif ctype=='PRIMARY KEY':
                    params['columns'] = ", ".join(map(qn,args))
                    sql = self.create_primary_key_string % params
                elif ctype=='FOREIGN KEY':
                    continue
                    # Foreign keys taken care of below
                    #target = "%s.%s(%s)" % tuple(map(qn,args))
                    #params.update(column = qn(name), target = target)
                    #sql = self.create_foreign_key_sql % params
                elif ctype=='CHECK':
                    warn(ConstraintDropped("CHECK "+ args, table_name, name))
                    continue
                    #TODO: Some check constraints should be restored; but not before the generic
                    #      backend restores them.
                    #params['check'] = args
                    #sql = self.create_check_constraint_sql % params
                else:
                    # NOTE(review): `type` here is the builtin, not the
                    # constraint type (`ctype`) — if ever hit, this raises
                    # TypeError from the concatenation instead of the
                    # intended NotImplementedError message.
                    raise NotImplementedError("Don't know how to handle constraints of type "+ type)
                self.execute(sql, [])
            # Create foreign key if necessary
            if field.rel and self.supports_foreign_keys:
                self.execute(
                    self.foreign_key_sql(
                        table_name,
                        field.column,
                        field.rel.to._meta.db_table,
                        field.rel.to._meta.get_field(field.rel.field_name).column
                    )
                )
                model = self.mock_model("FakeModelForIndexCreation", table_name)
                for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
                    self.execute(stmt)

        return ret_val

    def _alter_set_defaults(self, field, name, params, sqls):
        "Subcommand of alter_column that sets default values (overrideable)"
        # Historically, we used to set defaults here.
        # But since South 0.8, we don't ever set defaults on alter-column -- we only
        # use database-level defaults as scaffolding when adding columns.
        # However, we still sometimes need to remove defaults in alter-column.
        table_name = self.quote_name(params['table_name'])
        drop_default = self.drop_column_default_sql(table_name, name)
        if drop_default:
            sqls.append((drop_default, []))

    def _value_to_unquoted_literal(self, field, value):
        """Render a Python value as an unquoted SQL literal string."""
        # Start with the field's own translation
        conn = self._get_connection()
        value = field.get_db_prep_save(value, connection=conn)
        # This is still a Python object -- nobody expects to need a literal.
        if isinstance(value, string_types):
            return smart_text(value)
        elif isinstance(value, (date,time,datetime)):
            return value.isoformat()
        else:
            #TODO: Anybody else needs special translations?
            return str(value)

    def _default_value_workaround(self, value):
        # MSSQL wants date/time defaults in ISO format.
        if isinstance(value, (date,time,datetime)):
            return value.isoformat()
        else:
            return super(DatabaseOperations, self)._default_value_workaround(value)

    def _quote_string(self, s):
        # T-SQL string literal: single quotes, doubled to escape.
        return "'" + s.replace("'","''") + "'"

    def drop_column_default_sql(self, table_name, name, q_name=None):
        "MSSQL specific drop default, which is a pain"

        sql = """
        SELECT object_name(cdefault)
        FROM syscolumns
        WHERE id = object_id('%s')
        AND name = '%s'
        """
        cons = self.execute(sql % (table_name, name), [])
        if cons and cons[0] and cons[0][0]:
            return "DROP CONSTRAINT %s" % cons[0][0]
        return None

    def _fix_field_definition(self, field):
        # MSSQL has no boolean literals; coerce boolean defaults to 0/1.
        if isinstance(field, (fields.BooleanField, fields.NullBooleanField)):
            if field.default == True:
                field.default = 1
            if field.default == False:
                field.default = 0

    # This is copied from South's generic add_column, with two modifications:
    # 1) The sql-server-specific call to _fix_field_definition
    # 2) Removing a default, when needed, by calling drop_default and not the more general alter_column
    @invalidate_table_constraints
    def add_column(self, table_name, name, field, keep_default=False):
        """
        Adds the column 'name' to the table 'table_name'.
        Uses the 'field' paramater, a django.db.models.fields.Field instance,
        to generate the necessary sql

        @param table_name: The name of the table to add the column to
        @param name: The name of the column to add
        @param field: The field to use
        """
        self._fix_field_definition(field)
        sql = self.column_sql(table_name, name, field)
        if sql:
            params = (
                self.quote_name(table_name),
                sql,
            )
            sql = self.add_column_string % params
            self.execute(sql)

            # Now, drop the default if we need to
            if not keep_default and field.default is not None:
                field.default = fields.NOT_PROVIDED
                #self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)
                self.drop_default(table_name, name, field)

    @invalidate_table_constraints
    def drop_default(self, table_name, name, field):
        """Drop the DEFAULT constraint on a column, if one exists."""
        fragment = self.drop_column_default_sql(table_name, name)
        if fragment:
            table_name = self.quote_name(table_name)
            sql = " ".join(["ALTER TABLE", table_name, fragment])
            self.execute(sql)

    @invalidate_table_constraints
    def create_table(self, table_name, field_defs):
        """Create a table, fixing boolean defaults in the field defs first."""
        # Tweak stuff as needed
        for _, f in field_defs:
            self._fix_field_definition(f)

        # Run
        super(DatabaseOperations, self).create_table(table_name, field_defs)

    def _find_referencing_fks(self, table_name):
        "MSSQL does not support cascading FKs when dropping tables, we need to implement."

        # FK -- Foreign Keys
        # UCTU -- Unique Constraints Table Usage
        # FKTU -- Foreign Key Table Usage
        # (last two are both really CONSTRAINT_TABLE_USAGE, different join conditions)
        sql = """
        SELECT FKTU.TABLE_SCHEMA as REFING_TABLE_SCHEMA,
            FKTU.TABLE_NAME as REFING_TABLE_NAME,
            FK.[CONSTRAINT_NAME] as FK_NAME
        FROM [INFORMATION_SCHEMA].[REFERENTIAL_CONSTRAINTS] FK
        JOIN [INFORMATION_SCHEMA].[CONSTRAINT_TABLE_USAGE] UCTU
            ON FK.UNIQUE_CONSTRAINT_CATALOG = UCTU.CONSTRAINT_CATALOG and
               FK.UNIQUE_CONSTRAINT_NAME = UCTU.CONSTRAINT_NAME and
               FK.UNIQUE_CONSTRAINT_SCHEMA = UCTU.CONSTRAINT_SCHEMA
        JOIN [INFORMATION_SCHEMA].[CONSTRAINT_TABLE_USAGE] FKTU
            ON FK.CONSTRAINT_CATALOG = FKTU.CONSTRAINT_CATALOG and
               FK.CONSTRAINT_NAME = FKTU.CONSTRAINT_NAME and
               FK.CONSTRAINT_SCHEMA = FKTU.CONSTRAINT_SCHEMA
        WHERE FK.CONSTRAINT_CATALOG = %s
            AND UCTU.TABLE_SCHEMA = %s -- REFD_TABLE_SCHEMA
            AND UCTU.TABLE_NAME = %s -- REFD_TABLE_NAME
        """
        db_name = self._get_setting('name')
        schema_name = self._get_schema_name()
        return self.execute(sql, [db_name, schema_name, table_name])

    @invalidate_table_constraints
    def delete_table(self, table_name, cascade=True):
        """
        Deletes the table 'table_name'.
        """
        if cascade:
            # Emulate CASCADE: drop every FK that points at this table first.
            refing = self._find_referencing_fks(table_name)
            for schmea, table, constraint in refing:
                table = ".".join(map (self.quote_name, [schmea, table]))
                params = dict(table_name = table,
                              constraint_name = self.quote_name(constraint))
                sql = self.drop_constraint_string % params
                self.execute(sql, [])
            cascade = False
        super(DatabaseOperations, self).delete_table(table_name, cascade)

    @copy_column_constraints
    @delete_column_constraints
    def rename_column(self, table_name, old, new):
        """
        Renames the column of 'table_name' from 'old' to 'new'.
        WARNING - This isn't transactional on MSSQL!
        """
        if old == new:
            # No Operation
            return
        # Examples on the MS site show the table name not being quoted...
        params = (table_name, self.quote_name(old), self.quote_name(new))
        self.execute("EXEC sp_rename '%s.%s', %s, 'COLUMN'" % params)

    @invalidate_table_constraints
    def rename_table(self, old_table_name, table_name):
        """
        Renames the table 'old_table_name' to 'table_name'.
        WARNING - This isn't transactional on MSSQL!
        """
        if old_table_name == table_name:
            # No Operation
            return
        params = (self.quote_name(old_table_name), self.quote_name(table_name))
        self.execute('EXEC sp_rename %s, %s' % params)

    def _db_type_for_alter_column(self, field):
        # Delegate to the generic positive-integer-aware type resolution.
        return self._db_positive_type_for_alter_column(DatabaseOperations, field)

    def _alter_add_column_mods(self, field, name, params, sqls):
        # Adds the CHECK (>= 0) clause for positive integer fields.
        return self._alter_add_positive_check(DatabaseOperations, field, name, params, sqls)

    @invalidate_table_constraints
    def delete_foreign_key(self, table_name, column):
        """Drop the FK constraint on a column plus its implied non-unique index."""
        super(DatabaseOperations, self).delete_foreign_key(table_name, column)
        # A FK also implies a non-unique index
        find_index_sql = """
        SELECT i.name -- s.name, t.name, c.name
        FROM sys.tables t
        INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
        INNER JOIN sys.indexes i ON i.object_id = t.object_id
        INNER JOIN sys.index_columns ic ON ic.object_id = t.object_id
            AND ic.index_id = i.index_id
        INNER JOIN sys.columns c ON c.object_id = t.object_id
            AND ic.column_id = c.column_id
        WHERE i.is_unique=0 AND i.is_primary_key=0 AND i.is_unique_constraint=0
            AND s.name = %s
            AND t.name = %s
            AND c.name = %s
        """
        schema = self._get_schema_name()
        indexes = self.execute(find_index_sql, [schema, table_name, column])
        qn = self.quote_name
        for index in (i[0] for i in indexes if i[0]): # "if i[0]" added because an empty name may return
            self.execute("DROP INDEX %s on %s.%s" % (qn(index), qn(schema), qn(table_name) ))
+
diff --git a/lib/python2.7/site-packages/south/db/sqlite3.py b/lib/python2.7/site-packages/south/db/sqlite3.py
new file mode 100644
index 0000000..c4014d3
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/sqlite3.py
@@ -0,0 +1,272 @@
+from south.db import generic
+
+
class DatabaseOperations(generic.DatabaseOperations):

    """
    SQLite3 implementation of database operations.

    SQLite's ALTER TABLE support is minimal, so most schema changes are
    implemented by rebuilding the whole table: create a temporary table with
    the modified schema, copy the rows across, drop the original, and rename
    the temporary table into place (see _remake_table).
    """

    backend_name = "sqlite3"

    # SQLite ignores several constraints. I wish I could.
    supports_foreign_keys = False
    has_check_constraints = False
    has_booleans = False

    def add_column(self, table_name, name, field, *args, **kwds):
        """
        Adds a column by rebuilding the table.

        Raises ValueError for a null=False column without a usable default,
        since the remade table could not be populated for existing rows.
        """
        # If it's not nullable, and has no default, raise an error (SQLite is picky)
        if (not field.null and
            (not field.has_default() or field.get_default() is None) and
            not field.empty_strings_allowed):
            raise ValueError("You cannot add a null=False column without a default value.")
        # Initialise the field.
        field.set_attributes_from_name(name)
        # We add columns by remaking the table; even though SQLite supports
        # adding columns, it doesn't support adding PRIMARY KEY or UNIQUE cols.
        # We define fields with no default; a default will be used, though, to fill up the remade table
        field_default = None
        if not getattr(field, '_suppress_default', False):
            default = field.get_default()
            if default is not None:
                field_default = "'%s'" % field.get_db_prep_save(default, connection=self._get_connection())
        field._suppress_default = True
        self._remake_table(table_name, added={
            field.column: (self._column_sql_for_create(table_name, name, field, False), field_default)
        })

    def _get_full_table_description(self, connection, cursor, table_name):
        """Returns one dict per column of `table_name`, taken straight from
        SQLite's table_info PRAGMA (including the default value and pk flag,
        which Django's own introspection does not expose)."""
        cursor.execute('PRAGMA table_info(%s)' % connection.ops.quote_name(table_name))
        # PRAGMA columns: cid, name, type, notnull, dflt_value, pk
        return [{'name': field[1],
                 'type': field[2],
                 'null_ok': not field[3],
                 'dflt_value': field[4],
                 'pk': field[5]     # undocumented
                 } for field in cursor.fetchall()]

    @generic.invalidate_table_constraints
    def _remake_table(self, table_name, added=None, renames=None, deleted=None, altered=None, primary_key_override=None, uniques_deleted=None):
        """
        Given a table and three sets of changes (renames, deletes, alters),
        recreates it with the modified schema.

        added            -- {column: (create_sql, quoted_default_or_None)}
        renames          -- {old_column: new_column}
        deleted          -- list of column names to drop
        altered          -- {column: new_type_sql}
        primary_key_override -- column name to force as PRIMARY KEY (or a
                                truthy non-name to strip all PKs)
        uniques_deleted  -- list of column names whose UNIQUE should go
        """
        # None sentinels instead of mutable default arguments: the values are
        # only read below, but shared mutable defaults are still a trap.
        added = added or {}
        renames = renames or {}
        deleted = deleted or []
        altered = altered or {}
        uniques_deleted = uniques_deleted or []
        # Dry runs get skipped completely
        if self.dry_run:
            return
        # Temporary table's name
        temp_name = "_south_new_" + table_name
        # Work out the (possibly new) definitions of each column
        definitions = {}
        cursor = self._get_connection().cursor()
        # Get the index descriptions
        indexes = self._get_connection().introspection.get_indexes(cursor, table_name)
        standalone_indexes = self._get_standalone_indexes(table_name)
        # Work out new column defs.
        for column_info in self._get_full_table_description(self._get_connection(), cursor, table_name):
            name = column_info['name']
            if name in deleted:
                continue
            # Get the type, ignoring PRIMARY KEY (we need to be consistent)
            type = column_info['type'].replace("PRIMARY KEY", "")
            # Add on primary key, not null or unique if needed.
            if (primary_key_override and primary_key_override == name) or \
               (not primary_key_override and name in indexes and
                indexes[name]['primary_key']):
                type += " PRIMARY KEY"
            elif not column_info['null_ok']:
                type += " NOT NULL"
            if (name in indexes and indexes[name]['unique'] and
                name not in uniques_deleted):
                type += " UNIQUE"
            if column_info['dflt_value'] is not None:
                type += " DEFAULT " + column_info['dflt_value']
            # Deal with a rename
            if name in renames:
                name = renames[name]
            # Add to the defs
            definitions[name] = type
        # Add on altered columns
        for name, type in altered.items():
            if (primary_key_override and primary_key_override == name) or \
               (not primary_key_override and name in indexes and
                indexes[name]['primary_key']):
                type += " PRIMARY KEY"
            if (name in indexes and indexes[name]['unique'] and
                name not in uniques_deleted):
                type += " UNIQUE"
            definitions[name] = type
        # Add on the new columns
        for name, (type, _) in added.items():
            if (primary_key_override and primary_key_override == name):
                type += " PRIMARY KEY"
            definitions[name] = type
        # Alright, Make the table
        self.execute("CREATE TABLE %s (%s)" % (
            self.quote_name(temp_name),
            ", ".join(["%s %s" % (self.quote_name(cname), ctype) for cname, ctype in definitions.items()]),
        ))
        # Copy over the data
        self._copy_data(table_name, temp_name, renames, added)
        # Delete the old table, move our new one over it
        self.delete_table(table_name)
        self.rename_table(temp_name, table_name)
        # Recreate multi-valued indexes
        # We can't do that before since it's impossible to rename indexes
        # and index name scope is global
        self._make_standalone_indexes(table_name, standalone_indexes, renames=renames, deleted=deleted, uniques_deleted=uniques_deleted)
        self.deferred_sql = [] # prevent double indexing

    def _copy_data(self, src, dst, field_renames=None, added=None):
        """Used to copy data into a new table. Added columns are filled from
        their recorded default expression (if any)."""
        field_renames = field_renames or {}
        added = added or {}
        # Make a list of all the fields to select
        cursor = self._get_connection().cursor()
        src_fields = [column_info[0] for column_info in self._get_connection().introspection.get_table_description(cursor, src)]
        dst_fields = [column_info[0] for column_info in self._get_connection().introspection.get_table_description(cursor, dst)]
        src_fields_new = []
        dst_fields_new = []
        for field in src_fields:
            if field in field_renames:
                dst_fields_new.append(self.quote_name(field_renames[field]))
            elif field in dst_fields:
                dst_fields_new.append(self.quote_name(field))
            else:
                # Column was deleted; don't copy it.
                continue
            src_fields_new.append(self.quote_name(field))
        # Newly-added columns select their default expression as the value.
        for field, (_, default) in added.items():
            if default is not None:
                field = self.quote_name(field)
                src_fields_new.append("%s as %s" % (default, field))
                dst_fields_new.append(field)
        # Copy over the data
        self.execute("INSERT INTO %s (%s) SELECT %s FROM %s;" % (
            self.quote_name(dst),
            ', '.join(dst_fields_new),
            ', '.join(src_fields_new),
            self.quote_name(src),
        ))

    def _create_unique(self, table_name, columns):
        # A UNIQUE constraint in SQLite is just a unique index.
        self._create_index(table_name, columns, True)

    def _create_index(self, table_name, columns, unique=False, index_name=None):
        """Creates an (optionally unique) index; the name defaults to
        table__col1__col2 style."""
        if index_name is None:
            index_name = '%s_%s' % (table_name, '__'.join(columns))
        self.execute("CREATE %sINDEX %s ON %s(%s);" % (
            unique and "UNIQUE " or "",
            self.quote_name(index_name),
            self.quote_name(table_name),
            ', '.join(self.quote_name(c) for c in columns),
        ))

    def _get_standalone_indexes(self, table_name):
        """Returns (name, columns, unique) for every index on `table_name`
        that is NOT implied by the CREATE TABLE column definitions (i.e.
        multi-column or non-unique indexes)."""
        indexes = []
        cursor = self._get_connection().cursor()
        cursor.execute('PRAGMA index_list(%s)' % self.quote_name(table_name))
        # PRAGMA columns: seq, name, unique
        for index, unique in [(field[1], field[2]) for field in cursor.fetchall()]:
            cursor.execute('PRAGMA index_info(%s)' % self.quote_name(index))
            info = cursor.fetchall()
            if len(info) == 1 and unique:
                # This index is already specified in the CREATE TABLE columns
                # specification
                continue
            columns = []
            for field in info:
                columns.append(field[2])
            indexes.append((index, columns, unique))
        return indexes

    def _make_standalone_indexes(self, table_name, indexes, deleted=None, renames=None, uniques_deleted=None):
        """Recreates the standalone indexes captured before a table remake,
        applying any column renames and skipping deleted columns/uniques."""
        deleted = deleted or []
        renames = renames or {}
        uniques_deleted = uniques_deleted or []
        for index_name, index, unique in indexes:
            columns = []

            for name in index:
                # An index touching a deleted column is dropped entirely.
                if name in deleted:
                    columns = []
                    break

                # Handle renames
                if name in renames:
                    name = renames[name]
                columns.append(name)

            # Skip a unique index whose column set was explicitly un-uniqued.
            if columns and (set(columns) != set(uniques_deleted) or not unique):
                self._create_index(table_name, columns, unique, index_name)

    def _column_sql_for_create(self, table_name, name, field, explicit_name=True):
        "Given a field and its name, returns the full type for the CREATE TABLE (without unique/pk)"
        field.set_attributes_from_name(name)
        if not explicit_name:
            name = field.db_column
        else:
            field.column = name
        sql = self.column_sql(table_name, name, field, with_name=False, field_prepared=True)
        # Remove keywords we don't want (this should be type only, not constraint)
        if sql:
            sql = sql.replace("PRIMARY KEY", "")
        return sql

    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
        """
        Changes a column's SQL definition.

        Note that this sqlite3 implementation ignores the ignore_constraints argument.
        The argument is accepted for API compatibility with the generic
        DatabaseOperations.alter_column() method.
        """
        # Change nulls to default if needed
        if not field.null and field.has_default():
            params = {
                "column": self.quote_name(name),
                "table_name": self.quote_name(table_name)
            }
            self._update_nulls_to_default(params, field)
        # Remake the table correctly
        field._suppress_default = True
        self._remake_table(table_name, altered={
            name: self._column_sql_for_create(table_name, name, field, explicit_name),
        })

    def delete_column(self, table_name, column_name):
        """
        Deletes a column (by rebuilding the table without it).
        """
        self._remake_table(table_name, deleted=[column_name])

    def rename_column(self, table_name, old, new):
        """
        Renames a column from one name to another (by rebuilding the table).
        """
        self._remake_table(table_name, renames={old: new})

    def create_unique(self, table_name, columns):
        """
        Create an unique index on columns
        """
        self._create_unique(table_name, columns)

    def delete_unique(self, table_name, columns):
        """
        Delete an unique index (by rebuilding the table without it)
        """
        self._remake_table(table_name, uniques_deleted=columns)

    def create_primary_key(self, table_name, columns):
        """Makes the single given column the primary key via a table remake."""
        if not isinstance(columns, (list, tuple)):
            columns = [columns]
        assert len(columns) == 1, "SQLite backend does not support multi-column primary keys"
        self._remake_table(table_name, primary_key_override=columns[0])

    # Not implemented this yet.
    def delete_primary_key(self, table_name):
        # By passing True in, we make sure we wipe all existing PKs.
        # (True never equals a column name, so no column gets PRIMARY KEY.)
        self._remake_table(table_name, primary_key_override=True)

    # No cascades on deletes
    def delete_table(self, table_name, cascade=True):
        # SQLite has no DROP ... CASCADE; always pass cascade=False upward.
        generic.DatabaseOperations.delete_table(self, table_name, False)
diff --git a/lib/python2.7/site-packages/south/exceptions.py b/lib/python2.7/site-packages/south/exceptions.py
new file mode 100644
index 0000000..f2e772f
--- /dev/null
+++ b/lib/python2.7/site-packages/south/exceptions.py
@@ -0,0 +1,160 @@
+from __future__ import print_function
+
+from traceback import format_exception, format_exc
+
class SouthError(RuntimeError):
    """Base class for all South errors."""
    pass

class SouthWarning(RuntimeWarning):
    """Base class for all South warnings."""
    pass

class BrokenMigration(SouthError):
    """Raised when a migration module cannot be imported/loaded."""
    def __init__(self, migration, exc_info):
        self.migration = migration
        self.exc_info = exc_info
        if self.exc_info:
            self.traceback = ''.join(format_exception(*self.exc_info))
        else:
            try:
                self.traceback = format_exc()
            except AttributeError: # Python3 when there is no previous exception
                self.traceback = None

    def __str__(self):
        return ("While loading migration '%(migration)s':\n"
                '%(traceback)s' % self.__dict__)


class UnknownMigration(BrokenMigration):
    """Raised when a named migration cannot be found at all."""
    def __str__(self):
        # 'traceback' may be unset if __init__ was bypassed.
        if not hasattr(self, "traceback"):
            self.traceback = ""
        return ("Migration '%(migration)s' probably doesn't exist.\n"
                '%(traceback)s' % self.__dict__)


class InvalidMigrationModule(SouthError):
    """Raised when an app's configured migration module path is unusable."""
    def __init__(self, application, module):
        self.application = application
        self.module = module

    def __str__(self):
        return ('The migration module specified for %(application)s, %(module)r, is invalid; the parent module does not exist.' % self.__dict__)


class NoMigrations(SouthError):
    """Raised when an application has no migrations directory/modules."""
    def __init__(self, application):
        self.application = application

    def __str__(self):
        return "Application '%(application)s' has no migrations." % self.__dict__


class MultiplePrefixMatches(SouthError):
    """Raised when a migration-name prefix is ambiguous."""
    def __init__(self, prefix, matches):
        self.prefix = prefix
        self.matches = matches

    def __str__(self):
        self.matches_list = "\n    ".join([str(m) for m in self.matches])
        return ("Prefix '%(prefix)s' matches more than one migration:\n"
                "    %(matches_list)s") % self.__dict__


class GhostMigrations(SouthError):
    """Raised when migration history rows exist with no matching files on disk."""
    def __init__(self, ghosts):
        self.ghosts = ghosts

    def __str__(self):
        self.ghosts_list = "\n    ".join([str(m) for m in self.ghosts])
        return ("\n\n ! These migrations are in the database but not on disk:\n"
                "    %(ghosts_list)s\n"
                " ! I'm not trusting myself; either fix this yourself by fiddling\n"
                " ! with the south_migrationhistory table, or pass --delete-ghost-migrations\n"
                " ! to South to have it delete ALL of these records (this may not be good).") % self.__dict__


class CircularDependency(SouthError):
    """Raised when migration dependencies form a cycle."""
    def __init__(self, trace):
        self.trace = trace

    def __str__(self):
        trace = " -> ".join([str(s) for s in self.trace])
        return ("Found circular dependency:\n"
                "    %s") % trace


class InconsistentMigrationHistory(SouthError):
    """Raised when applied migrations conflict with the on-disk ordering."""
    def __init__(self, problems):
        # Stored for programmatic inspection; not included in the message.
        self.problems = problems

    def __str__(self):
        return ('Inconsistent migration history\n'
                'The following options are available:\n'
                '    --merge: will just attempt the migration ignoring any potential dependency conflicts.')


class DependsOnHigherMigration(SouthError):
    """Raised when a migration depends on a later migration in the same app."""
    def __init__(self, migration, depends_on):
        self.migration = migration
        self.depends_on = depends_on

    def __str__(self):
        return "Lower migration '%(migration)s' depends on a higher migration '%(depends_on)s' in the same app." % self.__dict__


class DependsOnUnknownMigration(SouthError):
    """Raised when a migration depends on a migration that doesn't exist."""
    def __init__(self, migration, depends_on):
        self.migration = migration
        self.depends_on = depends_on

    def __str__(self):
        # BUG FIX: this used to print() the message and implicitly return
        # None, making str(exc) raise "TypeError: __str__ returned non-string".
        return "Migration '%(migration)s' depends on unknown migration '%(depends_on)s'." % self.__dict__


class DependsOnUnmigratedApplication(SouthError):
    """Raised when a migration depends on an app not managed by South."""
    def __init__(self, migration, application):
        self.migration = migration
        self.application = application

    def __str__(self):
        return "Migration '%(migration)s' depends on unmigrated application '%(application)s'." % self.__dict__


class FailedDryRun(SouthError):
    """Raised when a migration errors out during its dry-run pass."""
    def __init__(self, migration, exc_info):
        self.migration = migration
        self.name = migration.name()
        self.exc_info = exc_info
        self.traceback = ''.join(format_exception(*self.exc_info))

    def __str__(self):
        return (" ! Error found during dry run of '%(name)s'! Aborting.\n"
                "%(traceback)s") % self.__dict__


class ORMBaseNotIncluded(SouthError):
    """Raised when a frozen model has something in _ormbases which isn't frozen."""
    pass


class UnfreezeMeLater(Exception):
    """An exception, which tells the ORM unfreezer to postpone this model."""
    pass


class ImpossibleORMUnfreeze(SouthError):
    """Raised if the ORM can't manage to unfreeze all the models in a linear fashion."""
    pass

class ConstraintDropped(SouthWarning):
    """Warning issued when a schema change silently drops a constraint."""
    def __init__(self, constraint, table, column=None):
        self.table = table
        if column:
            self.column = ".%s" % column
        else:
            self.column = ""
        self.constraint = constraint

    def __str__(self):
        return "Constraint %(constraint)s was dropped from %(table)s%(column)s -- was this intended?" % self.__dict__
diff --git a/lib/python2.7/site-packages/south/hacks/__init__.py b/lib/python2.7/site-packages/south/hacks/__init__.py
new file mode 100644
index 0000000..8f28503
--- /dev/null
+++ b/lib/python2.7/site-packages/south/hacks/__init__.py
@@ -0,0 +1,10 @@
+"""
+The hacks module encapsulates all the horrible things that play with Django
+internals in one, evil place.
+This top file will automagically expose the correct Hacks class.
+"""
+
+# Currently, these work for 1.0 and 1.1.
+from south.hacks.django_1_0 import Hacks
+
+hacks = Hacks() \ No newline at end of file
diff --git a/lib/python2.7/site-packages/south/hacks/django_1_0.py b/lib/python2.7/site-packages/south/hacks/django_1_0.py
new file mode 100644
index 0000000..e4a60c6
--- /dev/null
+++ b/lib/python2.7/site-packages/south/hacks/django_1_0.py
@@ -0,0 +1,110 @@
+"""
+Hacks for the Django 1.0/1.0.2 releases.
+"""
+
+import django
+from django.conf import settings
+from django.db.backends.creation import BaseDatabaseCreation
+from django.db.models.loading import cache
+from django.core import management
+from django.core.management.commands.flush import Command as FlushCommand
+from django.utils.datastructures import SortedDict
+
+from south.utils.py3 import string_types
+
class SkipFlushCommand(FlushCommand):
    """A drop-in replacement for Django's 'flush' command that does nothing,
    so test-database creation doesn't wipe data inserted by migrations."""
    def handle_noargs(self, **options):
        # no-op to avoid calling flush
        return
+
class Hacks:
    """
    Django 1.0/1.1-era monkeypatches: swapping INSTALLED_APPS in and out,
    rebuilding the AppCache afterwards, and neutering 'flush' while the
    test database is being created.
    """

    def set_installed_apps(self, apps):
        """
        Sets Django's INSTALLED_APPS setting to be effectively the list passed in.
        """

        # Make sure it's a list.
        apps = list(apps)

        # Make sure it contains strings
        if apps:
            assert isinstance(apps[0], string_types), "The argument to set_installed_apps must be a list of strings."

        # Monkeypatch in!  The old value is stashed on settings so
        # reset_installed_apps() can restore it.
        settings.INSTALLED_APPS, settings.OLD_INSTALLED_APPS = (
            apps,
            settings.INSTALLED_APPS,
        )
        self._redo_app_cache()


    def reset_installed_apps(self):
        """
        Undoes the effect of set_installed_apps.
        """
        settings.INSTALLED_APPS = settings.OLD_INSTALLED_APPS
        self._redo_app_cache()


    def _redo_app_cache(self):
        """
        Used to repopulate AppCache after fiddling with INSTALLED_APPS.
        """
        cache.loaded = False
        # Django >= 1.6 expects 'handled' to be a set; earlier versions a dict.
        cache.handled = set() if django.VERSION >= (1, 6) else {}
        cache.postponed = []
        cache.app_store = SortedDict()
        cache.app_models = SortedDict()
        cache.app_errors = {}
        cache._populate()


    def clear_app_cache(self):
        """
        Clears the contents of AppCache to a blank state, so new models
        from the ORM can be added.
        """
        # Stash the real models so unclear_app_cache() can bring them back.
        self.old_app_models, cache.app_models = cache.app_models, {}


    def unclear_app_cache(self):
        """
        Reversed the effects of clear_app_cache.
        """
        cache.app_models = self.old_app_models
        cache._get_models_cache = {}


    def repopulate_app_cache(self):
        """
        Rebuilds AppCache with the real model definitions.
        """
        cache._populate()

    def store_app_cache_state(self):
        # Shallow snapshot of AppCache's attribute dict for later restoration.
        self.stored_app_cache_state = dict(**cache.__dict__)

    def restore_app_cache_state(self):
        # Restores the snapshot taken by store_app_cache_state().
        cache.__dict__ = self.stored_app_cache_state

    def patch_flush_during_test_db_creation(self):
        """
        Patches BaseDatabaseCreation.create_test_db to not flush database
        """

        def patch(f):
            def wrapper(*args, **kwargs):
                # hold onto the original and replace flush command with a no-op
                original_flush_command = management._commands['flush']
                try:
                    management._commands['flush'] = SkipFlushCommand()
                    # run create_test_db
                    return f(*args, **kwargs)
                finally:
                    # unpatch flush back to the original
                    management._commands['flush'] = original_flush_command
            return wrapper

        BaseDatabaseCreation.create_test_db = patch(BaseDatabaseCreation.create_test_db)
+
diff --git a/lib/python2.7/site-packages/south/introspection_plugins/__init__.py b/lib/python2.7/site-packages/south/introspection_plugins/__init__.py
new file mode 100644
index 0000000..38262b5
--- /dev/null
+++ b/lib/python2.7/site-packages/south/introspection_plugins/__init__.py
@@ -0,0 +1,11 @@
+# This module contains built-in introspector plugins for various common
+# Django apps.
+
+# These imports trigger the lower-down files
+import south.introspection_plugins.geodjango
+import south.introspection_plugins.django_audit_log
+import south.introspection_plugins.django_tagging
+import south.introspection_plugins.django_taggit
+import south.introspection_plugins.django_objectpermissions
+import south.introspection_plugins.annoying_autoonetoone
+
diff --git a/lib/python2.7/site-packages/south/introspection_plugins/annoying_autoonetoone.py b/lib/python2.7/site-packages/south/introspection_plugins/annoying_autoonetoone.py
new file mode 100644
index 0000000..d61304f
--- /dev/null
+++ b/lib/python2.7/site-packages/south/introspection_plugins/annoying_autoonetoone.py
@@ -0,0 +1,11 @@
from django.conf import settings
from south.modelsinspector import add_introspection_rules

# Only register rules when django-annoying is both enabled and importable.
if 'annoying' in settings.INSTALLED_APPS:
    try:
        from annoying.fields import AutoOneToOneField
    except ImportError:
        pass
    else:
        #django-annoying's AutoOneToOneField is essentially a OneToOneField.
        add_introspection_rules([], ["^annoying\.fields\.AutoOneToOneField"])
diff --git a/lib/python2.7/site-packages/south/introspection_plugins/django_audit_log.py b/lib/python2.7/site-packages/south/introspection_plugins/django_audit_log.py
new file mode 100644
index 0000000..b874428
--- /dev/null
+++ b/lib/python2.7/site-packages/south/introspection_plugins/django_audit_log.py
@@ -0,0 +1,30 @@
+"""
+South introspection rules for django-audit-log
+"""
+
from django.contrib.auth.models import User
from django.conf import settings
from south.modelsinspector import add_introspection_rules

# Only register rules when django-audit-log is both enabled and importable.
if "audit_log" in settings.INSTALLED_APPS:
    try:
        # Try and import the field so we can see if audit_log is available
        from audit_log.models import fields

        # Make sure the `to` and `null` parameters will be ignored
        rules = [(
            (fields.LastUserField,),
            [],
            {
                'to': ['rel.to', {'default': User}],
                'null': ['null', {'default': True}],
            },
        )]

        # Add the rules for the `LastUserField`
        add_introspection_rules(
            rules,
            ['^audit_log\.models\.fields\.LastUserField'],
        )
    except ImportError:
        pass
diff --git a/lib/python2.7/site-packages/south/introspection_plugins/django_objectpermissions.py b/lib/python2.7/site-packages/south/introspection_plugins/django_objectpermissions.py
new file mode 100644
index 0000000..42b353b
--- /dev/null
+++ b/lib/python2.7/site-packages/south/introspection_plugins/django_objectpermissions.py
@@ -0,0 +1,16 @@
+"""
+South introspection rules for django-objectpermissions
+"""
+
from django.conf import settings
from south.modelsinspector import add_ignored_fields

# django-objectpermissions' relation descriptors are not real DB fields,
# so South is told to ignore them entirely rather than freeze them.
if 'objectpermissions' in settings.INSTALLED_APPS:
    try:
        from objectpermissions.models import UserPermissionRelation, GroupPermissionRelation
    except ImportError:
        pass
    else:
        add_ignored_fields(["^objectpermissions\.models\.UserPermissionRelation",
                            "^objectpermissions\.models\.GroupPermissionRelation"])
+
diff --git a/lib/python2.7/site-packages/south/introspection_plugins/django_tagging.py b/lib/python2.7/site-packages/south/introspection_plugins/django_tagging.py
new file mode 100644
index 0000000..c02e529
--- /dev/null
+++ b/lib/python2.7/site-packages/south/introspection_plugins/django_tagging.py
@@ -0,0 +1,24 @@
from south.modelsinspector import add_introspection_rules
from django.conf import settings

# Only register rules when django-tagging is both enabled and importable.
if "tagging" in settings.INSTALLED_APPS:
    try:
        from tagging.fields import TagField
    except ImportError:
        pass
    else:
        rules = [
            (
                (TagField, ),
                [],
                {
                    "blank": ["blank", {"default": True}],
                    "max_length": ["max_length", {"default": 255}],
                },
            ),
        ]
        add_introspection_rules(rules, ["^tagging\.fields",])

# tagging_autocomplete's field needs no special keyword handling.
if "tagging_autocomplete" in settings.INSTALLED_APPS:
    add_introspection_rules([], ["^tagging_autocomplete\.models\.TagAutocompleteField"])
+
diff --git a/lib/python2.7/site-packages/south/introspection_plugins/django_taggit.py b/lib/python2.7/site-packages/south/introspection_plugins/django_taggit.py
new file mode 100644
index 0000000..aded23f
--- /dev/null
+++ b/lib/python2.7/site-packages/south/introspection_plugins/django_taggit.py
@@ -0,0 +1,14 @@
+"""
+South introspection rules for django-taggit
+"""
+
from django.conf import settings
from south.modelsinspector import add_ignored_fields

# TaggableManager is a manager, not a DB field; tell South to ignore it.
if 'taggit' in settings.INSTALLED_APPS:
    try:
        from taggit.managers import TaggableManager
    except ImportError:
        pass
    else:
        add_ignored_fields(["^taggit\.managers"])
diff --git a/lib/python2.7/site-packages/south/introspection_plugins/django_timezones.py b/lib/python2.7/site-packages/south/introspection_plugins/django_timezones.py
new file mode 100644
index 0000000..d4b573d
--- /dev/null
+++ b/lib/python2.7/site-packages/south/introspection_plugins/django_timezones.py
@@ -0,0 +1,21 @@
from south.modelsinspector import add_introspection_rules
from django.conf import settings

# Only register rules when django-timezones is both enabled and importable.
if "timezones" in settings.INSTALLED_APPS:
    try:
        from timezones.fields import TimeZoneField
    except ImportError:
        pass
    else:
        rules = [
            (
                (TimeZoneField, ),
                [],
                {
                    "blank": ["blank", {"default": True}],
                    "max_length": ["max_length", {"default": 100}],
                },
            ),
        ]
        add_introspection_rules(rules, ["^timezones\.fields",])
+
diff --git a/lib/python2.7/site-packages/south/introspection_plugins/geodjango.py b/lib/python2.7/site-packages/south/introspection_plugins/geodjango.py
new file mode 100644
index 0000000..bece1c9
--- /dev/null
+++ b/lib/python2.7/site-packages/south/introspection_plugins/geodjango.py
@@ -0,0 +1,45 @@
+"""
+GeoDjango introspection rules
+"""
+
import django
from django.conf import settings

from south.modelsinspector import add_introspection_rules

has_gis = "django.contrib.gis" in settings.INSTALLED_APPS

if has_gis:
    # Alright,import the field
    from django.contrib.gis.db.models.fields import GeometryField

    # Make some introspection rules
    # NOTE(review): this version check only matches the 1.x series; the
    # pre-1.1 attribute names below use underscore prefixes.
    if django.VERSION[0] == 1 and django.VERSION[1] >= 1:
        # Django 1.1's gis module renamed these.
        rules = [
            (
                (GeometryField, ),
                [],
                {
                    "srid": ["srid", {"default": 4326}],
                    "spatial_index": ["spatial_index", {"default": True}],
                    "dim": ["dim", {"default": 2}],
                    "geography": ["geography", {"default": False}],
                },
            ),
        ]
    else:
        rules = [
            (
                (GeometryField, ),
                [],
                {
                    "srid": ["_srid", {"default": 4326}],
                    "spatial_index": ["_index", {"default": True}],
                    "dim": ["_dim", {"default": 2}],
                },
            ),
        ]

    # Install them
    add_introspection_rules(rules, ["^django\.contrib\.gis"])
diff --git a/lib/python2.7/site-packages/south/logger.py b/lib/python2.7/site-packages/south/logger.py
new file mode 100644
index 0000000..2caae3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/logger.py
@@ -0,0 +1,38 @@
+import sys
+import logging
+from django.conf import settings
+
# Create a dummy handler to use for now.
class NullHandler(logging.Handler):
    """A no-op logging handler.

    Kept for backwards compatibility even though logging.NullHandler exists
    in Python 2.7+, since other code may import it from here.
    """
    def emit(self, record):
        pass

def get_logger():
    "Attach a file handler to the logger if there isn't one already."
    debug_on = getattr(settings, "SOUTH_LOGGING_ON", False)
    logging_file = getattr(settings, "SOUTH_LOGGING_FILE", False)

    if debug_on:
        if logging_file:
            # Fewer than 2 handlers means only init_logger()'s NullHandler is
            # attached, i.e. the file handler hasn't been added yet.
            if len(_logger.handlers) < 2:
                _logger.addHandler(logging.FileHandler(logging_file))
                _logger.setLevel(logging.DEBUG)
        else:
            raise IOError("SOUTH_LOGGING_ON is True. You also need a SOUTH_LOGGING_FILE setting.")

    return _logger

def close_logger():
    "Closes the logger handler for the file, so we can remove the file after a test."
    # BUG FIX: iterate over a copy of the handler list. removeHandler()
    # mutates _logger.handlers, and removing entries while iterating the
    # live list skips every other handler (so some were never removed).
    for handler in list(_logger.handlers):
        _logger.removeHandler(handler)
        if isinstance(handler, logging.FileHandler):
            handler.close()

def init_logger():
    "Initialize the south logger"
    logger = logging.getLogger("south")
    # Attach a NullHandler so logging never complains about "no handlers".
    logger.addHandler(NullHandler())
    return logger

# Module-level singleton logger, created at import time.
_logger = init_logger()
diff --git a/lib/python2.7/site-packages/south/management/__init__.py b/lib/python2.7/site-packages/south/management/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/management/__init__.py
diff --git a/lib/python2.7/site-packages/south/management/commands/__init__.py b/lib/python2.7/site-packages/south/management/commands/__init__.py
new file mode 100644
index 0000000..da218eb
--- /dev/null
+++ b/lib/python2.7/site-packages/south/management/commands/__init__.py
@@ -0,0 +1,40 @@
+
+# Common framework for syncdb actions
+
+import copy
+
+from django.core import management
+from django.conf import settings
+
+# Make sure the template loader cache is fixed _now_ (#448)
+import django.template.loaders.app_directories
+
+from south.hacks import hacks
+from south.management.commands.syncdb import Command as SyncCommand
+
class MigrateAndSyncCommand(SyncCommand):
    """Used for situations where "syncdb" is called by test frameworks."""

    # Deep-copy the parent's option list so flipping --migrate's default
    # below doesn't mutate SyncCommand.option_list itself.
    option_list = copy.deepcopy(SyncCommand.option_list)

    # Turn --migrate on by default for this variant of the command.
    for opt in option_list:
        if "--migrate" == opt.get_opt_string():
            opt.default = True
            break
+
def patch_for_test_db_setup():
    """Points the 'syncdb' command at the implementation appropriate for
    test-database creation, and disables flush during create_test_db."""
    # Load the commands cache
    management.get_commands()
    # Repoint to the correct version of syncdb
    if hasattr(settings, "SOUTH_TESTS_MIGRATE") and not settings.SOUTH_TESTS_MIGRATE:
        # point at the core syncdb command when creating tests
        # tests should always be up to date with the most recent model structure
        management._commands['syncdb'] = 'django.core'
    else:
        management._commands['syncdb'] = MigrateAndSyncCommand()
    # Avoid flushing data migrations.
    # http://code.djangoproject.com/ticket/14661 introduced change that flushed custom
    # sql during the test database creation (thus flushing the data migrations).
    # we patch flush to be no-op during create_test_db, but still allow flushing
    # after each test for non-transactional backends.
    hacks.patch_flush_during_test_db_creation()
diff --git a/lib/python2.7/site-packages/south/management/commands/convert_to_south.py b/lib/python2.7/site-packages/south/management/commands/convert_to_south.py
new file mode 100644
index 0000000..658ed48
--- /dev/null
+++ b/lib/python2.7/site-packages/south/management/commands/convert_to_south.py
@@ -0,0 +1,95 @@
+"""
+Quick conversion command module.
+"""
+
+from __future__ import print_function
+
+from optparse import make_option
+import sys
+
+from django.core.management.base import BaseCommand
+from django.core.management.color import no_style
+from django.conf import settings
+from django.db import models
+from django.core import management
+from django.core.exceptions import ImproperlyConfigured
+
+from south.migration import Migrations
+from south.hacks import hacks
+from south.exceptions import NoMigrations
+
class Command(BaseCommand):
    """Converts an app that is currently managed by syncdb to South by
    creating an initial migration and fake-applying it."""

    option_list = BaseCommand.option_list
    # Older Django versions don't define --verbosity on BaseCommand; add it
    # ourselves only when it's missing to avoid an optparse conflict.
    if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]:
        option_list += (
            make_option('--verbosity', action='store', dest='verbosity', default='1',
                type='choice', choices=['0', '1', '2'],
                help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
        )
    option_list += (
        make_option('--delete-ghost-migrations', action='store_true', dest='delete_ghosts', default=False,
            help="Tells South to delete any 'ghost' migrations (ones in the database but not on disk)."),
        make_option('--ignore-ghost-migrations', action='store_true', dest='ignore_ghosts', default=False,
            help="Tells South to ignore any 'ghost' migrations (ones in the database but not on disk) and continue to apply new migrations."),
    )

    help = "Quickly converts the named application to use South if it is currently using syncdb."

    def handle(self, app=None, *args, **options):

        # Make sure we have an app
        if not app:
            print("Please specify an app to convert.")
            return

        # See if the app exists
        app = app.split(".")[-1]
        try:
            app_module = models.get_app(app)
        except ImproperlyConfigured:
            print("There is no enabled application matching '%s'." % app)
            return

        # Try to get its list of models
        model_list = models.get_models(app_module)
        if not model_list:
            print("This application has no models; this command is for applications that already have models syncdb'd.")
            print("Make some models, and then use ./manage.py schemamigration %s --initial instead." % app)
            return

        # Ask South if it thinks it's already got migrations
        try:
            Migrations(app)
        except NoMigrations:
            pass
        else:
            print("This application is already managed by South.")
            return

        # Finally! It seems we've got a candidate, so do the two-command trick
        verbosity = int(options.get('verbosity', 0))
        management.call_command("schemamigration", app, initial=True, verbosity=verbosity)

        # Now, we need to re-clean and sanitise appcache
        hacks.clear_app_cache()
        hacks.repopulate_app_cache()

        # And also clear our cached Migration classes
        Migrations._clear_cache()

        # Now, migrate (fake=True records the initial migration as applied
        # without running its SQL, since the tables already exist)
        management.call_command(
            "migrate",
            app,
            "0001",
            fake=True,
            verbosity=verbosity,
            ignore_ghosts=options.get("ignore_ghosts", False),
            delete_ghosts=options.get("delete_ghosts", False),
        )

        print()
        print("App '%s' converted. Note that South assumed the application's models matched the database" % app)
        print("(i.e. you haven't changed it since last syncdb); if you have, you should delete the %s/migrations" % app)
        print("directory, revert models.py so it matches the database, and try again.")
diff --git a/lib/python2.7/site-packages/south/management/commands/datamigration.py b/lib/python2.7/site-packages/south/management/commands/datamigration.py
new file mode 100644
index 0000000..c3175e7
--- /dev/null
+++ b/lib/python2.7/site-packages/south/management/commands/datamigration.py
@@ -0,0 +1,139 @@
+"""
+Data migration creation command
+"""
+
+from __future__ import print_function
+
+import sys
+import os
+import re
+from optparse import make_option
+
+try:
+ set
+except NameError:
+ from sets import Set as set
+
+from django.core.management.base import BaseCommand
+from django.core.management.color import no_style
+from django.db import models
+from django.conf import settings
+
+from south.migration import Migrations
+from south.exceptions import NoMigrations
+from south.creator import freezer
+
+class Command(BaseCommand):
+ option_list = BaseCommand.option_list + (
+ make_option('--freeze', action='append', dest='freeze_list', type='string',
+ help='Freeze the specified app(s). Provide an app name with each; use the option multiple times for multiple apps'),
+ make_option('--stdout', action='store_true', dest='stdout', default=False,
+ help='Print the migration to stdout instead of writing it to a file.'),
+ )
+ help = "Creates a new template data migration for the given app"
+ usage_str = "Usage: ./manage.py datamigration appname migrationname [--stdout] [--freeze appname]"
+
+ def handle(self, app=None, name="", freeze_list=None, stdout=False, verbosity=1, **options):
+
+ verbosity = int(verbosity)
+
+ # Any supposed lists that are None become empty lists
+ freeze_list = freeze_list or []
+
+ # --stdout means name = -
+ if stdout:
+ name = "-"
+
+ # Only allow valid names
+ if re.search('[^_\w]', name) and name != "-":
+ self.error("Migration names should contain only alphanumeric characters and underscores.")
+
+ # If not name, there's an error
+ if not name:
+ self.error("You must provide a name for this migration.\n" + self.usage_str)
+
+ if not app:
+ self.error("You must provide an app to create a migration for.\n" + self.usage_str)
+
+ # Ensure that verbosity is not a string (Python 3)
+ try:
+ verbosity = int(verbosity)
+ except ValueError:
+            self.error("Verbosity must be a number.\n" + self.usage_str)
+
+ # Get the Migrations for this app (creating the migrations dir if needed)
+ migrations = Migrations(app, force_creation=True, verbose_creation=verbosity > 0)
+
+ # See what filename is next in line. We assume they use numbers.
+ new_filename = migrations.next_filename(name)
+
+ # Work out which apps to freeze
+ apps_to_freeze = self.calc_frozen_apps(migrations, freeze_list)
+
+ # So, what's in this file, then?
+ file_contents = self.get_migration_template() % {
+ "frozen_models": freezer.freeze_apps_to_string(apps_to_freeze),
+ "complete_apps": apps_to_freeze and "complete_apps = [%s]" % (", ".join(map(repr, apps_to_freeze))) or ""
+ }
+
+ # - is a special name which means 'print to stdout'
+ if name == "-":
+ print(file_contents)
+ # Write the migration file if the name isn't -
+ else:
+ fp = open(os.path.join(migrations.migrations_dir(), new_filename), "w")
+ fp.write(file_contents)
+ fp.close()
+ print("Created %s." % new_filename, file=sys.stderr)
+
+ def calc_frozen_apps(self, migrations, freeze_list):
+ """
+ Works out, from the current app, settings, and the command line options,
+ which apps should be frozen.
+ """
+ apps_to_freeze = []
+ for to_freeze in freeze_list:
+ if "." in to_freeze:
+ self.error("You cannot freeze %r; you must provide an app label, like 'auth' or 'books'." % to_freeze)
+ # Make sure it's a real app
+ if not models.get_app(to_freeze):
+ self.error("You cannot freeze %r; it's not an installed app." % to_freeze)
+ # OK, it's fine
+ apps_to_freeze.append(to_freeze)
+ if getattr(settings, 'SOUTH_AUTO_FREEZE_APP', True):
+ apps_to_freeze.append(migrations.app_label())
+ return apps_to_freeze
+
+ def error(self, message, code=1):
+ """
+ Prints the error, and exits with the given code.
+ """
+ print(message, file=sys.stderr)
+ sys.exit(code)
+
+ def get_migration_template(self):
+ return MIGRATION_TEMPLATE
+
+
+MIGRATION_TEMPLATE = """# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import DataMigration
+from django.db import models
+
+class Migration(DataMigration):
+
+ def forwards(self, orm):
+ "Write your forwards methods here."
+ # Note: Don't use "from appname.models import ModelName".
+ # Use orm.ModelName to refer to models in this application,
+ # and orm['appname.ModelName'] for models in other applications.
+
+ def backwards(self, orm):
+ "Write your backwards methods here."
+
+ models = %(frozen_models)s
+
+ %(complete_apps)s
+ symmetrical = True
+"""
diff --git a/lib/python2.7/site-packages/south/management/commands/graphmigrations.py b/lib/python2.7/site-packages/south/management/commands/graphmigrations.py
new file mode 100644
index 0000000..6ff1e47
--- /dev/null
+++ b/lib/python2.7/site-packages/south/management/commands/graphmigrations.py
@@ -0,0 +1,63 @@
+"""
+Outputs a graphviz dot file of the dependencies.
+"""
+
+from __future__ import print_function
+
+from optparse import make_option
+import re
+import textwrap
+
+from django.core.management.base import BaseCommand
+from django.core.management.color import no_style
+
+from south.migration import Migrations, all_migrations
+
+class Command(BaseCommand):
+
+ help = "Outputs a GraphViz dot file of all migration dependencies to stdout."
+
+ def handle(self, **options):
+
+ # Resolve dependencies
+ Migrations.calculate_dependencies()
+
+ colors = [ 'crimson', 'darkgreen', 'darkgoldenrod', 'navy',
+ 'brown', 'darkorange', 'aquamarine' , 'blueviolet' ]
+ color_index = 0
+ wrapper = textwrap.TextWrapper(width=40)
+
+ print("digraph G {")
+
+ # Group each app in a subgraph
+ for migrations in all_migrations():
+ print(" subgraph %s {" % migrations.app_label())
+ print(" node [color=%s];" % colors[color_index])
+ for migration in migrations:
+ # Munge the label - text wrap and change _ to spaces
+ label = "%s - %s" % (
+ migration.app_label(), migration.name())
+ label = re.sub(r"_+", " ", label)
+ label= "\\n".join(wrapper.wrap(label))
+ print(' "%s.%s" [label="%s"];' % (
+ migration.app_label(), migration.name(), label))
+ print(" }")
+ color_index = (color_index + 1) % len(colors)
+
+ # For every migration, print its links.
+ for migrations in all_migrations():
+ for migration in migrations:
+ for other in migration.dependencies:
+ # Added weight tends to keep migrations from the same app
+ # in vertical alignment
+ attrs = "[weight=2.0]"
+ # But the more interesting edges are those between apps
+ if other.app_label() != migration.app_label():
+ attrs = "[style=bold]"
+ print(' "%s.%s" -> "%s.%s" %s;' % (
+ other.app_label(), other.name(),
+ migration.app_label(), migration.name(),
+ attrs
+ ))
+
+ print("}");
diff --git a/lib/python2.7/site-packages/south/management/commands/migrate.py b/lib/python2.7/site-packages/south/management/commands/migrate.py
new file mode 100644
index 0000000..693dbb7
--- /dev/null
+++ b/lib/python2.7/site-packages/south/management/commands/migrate.py
@@ -0,0 +1,264 @@
+"""
+Migrate management command.
+"""
+
+from __future__ import print_function
+
+import os.path, re, sys
+from functools import reduce
+from optparse import make_option
+
+from django.core.management.base import BaseCommand
+from django.conf import settings
+from django.utils.importlib import import_module
+
+from south import migration
+from south.migration import Migrations
+from south.exceptions import NoMigrations
+from south.db import DEFAULT_DB_ALIAS
+
+class Command(BaseCommand):
+ option_list = BaseCommand.option_list + (
+ make_option('--all', action='store_true', dest='all_apps', default=False,
+ help='Run the specified migration for all apps.'),
+ make_option('--list', action='store_true', dest='show_list', default=False,
+ help='List migrations noting those that have been applied'),
+ make_option('--changes', action='store_true', dest='show_changes', default=False,
+ help='List changes for migrations'),
+ make_option('--skip', action='store_true', dest='skip', default=False,
+ help='Will skip over out-of-order missing migrations'),
+ make_option('--merge', action='store_true', dest='merge', default=False,
+ help='Will run out-of-order missing migrations as they are - no rollbacks.'),
+ make_option('--no-initial-data', action='store_true', dest='no_initial_data', default=False,
+ help='Skips loading initial data if specified.'),
+ make_option('--fake', action='store_true', dest='fake', default=False,
+ help="Pretends to do the migrations, but doesn't actually execute them."),
+ make_option('--db-dry-run', action='store_true', dest='db_dry_run', default=False,
+ help="Doesn't execute the SQL generated by the db methods, and doesn't store a record that the migration(s) occurred. Useful to test migrations before applying them."),
+ make_option('--delete-ghost-migrations', action='store_true', dest='delete_ghosts', default=False,
+ help="Tells South to delete any 'ghost' migrations (ones in the database but not on disk)."),
+ make_option('--ignore-ghost-migrations', action='store_true', dest='ignore_ghosts', default=False,
+ help="Tells South to ignore any 'ghost' migrations (ones in the database but not on disk) and continue to apply new migrations."),
+ make_option('--noinput', action='store_false', dest='interactive', default=True,
+ help='Tells Django to NOT prompt the user for input of any kind.'),
+ make_option('--database', action='store', dest='database',
+ default=DEFAULT_DB_ALIAS, help='Nominates a database to synchronize. '
+ 'Defaults to the "default" database.'),
+ )
+ if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]:
+ option_list += (
+ make_option('--verbosity', action='store', dest='verbosity', default='1',
+ type='choice', choices=['0', '1', '2'],
+ help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
+ )
+ help = "Runs migrations for all apps."
+ args = "[appname] [migrationname|zero] [--all] [--list] [--skip] [--merge] [--no-initial-data] [--fake] [--db-dry-run] [--database=dbalias]"
+
+ def handle(self, app=None, target=None, skip=False, merge=False, backwards=False, fake=False, db_dry_run=False, show_list=False, show_changes=False, database=DEFAULT_DB_ALIAS, delete_ghosts=False, ignore_ghosts=False, **options):
+
+ # NOTE: THIS IS DUPLICATED FROM django.core.management.commands.syncdb
+ # This code imports any module named 'management' in INSTALLED_APPS.
+ # The 'management' module is the preferred way of listening to post_syncdb
+ # signals, and since we're sending those out with create_table migrations,
+ # we need apps to behave correctly.
+ for app_name in settings.INSTALLED_APPS:
+ try:
+ import_module('.management', app_name)
+ except ImportError as exc:
+ msg = exc.args[0]
+ if not msg.startswith('No module named') or 'management' not in msg:
+ raise
+ # END DJANGO DUPE CODE
+
+ # if all_apps flag is set, shift app over to target
+ if options.get('all_apps', False):
+ target = app
+ app = None
+
+ # Migrate each app
+ if app:
+ try:
+ apps = [Migrations(app)]
+ except NoMigrations:
+ print("The app '%s' does not appear to use migrations." % app)
+ print("./manage.py migrate " + self.args)
+ return
+ else:
+ apps = list(migration.all_migrations())
+
+ # Do we need to show the list of migrations?
+ if show_list and apps:
+ list_migrations(apps, database, **options)
+
+ if show_changes and apps:
+ show_migration_changes(apps)
+
+ if not (show_list or show_changes):
+
+ for app in apps:
+ result = migration.migrate_app(
+ app,
+ target_name = target,
+ fake = fake,
+ db_dry_run = db_dry_run,
+ verbosity = int(options.get('verbosity', 0)),
+ interactive = options.get('interactive', True),
+ load_initial_data = not options.get('no_initial_data', False),
+ merge = merge,
+ skip = skip,
+ database = database,
+ delete_ghosts = delete_ghosts,
+ ignore_ghosts = ignore_ghosts,
+ )
+ if result is False:
+ sys.exit(1) # Migration failed, so the command fails.
+
+
+def list_migrations(apps, database = DEFAULT_DB_ALIAS, **options):
+ """
+ Prints a list of all available migrations, and which ones are currently applied.
+ Accepts a list of Migrations instances.
+ """
+ from south.models import MigrationHistory
+ applied_migrations = MigrationHistory.objects.filter(app_name__in=[app.app_label() for app in apps])
+ if database != DEFAULT_DB_ALIAS:
+ applied_migrations = applied_migrations.using(database)
+ applied_migrations_lookup = dict(('%s.%s' % (mi.app_name, mi.migration), mi) for mi in applied_migrations)
+
+ print()
+ for app in apps:
+ print(" " + app.app_label())
+ # Get the migrations object
+ for migration in app:
+ full_name = migration.app_label() + "." + migration.name()
+ if full_name in applied_migrations_lookup:
+ applied_migration = applied_migrations_lookup[full_name]
+ print(format_migration_list_item(migration.name(), applied=applied_migration.applied, **options))
+ else:
+ print(format_migration_list_item(migration.name(), applied=False, **options))
+ print()
+
+def show_migration_changes(apps):
+ """
+    Prints the changes introduced by each successive migration in the given apps.
+ Accepts a list of Migrations instances.
+
+ Much simpler, less clear, and much less robust version:
+ grep "ing " migrations/*.py
+ """
+ for app in apps:
+ print(app.app_label())
+ # Get the migrations objects
+ migrations = [migration for migration in app]
+ # we use reduce to compare models in pairs, not to generate a value
+ reduce(diff_migrations, migrations)
+
+def format_migration_list_item(name, applied=True, **options):
+ if applied:
+ if int(options.get('verbosity')) >= 2:
+ return ' (*) %-80s (applied %s)' % (name, applied)
+ else:
+ return ' (*) %s' % name
+ else:
+ return ' ( ) %s' % name
+
+def diff_migrations(migration1, migration2):
+
+ def model_name(models, model):
+ return models[model].get('Meta', {}).get('object_name', model)
+
+ def field_name(models, model, field):
+ return '%s.%s' % (model_name(models, model), field)
+
+ print(" " + migration2.name())
+
+ models1 = migration1.migration_class().models
+ models2 = migration2.migration_class().models
+
+ # find new models
+ for model in models2.keys():
+ if not model in models1.keys():
+ print(' added model %s' % model_name(models2, model))
+
+ # find removed models
+ for model in models1.keys():
+ if not model in models2.keys():
+ print(' removed model %s' % model_name(models1, model))
+
+ # compare models
+ for model in models1:
+ if model in models2:
+
+ # find added fields
+ for field in models2[model]:
+ if not field in models1[model]:
+ print(' added field %s' % field_name(models2, model, field))
+
+ # find removed fields
+ for field in models1[model]:
+ if not field in models2[model]:
+ print(' removed field %s' % field_name(models1, model, field))
+
+ # compare fields
+ for field in models1[model]:
+ if field in models2[model]:
+
+ name = field_name(models1, model, field)
+
+ # compare field attributes
+ field_value1 = models1[model][field]
+ field_value2 = models2[model][field]
+
+ # if a field has become a class, or vice versa
+ if type(field_value1) != type(field_value2):
+ print(' type of %s changed from %s to %s' % (
+ name, field_value1, field_value2))
+
+ # if class
+ elif isinstance(field_value1, dict):
+ # print ' %s is a class' % name
+ pass
+
+ # else regular field
+ else:
+
+ type1, attr_list1, field_attrs1 = models1[model][field]
+ type2, attr_list2, field_attrs2 = models2[model][field]
+
+ if type1 != type2:
+ print(' %s type changed from %s to %s' % (
+ name, type1, type2))
+
+ if attr_list1 != []:
+ print(' %s list %s is not []' % (
+ name, attr_list1))
+ if attr_list2 != []:
+ print(' %s list %s is not []' % (
+ name, attr_list2))
+ if attr_list1 != attr_list2:
+ print(' %s list changed from %s to %s' % (
+ name, attr_list1, attr_list2))
+
+ # find added field attributes
+ for attr in field_attrs2:
+ if not attr in field_attrs1:
+ print(' added %s attribute %s=%s' % (
+ name, attr, field_attrs2[attr]))
+
+ # find removed field attributes
+ for attr in field_attrs1:
+ if not attr in field_attrs2:
+ print(' removed attribute %s(%s=%s)' % (
+ name, attr, field_attrs1[attr]))
+
+ # compare field attributes
+ for attr in field_attrs1:
+ if attr in field_attrs2:
+
+ value1 = field_attrs1[attr]
+ value2 = field_attrs2[attr]
+ if value1 != value2:
+ print(' %s attribute %s changed from %s to %s' % (
+ name, attr, value1, value2))
+
+ return migration2
diff --git a/lib/python2.7/site-packages/south/management/commands/migrationcheck.py b/lib/python2.7/site-packages/south/management/commands/migrationcheck.py
new file mode 100644
index 0000000..f498d0b
--- /dev/null
+++ b/lib/python2.7/site-packages/south/management/commands/migrationcheck.py
@@ -0,0 +1,67 @@
+from django.core.exceptions import ImproperlyConfigured
+from django.core.management import call_command, CommandError
+from django.core.management.base import BaseCommand
+from django.conf import settings
+from django.db.models import loading
+from django.test import simple
+
+from south.migration import Migrations
+from south.exceptions import NoMigrations
+from south.hacks import hacks
+
+class Command(BaseCommand):
+ help = "Runs migrations for each app in turn, detecting missing depends_on values."
+ usage_str = "Usage: ./manage.py migrationcheck"
+
+ def handle(self, check_app_name=None, **options):
+ runner = simple.DjangoTestSuiteRunner(verbosity=0)
+ err_msg = "Failed to migrate %s; see output for hints at missing dependencies:\n"
+ hacks.patch_flush_during_test_db_creation()
+ failures = 0
+ if check_app_name is None:
+ app_names = settings.INSTALLED_APPS
+ else:
+ app_names = [check_app_name]
+ for app_name in app_names:
+ app_label = app_name.split(".")[-1]
+ if app_name == 'south':
+ continue
+
+ try:
+ Migrations(app_name)
+ except (NoMigrations, ImproperlyConfigured):
+ continue
+ app = loading.get_app(app_label)
+
+ verbosity = int(options.get('verbosity', 1))
+ if verbosity >= 1:
+ self.stderr.write("processing %s\n" % app_name)
+
+ old_config = runner.setup_databases()
+ try:
+ call_command('migrate', app_label, noinput=True, verbosity=verbosity)
+ for model in loading.get_models(app):
+ dummy = model._default_manager.exists()
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except Exception as e:
+ failures += 1
+ if verbosity >= 1:
+ self.stderr.write(err_msg % app_name)
+ self.stderr.write("%s\n" % e)
+ finally:
+ runner.teardown_databases(old_config)
+ if failures > 0:
+ raise CommandError("Missing depends_on found in %s app(s)." % failures)
+ self.stderr.write("No missing depends_on found.\n")
+#
+#for each app:
+# start with blank db.
+# syncdb only south (and contrib?)
+#
+# migrate a single app all the way up. any errors is missing depends_on.
+# for all models of that app, try the default manager:
+# from django.db.models import loading
+# for m in loading.get_models(loading.get_app('a')):
+# m._default_manager.exists()
+# Any error is also a missing depends on.
diff --git a/lib/python2.7/site-packages/south/management/commands/schemamigration.py b/lib/python2.7/site-packages/south/management/commands/schemamigration.py
new file mode 100644
index 0000000..efd4266
--- /dev/null
+++ b/lib/python2.7/site-packages/south/management/commands/schemamigration.py
@@ -0,0 +1,229 @@
+"""
+Startmigration command, version 2.
+"""
+
+from __future__ import print_function
+
+import sys
+import os
+import re
+import string
+import random
+import inspect
+from optparse import make_option
+
+try:
+ set
+except NameError:
+ from sets import Set as set
+
+from django.core.management.base import BaseCommand
+from django.core.management.color import no_style
+from django.core.exceptions import ImproperlyConfigured
+from django.db import models
+from django.conf import settings
+
+from south.migration import Migrations, migrate_app
+from south.models import MigrationHistory
+from south.exceptions import NoMigrations
+from south.creator import changes, actions, freezer
+from south.management.commands.datamigration import Command as DataCommand
+
+class Command(DataCommand):
+ option_list = DataCommand.option_list + (
+ make_option('--add-model', action='append', dest='added_model_list', type='string',
+ help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --add-model parameters.'),
+ make_option('--add-field', action='append', dest='added_field_list', type='string',
+ help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
+ make_option('--add-index', action='append', dest='added_index_list', type='string',
+ help='Generate an Add Index migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
+ make_option('--initial', action='store_true', dest='initial', default=False,
+ help='Generate the initial schema for the app.'),
+ make_option('--auto', action='store_true', dest='auto', default=False,
+ help='Attempt to automatically detect differences from the last migration.'),
+ make_option('--empty', action='store_true', dest='empty', default=False,
+ help='Make a blank migration.'),
+ make_option('--update', action='store_true', dest='update', default=False,
+ help='Update the most recent migration instead of creating a new one. Rollback this migration if it is already applied.'),
+ )
+ help = "Creates a new template schema migration for the given app"
+ usage_str = "Usage: ./manage.py schemamigration appname migrationname [--empty] [--initial] [--auto] [--add-model ModelName] [--add-field ModelName.field_name] [--stdout]"
+
+ def handle(self, app=None, name="", added_model_list=None, added_field_list=None, freeze_list=None, initial=False, auto=False, stdout=False, added_index_list=None, verbosity=1, empty=False, update=False, **options):
+
+ # Any supposed lists that are None become empty lists
+ added_model_list = added_model_list or []
+ added_field_list = added_field_list or []
+ added_index_list = added_index_list or []
+ freeze_list = freeze_list or []
+
+ # --stdout means name = -
+ if stdout:
+ name = "-"
+
+ # Only allow valid names
+ if re.search('[^_\w]', name) and name != "-":
+ self.error("Migration names should contain only alphanumeric characters and underscores.")
+
+        # Make sure options are compatible
+ if initial and (added_model_list or added_field_list or auto):
+ self.error("You cannot use --initial and other options together\n" + self.usage_str)
+
+ if auto and (added_model_list or added_field_list or initial):
+ self.error("You cannot use --auto and other options together\n" + self.usage_str)
+
+ if not app:
+ self.error("You must provide an app to create a migration for.\n" + self.usage_str)
+
+ # See if the app exists
+ app = app.split(".")[-1]
+ try:
+ app_module = models.get_app(app)
+ except ImproperlyConfigured:
+ print("There is no enabled application matching '%s'." % app)
+ return
+
+ # Get the Migrations for this app (creating the migrations dir if needed)
+ migrations = Migrations(app, force_creation=True, verbose_creation=int(verbosity) > 0)
+
+ # What actions do we need to do?
+ if auto:
+ # Get the old migration
+ try:
+ last_migration = migrations[-2 if update else -1]
+ except IndexError:
+ self.error("You cannot use --auto on an app with no migrations. Try --initial.")
+ # Make sure it has stored models
+ if migrations.app_label() not in getattr(last_migration.migration_class(), "complete_apps", []):
+ self.error("You cannot use automatic detection, since the previous migration does not have this whole app frozen.\nEither make migrations using '--freeze %s' or set 'SOUTH_AUTO_FREEZE_APP = True' in your settings.py." % migrations.app_label())
+ # Alright, construct two model dicts to run the differ on.
+ old_defs = dict(
+ (k, v) for k, v in last_migration.migration_class().models.items()
+ if k.split(".")[0] == migrations.app_label()
+ )
+ new_defs = dict(
+ (k, v) for k, v in freezer.freeze_apps([migrations.app_label()]).items()
+ if k.split(".")[0] == migrations.app_label()
+ )
+ change_source = changes.AutoChanges(
+ migrations = migrations,
+ old_defs = old_defs,
+ old_orm = last_migration.orm(),
+ new_defs = new_defs,
+ )
+
+ elif initial:
+ # Do an initial migration
+ change_source = changes.InitialChanges(migrations)
+
+ else:
+ # Read the commands manually off of the arguments
+ if (added_model_list or added_field_list or added_index_list):
+ change_source = changes.ManualChanges(
+ migrations,
+ added_model_list,
+ added_field_list,
+ added_index_list,
+ )
+ elif empty:
+ change_source = None
+ else:
+ print("You have not passed any of --initial, --auto, --empty, --add-model, --add-field or --add-index.", file=sys.stderr)
+ sys.exit(1)
+
+ # Validate this so we can access the last migration without worrying
+ if update and not migrations:
+ self.error("You cannot use --update on an app with no migrations.")
+
+ # if not name, there's an error
+ if not name:
+ if change_source:
+ name = change_source.suggest_name()
+ if update:
+ name = re.sub(r'^\d{4}_', '', migrations[-1].name())
+ if not name:
+ self.error("You must provide a name for this migration\n" + self.usage_str)
+
+ # Get the actions, and then insert them into the actions lists
+ forwards_actions = []
+ backwards_actions = []
+ if change_source:
+ for action_name, params in change_source.get_changes():
+ # Run the correct Action class
+ try:
+ action_class = getattr(actions, action_name)
+ except AttributeError:
+ raise ValueError("Invalid action name from source: %s" % action_name)
+ else:
+ action = action_class(**params)
+ action.add_forwards(forwards_actions)
+ action.add_backwards(backwards_actions)
+ print(action.console_line(), file=sys.stderr)
+
+ # Nowt happen? That's not good for --auto.
+ if auto and not forwards_actions:
+ self.error("Nothing seems to have changed.")
+
+ # Work out which apps to freeze
+ apps_to_freeze = self.calc_frozen_apps(migrations, freeze_list)
+
+ # So, what's in this file, then?
+ file_contents = self.get_migration_template() % {
+ "forwards": "\n".join(forwards_actions or [" pass"]),
+ "backwards": "\n".join(backwards_actions or [" pass"]),
+ "frozen_models": freezer.freeze_apps_to_string(apps_to_freeze),
+ "complete_apps": apps_to_freeze and "complete_apps = [%s]" % (", ".join(map(repr, apps_to_freeze))) or ""
+ }
+
+ # Deal with update mode as late as possible, avoid a rollback as long
+ # as something else can go wrong.
+ if update:
+ last_migration = migrations[-1]
+ if MigrationHistory.objects.filter(applied__isnull=False, app_name=app, migration=last_migration.name()):
+ print("Migration to be updated, %s, is already applied, rolling it back now..." % last_migration.name(), file=sys.stderr)
+ migrate_app(migrations, 'current-1', verbosity=verbosity)
+ for ext in ('py', 'pyc'):
+ old_filename = "%s.%s" % (os.path.join(migrations.migrations_dir(), last_migration.filename), ext)
+ if os.path.isfile(old_filename):
+ os.unlink(old_filename)
+ migrations.remove(last_migration)
+
+ # See what filename is next in line. We assume they use numbers.
+ new_filename = migrations.next_filename(name)
+
+ # - is a special name which means 'print to stdout'
+ if name == "-":
+ print(file_contents)
+ # Write the migration file if the name isn't -
+ else:
+ fp = open(os.path.join(migrations.migrations_dir(), new_filename), "w")
+ fp.write(file_contents)
+ fp.close()
+ verb = 'Updated' if update else 'Created'
+ if empty:
+ print("%s %s. You must now edit this migration and add the code for each direction." % (verb, new_filename), file=sys.stderr)
+ else:
+ print("%s %s. You can now apply this migration with: ./manage.py migrate %s" % (verb, new_filename, app), file=sys.stderr)
+
+ def get_migration_template(self):
+ return MIGRATION_TEMPLATE
+
+
+MIGRATION_TEMPLATE = """# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+%(forwards)s
+
+ def backwards(self, orm):
+%(backwards)s
+
+ models = %(frozen_models)s
+
+ %(complete_apps)s"""
diff --git a/lib/python2.7/site-packages/south/management/commands/startmigration.py b/lib/python2.7/site-packages/south/management/commands/startmigration.py
new file mode 100644
index 0000000..e4fcf45
--- /dev/null
+++ b/lib/python2.7/site-packages/south/management/commands/startmigration.py
@@ -0,0 +1,33 @@
+"""
+Now-obsolete startmigration command.
+"""
+
+from __future__ import print_function
+
+from optparse import make_option
+
+from django.core.management.base import BaseCommand
+from django.core.management.color import no_style
+
+class Command(BaseCommand):
+ option_list = BaseCommand.option_list + (
+ make_option('--model', action='append', dest='added_model_list', type='string',
+ help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --add-model parameters.'),
+ make_option('--add-field', action='append', dest='added_field_list', type='string',
+ help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
+ make_option('--add-index', action='append', dest='added_index_list', type='string',
+ help='Generate an Add Index migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
+ make_option('--initial', action='store_true', dest='initial', default=False,
+ help='Generate the initial schema for the app.'),
+ make_option('--auto', action='store_true', dest='auto', default=False,
+ help='Attempt to automatically detect differences from the last migration.'),
+ make_option('--freeze', action='append', dest='freeze_list', type='string',
+ help='Freeze the specified model(s). Pass in either an app name (to freeze the whole app) or a single model, as appname.modelname.'),
+ make_option('--stdout', action='store_true', dest='stdout', default=False,
+ help='Print the migration to stdout instead of writing it to a file.'),
+ )
+ help = "Deprecated command"
+
+ def handle(self, app=None, name="", added_model_list=None, added_field_list=None, initial=False, freeze_list=None, auto=False, stdout=False, added_index_list=None, **options):
+
+ print("The 'startmigration' command is now deprecated; please use the new 'schemamigration' and 'datamigration' commands.")
diff --git a/lib/python2.7/site-packages/south/management/commands/syncdb.py b/lib/python2.7/site-packages/south/management/commands/syncdb.py
new file mode 100644
index 0000000..17fc22c
--- /dev/null
+++ b/lib/python2.7/site-packages/south/management/commands/syncdb.py
@@ -0,0 +1,115 @@
+"""
+Overridden syncdb command
+"""
+
+from __future__ import print_function
+
+import sys
+from optparse import make_option
+
+from django.core.management.base import NoArgsCommand, BaseCommand
+from django.core.management.color import no_style
+from django.utils.datastructures import SortedDict
+from django.core.management.commands import syncdb
+from django.conf import settings
+from django.db import models
+from django.db.models.loading import cache
+from django.core import management
+
+from south.db import dbs
+from south import migration
+from south.exceptions import NoMigrations
+
def get_app_label(app):
    "Return the app's dotted path: everything before the final module component."
    parts = app.__name__.split('.')
    return '.'.join(parts[:-1])
+
class Command(NoArgsCommand):
    """
    Replacement for Django's syncdb that skips apps managed by South.

    Apps without a migrations package are synced normally; migrated apps are
    left for the 'migrate' command (which runs afterwards when --migrate is
    given).
    """

    option_list = syncdb.Command.option_list + (
        make_option('--migrate', action='store_true', dest='migrate', default=False,
            help='Tells South to also perform migrations after the sync. Default for during testing, and other internal calls.'),
        make_option('--all', action='store_true', dest='migrate_all', default=False,
            help='Makes syncdb work on all apps, even migrated ones. Be careful!'),
    )
    # Older Django releases did not define --verbosity on syncdb; add it here
    # so handle_noargs can always read the option.
    if '--verbosity' not in [opt.get_opt_string() for opt in syncdb.Command.option_list]:
        option_list += (
            make_option('--verbosity', action='store', dest='verbosity', default='1',
                type='choice', choices=['0', '1', '2'],
                help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
        )
    help = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created, except those which use migrations."

    def handle_noargs(self, migrate_all=False, **options):
        """
        Sync unmigrated apps, then optionally run migrations.

        migrate_all -- when True, treat every app as unmigrated and sync it.
        """
        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        # This is copied from Django, to fix bug #511.
        try:
            from django.utils.importlib import import_module
        except ImportError:
            pass # TODO: Remove, only for Django1.0
        else:
            for app_name in settings.INSTALLED_APPS:
                try:
                    import_module('.management', app_name)
                except ImportError as exc:
                    msg = exc.args[0]
                    # Only swallow "no management module" errors; re-raise
                    # genuine import failures inside the management module.
                    if not msg.startswith('No module named') or 'management' not in msg:
                        raise

        # Work out what uses migrations and so doesn't need syncing
        apps_needing_sync = []
        apps_migrated = []
        for app in models.get_apps():
            app_label = get_app_label(app)
            if migrate_all:
                apps_needing_sync.append(app_label)
            else:
                try:
                    migrations = migration.Migrations(app_label)
                except NoMigrations:
                    # It needs syncing
                    apps_needing_sync.append(app_label)
                else:
                    # This is a migrated app, leave it
                    apps_migrated.append(app_label)
        verbosity = int(options.get('verbosity', 0))

        # Run syncdb on only the ones needed
        if verbosity:
            print("Syncing...")

        # Temporarily hide the migrated apps from Django's settings and app
        # cache so the stock syncdb only sees the apps we want synced.
        old_installed, settings.INSTALLED_APPS = settings.INSTALLED_APPS, apps_needing_sync
        old_app_store, cache.app_store = cache.app_store, SortedDict([
            (k, v) for (k, v) in cache.app_store.items()
            if get_app_label(k) in apps_needing_sync
        ])

        # This will allow the setting of the MySQL storage engine, for example.
        for db in dbs.values():
            db.connection_init()

        # OK, run the actual syncdb
        syncdb.Command().execute(**options)

        # Restore the full app list before migrating.
        settings.INSTALLED_APPS = old_installed
        cache.app_store = old_app_store

        # Migrate if needed
        if options.get('migrate', True):
            if verbosity:
                print("Migrating...")
            # convert from store_true to store_false
            options['no_initial_data'] = not options.get('load_initial_data', True)
            management.call_command('migrate', **options)

        # Be obvious about what we did
        if verbosity:
            print("\nSynced:\n > %s" % "\n > ".join(apps_needing_sync))

        if options.get('migrate', True):
            if verbosity:
                print("\nMigrated:\n - %s" % "\n - ".join(apps_migrated))
        else:
            if verbosity:
                print("\nNot synced (use migrations):\n - %s" % "\n - ".join(apps_migrated))
                print("(use ./manage.py migrate to migrate these)")
diff --git a/lib/python2.7/site-packages/south/management/commands/test.py b/lib/python2.7/site-packages/south/management/commands/test.py
new file mode 100644
index 0000000..9901786
--- /dev/null
+++ b/lib/python2.7/site-packages/south/management/commands/test.py
@@ -0,0 +1,8 @@
+from django.core.management.commands import test
+
+from south.management.commands import patch_for_test_db_setup
+
class Command(test.Command):
    """
    'test' command override: installs South's test-database setup patch
    before delegating to Django's stock test command.
    """

    def handle(self, *args, **kwargs):
        # Patch first so the test database creation goes through South.
        patch_for_test_db_setup()
        super(Command, self).handle(*args, **kwargs)
diff --git a/lib/python2.7/site-packages/south/management/commands/testserver.py b/lib/python2.7/site-packages/south/management/commands/testserver.py
new file mode 100644
index 0000000..3c3c4b5
--- /dev/null
+++ b/lib/python2.7/site-packages/south/management/commands/testserver.py
@@ -0,0 +1,8 @@
+from django.core.management.commands import testserver
+
+from south.management.commands import patch_for_test_db_setup
+
class Command(testserver.Command):
    """
    'testserver' command override: installs South's test-database setup patch
    before delegating to Django's stock testserver command.
    """

    def handle(self, *args, **kwargs):
        # Patch first so the test database creation goes through South.
        patch_for_test_db_setup()
        super(Command, self).handle(*args, **kwargs)
diff --git a/lib/python2.7/site-packages/south/migration/__init__.py b/lib/python2.7/site-packages/south/migration/__init__.py
new file mode 100644
index 0000000..1d91ddf
--- /dev/null
+++ b/lib/python2.7/site-packages/south/migration/__init__.py
@@ -0,0 +1,235 @@
+"""
+Main migration logic.
+"""
+
+from __future__ import print_function
+
+import sys
+
+from django.core.exceptions import ImproperlyConfigured
+
+import south.db
+from south import exceptions
+from south.models import MigrationHistory
+from south.db import db, DEFAULT_DB_ALIAS
+from south.migration.migrators import (Backwards, Forwards,
+ DryRunMigrator, FakeMigrator,
+ LoadInitialDataMigrator)
+from south.migration.base import Migration, Migrations
+from south.migration.utils import SortedSet
+from south.migration.base import all_migrations
+from south.signals import pre_migrate, post_migrate
+
+
def to_apply(forwards, done):
    "Return the migrations from *forwards* that are not yet in *done*, in order."
    pending = []
    for migration in forwards:
        if migration not in done:
            pending.append(migration)
    return pending
+
def to_unapply(backwards, done):
    "Return the migrations from *backwards* that ARE in *done*, in order."
    applied = []
    for migration in backwards:
        if migration in done:
            applied.append(migration)
    return applied
+
def problems(pending, done):
    """
    Scan *pending* and yield (applied, missing) pairs: each already-applied
    migration paired with a later migration that has not been applied.

    pending -- iterable of migrations in plan order
    done -- set of migrations already applied
    """
    last = None
    if not pending:
        # Bug fix: the original used `raise StopIteration()` here. Inside a
        # generator that is a RuntimeError under PEP 479 (Python 3.7+); a
        # plain return ends the generator identically on Python 2 and 3.
        return
    for migration in pending:
        if migration in done:
            # Remember the most recent applied migration.
            last = migration
            continue
        if last and migration not in done:
            yield last, migration
+
def forwards_problems(pending, done, verbosity):
    """
    Takes the list of linearised pending migrations, and the set of done ones,
    and returns the list of problems, if any.
    """
    # Scan the forwards plan from the end backwards for applied/missing pairs.
    candidates = problems(reversed(pending), done)
    return inner_problem_check(candidates, done, verbosity)
+
def backwards_problems(pending, done, verbosity):
    "Like forwards_problems, but scans the pending list in its given order."
    candidates = problems(pending, done)
    return inner_problem_check(candidates, done, verbosity)
+
def inner_problem_check(problems, done, verbosity):
    "Takes a set of possible problems and gets the actual issues out of it."
    issues = []
    for last, migration in problems:
        seen = set()
        # 'last' is the last applied migration. Walk back through its
        # dependency graph; any dependency that was never applied is an issue.
        stack = list(last.dependencies)
        while stack:
            candidate = stack.pop()
            if candidate in seen:
                continue
            seen.add(candidate)
            if candidate in done:
                # Applied: keep descending into its own dependencies.
                stack.extend(candidate.dependencies)
            else:
                # That's bad. Error.
                if verbosity:
                    print((" ! Migration %s should not have been applied "
                          "before %s but was." % (last, candidate)))
                issues.append((last, candidate))
    return issues
+
def check_migration_histories(histories, delete_ghosts=False, ignore_ghosts=False):
    "Checks that there's no 'ghost' migrations in the database."
    # Migrations recorded in the DB whose on-disk module still loads.
    exists = SortedSet()
    # History rows whose migration file can no longer be found/imported.
    ghosts = []
    for h in histories:
        try:
            m = h.get_migration()
            m.migration()  # force the module import to prove the file exists
        except exceptions.UnknownMigration:
            ghosts.append(h)
        except ImproperlyConfigured:
            pass # Ignore missing applications
        else:
            exists.add(m)
    if ghosts:
        # They may want us to delete ghosts.
        if delete_ghosts:
            for h in ghosts:
                h.delete()
        elif not ignore_ghosts:
            raise exceptions.GhostMigrations(ghosts)
    return exists
+
def get_dependencies(target, migrations):
    """
    Work out the forwards and backwards plans for migrating to *target*.

    Returns a pair of zero-argument callables; the builtin `list` (which
    yields an empty list when called) stands in for "no plan on this side".
    """
    # `list` acts as the do-nothing plan: calling it produces [].
    forwards = backwards = list
    if target is None:
        # Migrating to zero: unapply everything from the first migration up.
        backwards = migrations[0].backwards_plan
    else:
        forwards = target.forwards_plan
        # When migrating backwards we want to remove up to and
        # including the next migration up in this app (not the next
        # one, that includes other apps)
        next_in_app = target.next()
        if next_in_app:
            backwards = next_in_app.backwards_plan
    return forwards, backwards
+
def get_direction(target, applied, migrations, verbosity, interactive):
    """
    Decide whether reaching *target* requires migrating forwards or backwards.

    Returns (direction, problems, workplan): a Forwards/Backwards migrator
    (or None when nothing needs doing), any history inconsistencies found,
    and the ordered list of migrations to (un)apply.
    """
    # Get the forwards and reverse dependencies for this target
    forwards, backwards = get_dependencies(target, migrations)
    # Is the whole forward branch applied?
    problems = None
    forwards = forwards()  # materialise: get_dependencies returns callables
    workplan = to_apply(forwards, applied)
    if not workplan:
        # If they're all applied, we only know it's not backwards
        direction = None
    else:
        # If the remaining migrations are strictly a right segment of
        # the forwards trace, we just need to go forwards to our
        # target (and check for badness)
        problems = forwards_problems(forwards, applied, verbosity)
        direction = Forwards(verbosity=verbosity, interactive=interactive)
    if not problems:
        # What about the whole backward trace then?
        backwards = backwards()
        missing_backwards = to_apply(backwards, applied)
        if missing_backwards != backwards:
            # If what's missing is a strict left segment of backwards (i.e.
            # all the higher migrations) then we need to go backwards
            workplan = to_unapply(backwards, applied)
            problems = backwards_problems(backwards, applied, verbosity)
            direction = Backwards(verbosity=verbosity, interactive=interactive)
    return direction, problems, workplan
+
def get_migrator(direction, db_dry_run, fake, load_initial_data):
    "Wrap *direction* in the first matching migrator decorator, if any."
    if not direction:
        return direction
    if db_dry_run:
        return DryRunMigrator(migrator=direction, ignore_fail=False)
    if fake:
        return FakeMigrator(migrator=direction)
    if load_initial_data:
        return LoadInitialDataMigrator(migrator=direction)
    return direction
+
def get_unapplied_migrations(migrations, applied_migrations):
    "Yield each migration in *migrations* absent from *applied_migrations*."
    applied_names = set(
        '%s.%s' % (mi.app_name, mi.migration) for mi in applied_migrations
    )
    for candidate in migrations:
        full_name = '%s.%s' % (candidate.app_label(), candidate.name())
        if full_name not in applied_names:
            yield candidate
+
def migrate_app(migrations, target_name=None, merge=False, fake=False, db_dry_run=False, yes=False, verbosity=0, load_initial_data=False, skip=False, database=DEFAULT_DB_ALIAS, delete_ghosts=False, ignore_ghosts=False, interactive=False):
    """
    Migrate the app represented by *migrations* to *target_name*.

    target_name -- migration name/prefix, 'zero', the relative pseudo-targets
                   'current-1'/'current+1', or None for the latest migration.
    Raises exceptions.InconsistentMigrationHistory when problems are found
    and neither merge nor skip is set.
    """
    app_label = migrations.app_label()

    verbosity = int(verbosity)
    # Fire off the pre-migrate signal
    # NOTE(review): `interactive=verbosity` looks like it should be
    # `interactive=interactive` (same in the post_migrate sends below) --
    # confirm against the signal receivers before changing.
    pre_migrate.send(None, app=app_label, verbosity=verbosity, interactive=verbosity, db=database)

    # If there aren't any, quit quizically
    if not migrations:
        print("? You have no migrations for the '%s' app. You might want some." % app_label)
        return

    # Load the entire dependency graph
    Migrations.calculate_dependencies()

    # Check there's no strange ones in the database
    applied_all = MigrationHistory.objects.filter(applied__isnull=False).order_by('applied').using(database)
    applied = applied_all.filter(app_name=app_label).using(database)
    # Point the module-level db handle at the database being migrated.
    south.db.db = south.db.dbs[database]
    Migrations.invalidate_all_modules()

    # Echo SQL at verbosity > 1.
    south.db.db.debug = (verbosity > 1)

    # Resolve the relative pseudo-targets to concrete migration names.
    if target_name == 'current-1':
        if applied.count() > 1:
            previous_migration = applied[applied.count() - 2]
            if verbosity:
                print('previous_migration: %s (applied: %s)' % (previous_migration.migration, previous_migration.applied))
            target_name = previous_migration.migration
        else:
            if verbosity:
                print('previous_migration: zero')
            target_name = 'zero'
    elif target_name == 'current+1':
        try:
            # NOTE: generator .next() is Python 2 only (this tree targets 2.7).
            first_unapplied_migration = get_unapplied_migrations(migrations, applied).next()
            target_name = first_unapplied_migration.name()
        except StopIteration:
            # Nothing unapplied: fall through to "latest".
            target_name = None

    applied_all = check_migration_histories(applied_all, delete_ghosts, ignore_ghosts)

    # Guess the target_name
    target = migrations.guess_migration(target_name)
    if verbosity:
        if target_name not in ('zero', None) and target.name() != target_name:
            print(" - Soft matched migration %s to %s." % (target_name,
                                                           target.name()))
        print("Running migrations for %s:" % app_label)

    # Get the forwards and reverse dependencies for this target
    direction, problems, workplan = get_direction(target, applied_all, migrations,
                                                  verbosity, interactive)
    if problems and not (merge or skip):
        raise exceptions.InconsistentMigrationHistory(problems)

    # Perform the migration
    migrator = get_migrator(direction, db_dry_run, fake, load_initial_data)
    if migrator:
        migrator.print_title(target)
        success = migrator.migrate_many(target, workplan, database)
        # Finally, fire off the post-migrate signal
        if success:
            post_migrate.send(None, app=app_label, verbosity=verbosity, interactive=verbosity, db=database)
    else:
        if verbosity:
            # Say there's nothing.
            print('- Nothing to migrate.')
        # If we have initial data enabled, and we're at the most recent
        # migration, do initial data.
        # Note: We use a fake Forwards() migrator here. It's never used really.
        if load_initial_data:
            migrator = LoadInitialDataMigrator(migrator=Forwards(verbosity=verbosity))
            migrator.load_initial_data(target, db=database)
        # Send signal.
        post_migrate.send(None, app=app_label, verbosity=verbosity, interactive=verbosity, db=database)
diff --git a/lib/python2.7/site-packages/south/migration/base.py b/lib/python2.7/site-packages/south/migration/base.py
new file mode 100644
index 0000000..8bd6a5a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/migration/base.py
@@ -0,0 +1,440 @@
+from __future__ import print_function
+
+from collections import deque
+import datetime
+from imp import reload
+import os
+import re
+import sys
+
+from django.core.exceptions import ImproperlyConfigured
+from django.db import models
+from django.conf import settings
+from django.utils import importlib
+
+from south import exceptions
+from south.migration.utils import depends, dfs, flatten, get_app_label
+from south.orm import FakeORM
+from south.utils import memoize, ask_for_it_by_name, datetime_utils
+from south.migration.utils import app_label_to_app_module
+from south.utils.py3 import string_types, with_metaclass
+
def all_migrations(applications=None):
    """
    Returns all Migrations for all `applications` that are migrated.
    """
    if applications is None:
        applications = models.get_apps()
    for model_module in applications:
        # The app they've passed is the models module - go up one level
        parent_path = ".".join(model_module.__name__.split(".")[:-1])
        app = ask_for_it_by_name(parent_path)
        try:
            yield Migrations(app)
        except exceptions.NoMigrations:
            # Unmigrated app: simply skip it.
            continue
+
+
def application_to_app_label(application):
    "Works out the app label from either the app label, the app name, or the module"
    if isinstance(application, string_types):
        dotted = application
    else:
        dotted = application.__name__
    return dotted.split('.')[-1]
+
+
class MigrationsMetaclass(type):

    """
    Metaclass which ensures there is only one instance of a Migrations for
    any given app.
    """

    def __init__(self, name, bases, dict):
        super(MigrationsMetaclass, self).__init__(name, bases, dict)
        # Per-class registry: app_label -> Migrations instance.
        self.instances = {}

    def __call__(self, application, **kwds):
        """Return the cached Migrations for *application*, creating it on first use."""
        app_label = application_to_app_label(application)

        # If we don't already have an instance, make one
        if app_label not in self.instances:
            self.instances[app_label] = super(MigrationsMetaclass, self).__call__(app_label_to_app_module(app_label), **kwds)

        return self.instances[app_label]

    def _clear_cache(self):
        "Clears the cache of Migration objects."
        self.instances = {}
+
+
class Migrations(with_metaclass(MigrationsMetaclass, list)):
    """
    Holds a list of Migration objects for a particular app.

    Instances are cached per app by MigrationsMetaclass, and the object
    itself IS the ordered list of its Migration objects.
    """

    # Accept .pyc-only migration files when SOUTH_USE_PYC is set (e.g.
    # source-stripped deployments); otherwise only .py files count.
    if getattr(settings, "SOUTH_USE_PYC", False):
        MIGRATION_FILENAME = re.compile(r'(?!__init__)'   # Don't match __init__.py
                                        r'[0-9a-zA-Z_]*'  # Don't match dotfiles, or names with dots/invalid chars in them
                                        r'(\.pyc?)?$')    # Match .py or .pyc files, or module dirs
    else:
        MIGRATION_FILENAME = re.compile(r'(?!__init__)'   # Don't match __init__.py
                                        r'[0-9a-zA-Z_]*'  # Don't match dotfiles, or names with dots/invalid chars in them
                                        r'(\.py)?$')      # Match only .py files, or module dirs

    def __init__(self, application, force_creation=False, verbose_creation=True):
        "Constructor. Takes the module of the app, NOT its models (like get_app returns)"
        # name -> Migration cache used by self.migration().
        self._cache = {}
        self.set_application(application, force_creation, verbose_creation)

    def create_migrations_directory(self, verbose=True):
        "Given an application, ensures that the migrations directory is ready."
        migrations_dir = self.migrations_dir()
        # Make the directory if it's not already there
        if not os.path.isdir(migrations_dir):
            if verbose:
                print("Creating migrations directory at '%s'..." % migrations_dir)
            os.mkdir(migrations_dir)
        # Same for __init__.py
        init_path = os.path.join(migrations_dir, "__init__.py")
        if not os.path.isfile(init_path):
            # Touch the init py file
            if verbose:
                print("Creating __init__.py in '%s'..." % migrations_dir)
            open(init_path, "w").close()

    def migrations_dir(self):
        """
        Returns the full path of the migrations directory.
        If it doesn't exist yet, returns where it would exist, based on the
        app's migrations module (defaults to app.migrations)
        """
        module_path = self.migrations_module()
        try:
            module = importlib.import_module(module_path)
        except ImportError:
            # There's no migrations module made yet; guess!
            try:
                parent = importlib.import_module(".".join(module_path.split(".")[:-1]))
            except ImportError:
                # The parent doesn't even exist, that's an issue.
                raise exceptions.InvalidMigrationModule(
                    application = self.application.__name__,
                    module = module_path,
                )
            else:
                # Good guess.
                return os.path.join(os.path.dirname(parent.__file__), module_path.split(".")[-1])
        else:
            # Get directory directly
            return os.path.dirname(module.__file__)

    def migrations_module(self):
        "Returns the module name of the migrations module for this"
        app_label = application_to_app_label(self.application)
        if hasattr(settings, "SOUTH_MIGRATION_MODULES"):
            if app_label in settings.SOUTH_MIGRATION_MODULES:
                # There's an override.
                return settings.SOUTH_MIGRATION_MODULES[app_label]
        # Default: the app's own 'migrations' subpackage.
        return self._application.__name__ + '.migrations'

    def get_application(self):
        # Accessor half of the `application` property defined below.
        return self._application

    def set_application(self, application, force_creation=False, verbose_creation=True):
        """
        Called when the application for this Migrations is set.
        Imports the migrations module object, and throws a paddy if it can't.
        """
        self._application = application
        if not hasattr(application, 'migrations'):
            try:
                module = importlib.import_module(self.migrations_module())
                self._migrations = application.migrations = module
            except ImportError:
                if force_creation:
                    # Create the package on disk, then import it.
                    self.create_migrations_directory(verbose_creation)
                    module = importlib.import_module(self.migrations_module())
                    self._migrations = application.migrations = module
                else:
                    raise exceptions.NoMigrations(application)
        self._load_migrations_module(application.migrations)

    application = property(get_application, set_application)

    def _load_migrations_module(self, module):
        # Scan the migrations directory and populate self (a list) with
        # Migration objects, sorted by filename.
        self._migrations = module
        filenames = []
        dirname = self.migrations_dir()
        for f in os.listdir(dirname):
            if self.MIGRATION_FILENAME.match(os.path.basename(f)):
                full_path = os.path.join(dirname, f)
                # If it's a .pyc file, only append if the .py isn't already around
                if f.endswith(".pyc") and (os.path.isfile(full_path[:-1])):
                    continue
                # If it's a module directory, only append if it contains __init__.py[c].
                if os.path.isdir(full_path):
                    if not (os.path.isfile(os.path.join(full_path, "__init__.py")) or \
                      (getattr(settings, "SOUTH_USE_PYC", False) and \
                       os.path.isfile(os.path.join(full_path, "__init__.pyc")))):
                        continue
                filenames.append(f)
        filenames.sort()
        self.extend(self.migration(f) for f in filenames)

    def migration(self, filename):
        "Returns the (cached) Migration object for *filename*."
        name = Migration.strip_filename(filename)
        if name not in self._cache:
            self._cache[name] = Migration(self, name)
        return self._cache[name]

    def __getitem__(self, value):
        # Allow lookup by migration name as well as by list index.
        if isinstance(value, string_types):
            return self.migration(value)
        return super(Migrations, self).__getitem__(value)

    def _guess_migration(self, prefix):
        # Resolve a possibly-partial migration name to exactly one match.
        prefix = Migration.strip_filename(prefix)
        matches = [m for m in self if m.name().startswith(prefix)]
        if len(matches) == 1:
            return matches[0]
        elif len(matches) > 1:
            raise exceptions.MultiplePrefixMatches(prefix, matches)
        else:
            raise exceptions.UnknownMigration(prefix, None)

    def guess_migration(self, target_name):
        """
        Resolve *target_name* to a Migration.

        'zero' (or an empty app) resolves to None; None means "latest".
        """
        if target_name == 'zero' or not self:
            return
        elif target_name is None:
            return self[-1]
        else:
            return self._guess_migration(prefix=target_name)

    def app_label(self):
        "Returns this app's label (last dotted component of the module path)."
        return self._application.__name__.split('.')[-1]

    def full_name(self):
        "Returns the dotted module path of the migrations package."
        return self._migrations.__name__

    @classmethod
    def calculate_dependencies(cls, force=False):
        "Goes through all the migrations, and works out the dependencies."
        # Only do this once per process unless forced.
        if getattr(cls, "_dependencies_done", False) and not force:
            return
        for migrations in all_migrations():
            for migration in migrations:
                migration.calculate_dependencies()
        cls._dependencies_done = True

    @staticmethod
    def invalidate_all_modules():
        "Goes through all the migrations, and invalidates all cached modules."
        for migrations in all_migrations():
            for migration in migrations:
                migration.invalidate_module()

    def next_filename(self, name):
        "Returns the fully-formatted filename of what a new migration 'name' would be"
        highest_number = 0
        for migration in self:
            try:
                number = int(migration.name().split("_")[0])
                highest_number = max(highest_number, number)
            except ValueError:
                # Non-numeric prefix (hand-named migration): ignore it.
                pass
        # Work out the new filename
        return "%04i_%s.py" % (
            highest_number + 1,
            name,
        )
+
+
class Migration(object):

    """
    Class which represents a particular migration file on-disk.
    """

    def __init__(self, migrations, filename):
        """
        Returns the migration class implied by 'filename'.
        """
        self.migrations = migrations   # the owning per-app Migrations list
        self.filename = filename
        self.dependencies = set()      # migrations that must run before this one
        self.dependents = set()        # migrations that depend on this one

    def __str__(self):
        return self.app_label() + ':' + self.name()

    def __repr__(self):
        return '<Migration: %s>' % str(self)

    def __eq__(self, other):
        # Identity is (app label, name); the planners rely on this for sets.
        return self.app_label() == other.app_label() and self.name() == other.name()

    def __hash__(self):
        # Consistent with __eq__, which compares the same two components.
        return hash(str(self))

    def app_label(self):
        "Returns the label of the app this migration belongs to."
        return self.migrations.app_label()

    @staticmethod
    def strip_filename(filename):
        "Strips any directory part and extension from *filename*."
        return os.path.splitext(os.path.basename(filename))[0]

    def name(self):
        "Returns the bare migration name (filename without extension)."
        return self.strip_filename(os.path.basename(self.filename))

    def full_name(self):
        "Returns the dotted import path of this migration's module."
        return self.migrations.full_name() + '.' + self.name()

    def migration(self):
        "Tries to load the actual migration module"
        full_name = self.full_name()
        try:
            # Reuse an already-imported module when possible.
            migration = sys.modules[full_name]
        except KeyError:
            try:
                migration = __import__(full_name, {}, {}, ['Migration'])
            except ImportError as e:
                raise exceptions.UnknownMigration(self, sys.exc_info())
            except Exception as e:
                raise exceptions.BrokenMigration(self, sys.exc_info())
        # Override some imports
        migration._ = lambda x: x # Fake i18n
        migration.datetime = datetime_utils
        return migration
    migration = memoize(migration)

    def migration_class(self):
        "Returns the Migration class from the module"
        return self.migration().Migration

    def migration_instance(self):
        "Instantiates the migration_class"
        return self.migration_class()()
    migration_instance = memoize(migration_instance)

    def previous(self):
        "Returns the migration that comes before this one in the sequence."
        index = self.migrations.index(self) - 1
        if index < 0:
            return None
        return self.migrations[index]
    previous = memoize(previous)

    def next(self):
        "Returns the migration that comes after this one in the sequence."
        index = self.migrations.index(self) + 1
        if index >= len(self.migrations):
            return None
        return self.migrations[index]
    next = memoize(next)

    def _get_dependency_objects(self, attrname):
        """
        Given the name of an attribute (depends_on or needed_by), either yields
        a list of migration objects representing it, or errors out.
        """
        for app, name in getattr(self.migration_class(), attrname, []):
            try:
                migrations = Migrations(app)
            except ImproperlyConfigured:
                raise exceptions.DependsOnUnmigratedApplication(self, app)
            migration = migrations.migration(name)
            try:
                # Force the module import to verify the dependency exists.
                migration.migration()
            except exceptions.UnknownMigration:
                raise exceptions.DependsOnUnknownMigration(self, migration)
            if migration.is_before(self) == False:
                raise exceptions.DependsOnHigherMigration(self, migration)
            yield migration

    def calculate_dependencies(self):
        """
        Loads dependency info for this migration, and stores it in itself
        and any other relevant migrations.
        """
        # Normal deps first
        for migration in self._get_dependency_objects("depends_on"):
            self.dependencies.add(migration)
            migration.dependents.add(self)
        # And reverse deps
        for migration in self._get_dependency_objects("needed_by"):
            self.dependents.add(migration)
            migration.dependencies.add(self)
        # And implicit ordering deps
        previous = self.previous()
        if previous:
            self.dependencies.add(previous)
            previous.dependents.add(self)

    def invalidate_module(self):
        """
        Removes the cached version of this migration's module import, so we
        have to re-import it. Used when south.db.db changes.
        """
        reload(self.migration())
        # Drop the memoize cache so the next call re-imports.
        self.migration._invalidate()

    def forwards(self):
        "Returns the bound forwards() of the migration instance."
        return self.migration_instance().forwards

    def backwards(self):
        "Returns the bound backwards() of the migration instance."
        return self.migration_instance().backwards

    def forwards_plan(self):
        """
        Returns a list of Migration objects to be applied, in order.

        This list includes `self`, which will be applied last.
        """
        return depends(self, lambda x: x.dependencies)

    def _backwards_plan(self):
        # Same traversal as forwards_plan, but over the dependents graph.
        return depends(self, lambda x: x.dependents)

    def backwards_plan(self):
        """
        Returns a list of Migration objects to be unapplied, in order.

        This list includes `self`, which will be unapplied last.
        """
        return list(self._backwards_plan())

    def is_before(self, other):
        # Only comparable within the same app; falls through to None otherwise.
        if self.migrations == other.migrations:
            if self.filename < other.filename:
                return True
            return False

    def is_after(self, other):
        # Only comparable within the same app; falls through to None otherwise.
        if self.migrations == other.migrations:
            if self.filename > other.filename:
                return True
            return False

    def prev_orm(self):
        "Returns the ORM as of the previous migration (empty for the first)."
        if getattr(self.migration_class(), 'symmetrical', False):
            # Symmetrical migrations use their own ORM for both directions.
            return self.orm()
        previous = self.previous()
        if previous is None:
            # First migration? The 'previous ORM' is empty.
            return FakeORM(None, self.app_label())
        return previous.orm()
    prev_orm = memoize(prev_orm)

    def orm(self):
        "Returns the FakeORM built from this migration's frozen models."
        return FakeORM(self.migration_class(), self.app_label())
    orm = memoize(orm)

    def no_dry_run(self):
        "Whether this migration opts out of running under a dry run."
        migration_class = self.migration_class()
        try:
            return migration_class.no_dry_run
        except AttributeError:
            # Default: dry runs are allowed.
            return False
diff --git a/lib/python2.7/site-packages/south/migration/migrators.py b/lib/python2.7/site-packages/south/migration/migrators.py
new file mode 100644
index 0000000..f405a15
--- /dev/null
+++ b/lib/python2.7/site-packages/south/migration/migrators.py
@@ -0,0 +1,379 @@
+from __future__ import print_function
+
+from copy import copy, deepcopy
+import datetime
+import inspect
+import sys
+import traceback
+
+from django.core.management import call_command
+from django.core.management.commands import loaddata
+from django.db import models
+from django import VERSION as DJANGO_VERSION
+
+import south.db
+from south import exceptions
+from south.db import DEFAULT_DB_ALIAS
+from south.models import MigrationHistory
+from south.signals import ran_migration
+from south.utils.py3 import StringIO
+
+
+class Migrator(object):
+ def __init__(self, verbosity=0, interactive=False):
+ self.verbosity = int(verbosity)
+ self.interactive = bool(interactive)
+
+ @staticmethod
+ def title(target):
+ raise NotImplementedError()
+
+ def print_title(self, target):
+ if self.verbosity:
+ print(self.title(target))
+
+ @staticmethod
+ def status(target):
+ raise NotImplementedError()
+
+ def print_status(self, migration):
+ status = self.status(migration)
+ if self.verbosity and status:
+ print(status)
+
+ @staticmethod
+ def orm(migration):
+ raise NotImplementedError()
+
+ def backwards(self, migration):
+ return self._wrap_direction(migration.backwards(), migration.prev_orm())
+
+ def direction(self, migration):
+ raise NotImplementedError()
+
+ @staticmethod
+ def _wrap_direction(direction, orm):
+ args = inspect.getargspec(direction)
+ if len(args[0]) == 1:
+ # Old migration, no ORM should be passed in
+ return direction
+ return (lambda: direction(orm))
+
+ @staticmethod
+ def record(migration, database):
+ raise NotImplementedError()
+
+ def run_migration_error(self, migration, extra_info=''):
+ return (
+ ' ! Error found during real run of migration! Aborting.\n'
+ '\n'
+ ' ! Since you have a database that does not support running\n'
+ ' ! schema-altering statements in transactions, we have had \n'
+ ' ! to leave it in an interim state between migrations.\n'
+ '%s\n'
+ ' ! The South developers regret this has happened, and would\n'
+ ' ! like to gently persuade you to consider a slightly\n'
+ ' ! easier-to-deal-with DBMS (one that supports DDL transactions)\n'
+ ' ! NOTE: The error which caused the migration to fail is further up.'
+ ) % extra_info
+
+ def run_migration(self, migration, database):
+ migration_function = self.direction(migration)
+ south.db.db.start_transaction()
+ try:
+ migration_function()
+ south.db.db.execute_deferred_sql()
+ if not isinstance(getattr(self, '_wrapper', self), DryRunMigrator):
+ # record us as having done this in the same transaction,
+ # since we're not in a dry run
+ self.record(migration, database)
+ except:
+ south.db.db.rollback_transaction()
+ if not south.db.db.has_ddl_transactions:
+ print(self.run_migration_error(migration))
+ print("Error in migration: %s" % migration)
+ raise
+ else:
+ try:
+ south.db.db.commit_transaction()
+ except:
+ print("Error during commit in migration: %s" % migration)
+ raise
+
+
+ def run(self, migration, database):
+ # Get the correct ORM.
+ south.db.db.current_orm = self.orm(migration)
+ # If we're not already in a dry run, and the database doesn't support
+ # running DDL inside a transaction, *cough*MySQL*cough* then do a dry
+ # run first.
+ if not isinstance(getattr(self, '_wrapper', self), DryRunMigrator):
+ if not south.db.db.has_ddl_transactions:
+ dry_run = DryRunMigrator(migrator=self, ignore_fail=False)
+ dry_run.run_migration(migration, database)
+ return self.run_migration(migration, database)
+
+
+ def send_ran_migration(self, migration, database):
+ ran_migration.send(None,
+ app=migration.app_label(),
+ migration=migration,
+ method=self.__class__.__name__.lower(),
+ verbosity=self.verbosity,
+ interactive=self.interactive,
+ db=database)
+
+ def migrate(self, migration, database):
+ """
+ Runs the specified migration forwards/backwards, in order.
+ """
+ app = migration.migrations._migrations
+ migration_name = migration.name()
+ self.print_status(migration)
+ result = self.run(migration, database)
+ self.send_ran_migration(migration, database)
+ return result
+
+ def migrate_many(self, target, migrations, database):
+ raise NotImplementedError()
+
+
+class MigratorWrapper(object):
+ def __init__(self, migrator, *args, **kwargs):
+ self._migrator = copy(migrator)
+ attributes = dict([(k, getattr(self, k))
+ for k in self.__class__.__dict__
+ if not k.startswith('__')])
+ self._migrator.__dict__.update(attributes)
+ self._migrator.__dict__['_wrapper'] = self
+
+ def __getattr__(self, name):
+ return getattr(self._migrator, name)
+
+
+class DryRunMigrator(MigratorWrapper):
+ def __init__(self, ignore_fail=True, *args, **kwargs):
+ super(DryRunMigrator, self).__init__(*args, **kwargs)
+ self._ignore_fail = ignore_fail
+
+ def _run_migration(self, migration):
+ if migration.no_dry_run():
+ if self.verbosity:
+ print(" - Migration '%s' is marked for no-dry-run." % migration)
+ return
+ south.db.db.dry_run = True
+ # preserve the constraint cache as it can be mutated by the dry run
+ constraint_cache = deepcopy(south.db.db._constraint_cache)
+ if self._ignore_fail:
+ south.db.db.debug, old_debug = False, south.db.db.debug
+ pending_creates = south.db.db.get_pending_creates()
+ south.db.db.start_transaction()
+ migration_function = self.direction(migration)
+ try:
+ try:
+ migration_function()
+ south.db.db.execute_deferred_sql()
+ except:
+ raise exceptions.FailedDryRun(migration, sys.exc_info())
+ finally:
+ south.db.db.rollback_transactions_dry_run()
+ if self._ignore_fail:
+ south.db.db.debug = old_debug
+ south.db.db.clear_run_data(pending_creates)
+ south.db.db.dry_run = False
+ # restore the preserved constraint cache from before dry run was
+ # executed
+ south.db.db._constraint_cache = constraint_cache
+
+ def run_migration(self, migration, database):
+ try:
+ self._run_migration(migration)
+ except exceptions.FailedDryRun:
+ if self._ignore_fail:
+ return False
+ raise
+
+ def send_ran_migration(self, *args, **kwargs):
+ pass
+
+
+class FakeMigrator(MigratorWrapper):
+ def run(self, migration, database):
+ # Don't actually run, just record as if ran
+ self.record(migration, database)
+ if self.verbosity:
+ print(' (faked)')
+
+ def send_ran_migration(self, *args, **kwargs):
+ pass
+
+
+class LoadInitialDataMigrator(MigratorWrapper):
+
+ def load_initial_data(self, target, db='default'):
+ if target is None or target != target.migrations[-1]:
+ return
+ # Load initial data, if we ended up at target
+ if self.verbosity:
+ print(" - Loading initial data for %s." % target.app_label())
+ if DJANGO_VERSION < (1, 6):
+ self.pre_1_6(target, db)
+ else:
+ self.post_1_6(target, db)
+
+ def pre_1_6(self, target, db):
+ # Override Django's get_apps call temporarily to only load from the
+ # current app
+ old_get_apps = models.get_apps
+ new_get_apps = lambda: [models.get_app(target.app_label())]
+ models.get_apps = new_get_apps
+ loaddata.get_apps = new_get_apps
+ try:
+ call_command('loaddata', 'initial_data', verbosity=self.verbosity, database=db)
+ finally:
+ models.get_apps = old_get_apps
+ loaddata.get_apps = old_get_apps
+
+ def post_1_6(self, target, db):
+ import django.db.models.loading
+ ## build a new 'AppCache' object with just the app we care about.
+ old_cache = django.db.models.loading.cache
+ new_cache = django.db.models.loading.AppCache()
+ new_cache.get_apps = lambda: [new_cache.get_app(target.app_label())]
+
+ ## monkeypatch
+ django.db.models.loading.cache = new_cache
+ try:
+ call_command('loaddata', 'initial_data', verbosity=self.verbosity, database=db)
+ finally:
+ ## unmonkeypatch
+ django.db.models.loading.cache = old_cache
+
+ def migrate_many(self, target, migrations, database):
+ migrator = self._migrator
+ result = migrator.__class__.migrate_many(migrator, target, migrations, database)
+ if result:
+ self.load_initial_data(target, db=database)
+ return True
+
+
+class Forwards(Migrator):
+ """
+ Runs the specified migration forwards, in order.
+ """
+ torun = 'forwards'
+
+ @staticmethod
+ def title(target):
+ if target is not None:
+ return " - Migrating forwards to %s." % target.name()
+ else:
+ assert False, "You cannot migrate forwards to zero."
+
+ @staticmethod
+ def status(migration):
+ return ' > %s' % migration
+
+ @staticmethod
+ def orm(migration):
+ return migration.orm()
+
+ def forwards(self, migration):
+ return self._wrap_direction(migration.forwards(), migration.orm())
+
+ direction = forwards
+
+ @staticmethod
+ def record(migration, database):
+ # Record us as having done this
+ record = MigrationHistory.for_migration(migration, database)
+ try:
+ from django.utils.timezone import now
+ record.applied = now()
+ except ImportError:
+ record.applied = datetime.datetime.utcnow()
+ if database != DEFAULT_DB_ALIAS:
+ record.save(using=database)
+ else:
+ # Django 1.1 and below always go down this branch.
+ record.save()
+
+ def format_backwards(self, migration):
+ if migration.no_dry_run():
+ return " (migration cannot be dry-run; cannot discover commands)"
+ old_debug, old_dry_run = south.db.db.debug, south.db.db.dry_run
+ south.db.db.debug = south.db.db.dry_run = True
+ stdout = sys.stdout
+ sys.stdout = StringIO()
+ try:
+ try:
+ self.backwards(migration)()
+ return sys.stdout.getvalue()
+ except:
+ raise
+ finally:
+ south.db.db.debug, south.db.db.dry_run = old_debug, old_dry_run
+ sys.stdout = stdout
+
+ def run_migration_error(self, migration, extra_info=''):
+ extra_info = ('\n'
+ '! You *might* be able to recover with:'
+ '%s'
+ '%s' %
+ (self.format_backwards(migration), extra_info))
+ return super(Forwards, self).run_migration_error(migration, extra_info)
+
+ def migrate_many(self, target, migrations, database):
+ try:
+ for migration in migrations:
+ result = self.migrate(migration, database)
+ if result is False: # The migrations errored, but nicely.
+ return False
+ finally:
+ # Call any pending post_syncdb signals
+ south.db.db.send_pending_create_signals(verbosity=self.verbosity,
+ interactive=self.interactive)
+ return True
+
+
+class Backwards(Migrator):
+ """
+ Runs the specified migration backwards, in order.
+ """
+ torun = 'backwards'
+
+ @staticmethod
+ def title(target):
+ if target is None:
+ return " - Migrating backwards to zero state."
+ else:
+ return " - Migrating backwards to just after %s." % target.name()
+
+ @staticmethod
+ def status(migration):
+ return ' < %s' % migration
+
+ @staticmethod
+ def orm(migration):
+ return migration.prev_orm()
+
+ direction = Migrator.backwards
+
+ @staticmethod
+ def record(migration, database):
+ # Record us as having not done this
+ record = MigrationHistory.for_migration(migration, database)
+ if record.id is not None:
+ if database != DEFAULT_DB_ALIAS:
+ record.delete(using=database)
+ else:
+ # Django 1.1 always goes down here
+ record.delete()
+
+ def migrate_many(self, target, migrations, database):
+ for migration in migrations:
+ self.migrate(migration, database)
+ return True
+
+
+
diff --git a/lib/python2.7/site-packages/south/migration/utils.py b/lib/python2.7/site-packages/south/migration/utils.py
new file mode 100644
index 0000000..68b9164
--- /dev/null
+++ b/lib/python2.7/site-packages/south/migration/utils.py
@@ -0,0 +1,94 @@
+import sys
+from collections import deque
+
+from django.utils.datastructures import SortedDict
+from django.db import models
+
+from south import exceptions
+
+
+class SortedSet(SortedDict):
+ def __init__(self, data=tuple()):
+ self.extend(data)
+
+ def __str__(self):
+ return "SortedSet(%s)" % list(self)
+
+ def add(self, value):
+ self[value] = True
+
+ def remove(self, value):
+ del self[value]
+
+ def extend(self, iterable):
+ [self.add(k) for k in iterable]
+
+
+def get_app_label(app):
+ """
+ Returns the _internal_ app label for the given app module.
+    e.g. for <module django.contrib.auth.models> it will return 'auth'
+ """
+ return app.__name__.split('.')[-2]
+
+
+def app_label_to_app_module(app_label):
+ """
+ Given the app label, returns the module of the app itself (unlike models.get_app,
+ which returns the models module)
+ """
+ # Get the models module
+ app = models.get_app(app_label)
+ module_name = ".".join(app.__name__.split(".")[:-1])
+ try:
+ module = sys.modules[module_name]
+ except KeyError:
+ __import__(module_name, {}, {}, [''])
+ module = sys.modules[module_name]
+ return module
+
+
+def flatten(*stack):
+ stack = deque(stack)
+ while stack:
+ try:
+ x = next(stack[0])
+ except TypeError:
+ stack[0] = iter(stack[0])
+ x = next(stack[0])
+ except StopIteration:
+ stack.popleft()
+ continue
+ if hasattr(x, '__iter__') and not isinstance(x, str):
+ stack.appendleft(x)
+ else:
+ yield x
+
+dependency_cache = {}
+
+def _dfs(start, get_children, path):
+ if (start, get_children) in dependency_cache:
+ return dependency_cache[(start, get_children)]
+
+ results = []
+ if start in path:
+ raise exceptions.CircularDependency(path[path.index(start):] + [start])
+ path.append(start)
+ results.append(start)
+ children = sorted(get_children(start), key=lambda x: str(x))
+
+ # We need to apply all the migrations this one depends on
+ for n in children:
+ results = _dfs(n, get_children, path) + results
+
+ path.pop()
+
+ results = list(SortedSet(results))
+ dependency_cache[(start, get_children)] = results
+ return results
+
+def dfs(start, get_children):
+ return _dfs(start, get_children, [])
+
+def depends(start, get_children):
+ return dfs(start, get_children)
diff --git a/lib/python2.7/site-packages/south/models.py b/lib/python2.7/site-packages/south/models.py
new file mode 100644
index 0000000..8239d61
--- /dev/null
+++ b/lib/python2.7/site-packages/south/models.py
@@ -0,0 +1,37 @@
+from django.db import models
+from south.db import DEFAULT_DB_ALIAS
+
+class MigrationHistory(models.Model):
+ app_name = models.CharField(max_length=255)
+ migration = models.CharField(max_length=255)
+ applied = models.DateTimeField(blank=True)
+
+ @classmethod
+ def for_migration(cls, migration, database):
+ try:
+ # Switch on multi-db-ness
+ if database != DEFAULT_DB_ALIAS:
+ # Django 1.2
+ objects = cls.objects.using(database)
+ else:
+ # Django <= 1.1
+ objects = cls.objects
+ return objects.get(
+ app_name=migration.app_label(),
+ migration=migration.name(),
+ )
+ except cls.DoesNotExist:
+ return cls(
+ app_name=migration.app_label(),
+ migration=migration.name(),
+ )
+
+ def get_migrations(self):
+ from south.migration.base import Migrations
+ return Migrations(self.app_name)
+
+ def get_migration(self):
+ return self.get_migrations().migration(self.migration)
+
+ def __str__(self):
+ return "<%s: %s>" % (self.app_name, self.migration)
diff --git a/lib/python2.7/site-packages/south/modelsinspector.py b/lib/python2.7/site-packages/south/modelsinspector.py
new file mode 100644
index 0000000..13e493d
--- /dev/null
+++ b/lib/python2.7/site-packages/south/modelsinspector.py
@@ -0,0 +1,464 @@
+"""
+Like the old south.modelsparser, but using introspection where possible
+rather than direct inspection of models.py.
+"""
+
+from __future__ import print_function
+
+import datetime
+import re
+import decimal
+
+from south.utils import get_attribute, auto_through
+from south.utils.py3 import text_type
+
+from django.db import models
+from django.db.models.base import ModelBase, Model
+from django.db.models.fields import NOT_PROVIDED
+from django.conf import settings
+from django.utils.functional import Promise
+from django.contrib.contenttypes import generic
+from django.utils.datastructures import SortedDict
+from django.utils import datetime_safe
+
+NOISY = False
+
+try:
+ from django.utils import timezone
+except ImportError:
+ timezone = False
+
+
+# Define any converter functions first to prevent NameErrors
+
+def convert_on_delete_handler(value):
+ django_db_models_module = 'models' # relative to standard import 'django.db'
+ if hasattr(models, "PROTECT"):
+ if value in (models.CASCADE, models.PROTECT, models.DO_NOTHING, models.SET_DEFAULT):
+ # straightforward functions
+ return '%s.%s' % (django_db_models_module, value.__name__)
+ else:
+ # This is totally dependent on the implementation of django.db.models.deletion.SET
+ func_name = getattr(value, '__name__', None)
+ if func_name == 'set_on_delete':
+ # we must inspect the function closure to see what parameters were passed in
+ closure_contents = value.__closure__[0].cell_contents
+ if closure_contents is None:
+ return "%s.SET_NULL" % (django_db_models_module)
+ # simple function we can perhaps cope with:
+ elif hasattr(closure_contents, '__call__'):
+ raise ValueError("South does not support on_delete with SET(function) as values.")
+ else:
+ # Attempt to serialise the value
+ return "%s.SET(%s)" % (django_db_models_module, value_clean(closure_contents))
+ raise ValueError("%s was not recognized as a valid model deletion handler. Possible values: %s." % (value, ', '.join(f.__name__ for f in (models.CASCADE, models.PROTECT, models.SET, models.SET_NULL, models.SET_DEFAULT, models.DO_NOTHING))))
+ else:
+ raise ValueError("on_delete argument encountered in Django version that does not support it")
+
+# Gives information about how to introspect certain fields.
+# This is a list of triples; the first item is a list of fields it applies to,
+# (note that isinstance is used, so superclasses are perfectly valid here)
+# the second is a list of positional argument descriptors, and the third
+# is a list of keyword argument descriptors.
+# Descriptors are of the form:
+# [attrname, options]
+# Where attrname is the attribute on the field to get the value from, and options
+# is an optional dict.
+#
+# The introspector uses the combination of all matching entries, in order.
+
+introspection_details = [
+ (
+ (models.Field, ),
+ [],
+ {
+ "null": ["null", {"default": False}],
+ "blank": ["blank", {"default": False, "ignore_if":"primary_key"}],
+ "primary_key": ["primary_key", {"default": False}],
+ "max_length": ["max_length", {"default": None}],
+ "unique": ["_unique", {"default": False}],
+ "db_index": ["db_index", {"default": False}],
+ "default": ["default", {"default": NOT_PROVIDED, "ignore_dynamics": True}],
+ "db_column": ["db_column", {"default": None}],
+ "db_tablespace": ["db_tablespace", {"default": settings.DEFAULT_INDEX_TABLESPACE}],
+ },
+ ),
+ (
+ (models.ForeignKey, models.OneToOneField),
+ [],
+ dict([
+ ("to", ["rel.to", {}]),
+ ("to_field", ["rel.field_name", {"default_attr": "rel.to._meta.pk.name"}]),
+ ("related_name", ["rel.related_name", {"default": None}]),
+ ("db_index", ["db_index", {"default": True}]),
+ ("on_delete", ["rel.on_delete", {"default": getattr(models, "CASCADE", None), "is_django_function": True, "converter": convert_on_delete_handler, "ignore_missing": True}])
+ ])
+ ),
+ (
+ (models.ManyToManyField,),
+ [],
+ {
+ "to": ["rel.to", {}],
+ "symmetrical": ["rel.symmetrical", {"default": True}],
+ "related_name": ["rel.related_name", {"default": None}],
+ "db_table": ["db_table", {"default": None}],
+ # TODO: Kind of ugly to add this one-time-only option
+ "through": ["rel.through", {"ignore_if_auto_through": True}],
+ },
+ ),
+ (
+ (models.DateField, models.TimeField),
+ [],
+ {
+ "auto_now": ["auto_now", {"default": False}],
+ "auto_now_add": ["auto_now_add", {"default": False}],
+ },
+ ),
+ (
+ (models.DecimalField, ),
+ [],
+ {
+ "max_digits": ["max_digits", {"default": None}],
+ "decimal_places": ["decimal_places", {"default": None}],
+ },
+ ),
+ (
+ (models.SlugField, ),
+ [],
+ {
+ "db_index": ["db_index", {"default": True}],
+ },
+ ),
+ (
+ (models.BooleanField, ),
+ [],
+ {
+ "default": ["default", {"default": NOT_PROVIDED, "converter": bool}],
+ "blank": ["blank", {"default": True, "ignore_if":"primary_key"}],
+ },
+ ),
+ (
+ (models.FilePathField, ),
+ [],
+ {
+ "path": ["path", {"default": ''}],
+ "match": ["match", {"default": None}],
+ "recursive": ["recursive", {"default": False}],
+ },
+ ),
+ (
+ (generic.GenericRelation, ),
+ [],
+ {
+ "to": ["rel.to", {}],
+ "symmetrical": ["rel.symmetrical", {"default": True}],
+ "object_id_field": ["object_id_field_name", {"default": "object_id"}],
+ "content_type_field": ["content_type_field_name", {"default": "content_type"}],
+ "blank": ["blank", {"default": True}],
+ },
+ ),
+]
+
+# Regexes of allowed field full paths
+allowed_fields = [
+ "^django\.db",
+ "^django\.contrib\.contenttypes\.generic",
+ "^django\.contrib\.localflavor",
+ "^django_localflavor_\w\w",
+]
+
+# Regexes of ignored fields (custom fields which look like fields, but have no column behind them)
+ignored_fields = [
+ "^django\.contrib\.contenttypes\.generic\.GenericRelation",
+ "^django\.contrib\.contenttypes\.generic\.GenericForeignKey",
+]
+
+# Similar, but for Meta, so just the inner level (kwds).
+meta_details = {
+ "db_table": ["db_table", {"default_attr_concat": ["%s_%s", "app_label", "module_name"]}],
+ "db_tablespace": ["db_tablespace", {"default": settings.DEFAULT_TABLESPACE}],
+ "unique_together": ["unique_together", {"default": []}],
+ "index_together": ["index_together", {"default": [], "ignore_missing": True}],
+ "ordering": ["ordering", {"default": []}],
+ "proxy": ["proxy", {"default": False, "ignore_missing": True}],
+}
+
+
+def add_introspection_rules(rules=[], patterns=[]):
+ "Allows you to add some introspection rules at runtime, e.g. for 3rd party apps."
+ assert isinstance(rules, (list, tuple))
+ assert isinstance(patterns, (list, tuple))
+ allowed_fields.extend(patterns)
+ introspection_details.extend(rules)
+
+
+def add_ignored_fields(patterns):
+ "Allows you to add some ignore field patterns."
+ assert isinstance(patterns, (list, tuple))
+ ignored_fields.extend(patterns)
+
+
+def can_ignore(field):
+ """
+ Returns True if we know for certain that we can ignore this field, False
+ otherwise.
+ """
+ full_name = "%s.%s" % (field.__class__.__module__, field.__class__.__name__)
+ for regex in ignored_fields:
+ if re.match(regex, full_name):
+ return True
+ return False
+
+
+def can_introspect(field):
+ """
+ Returns True if we are allowed to introspect this field, False otherwise.
+ ('allowed' means 'in core'. Custom fields can declare they are introspectable
+ by the default South rules by adding the attribute _south_introspects = True.)
+ """
+ # Check for special attribute
+ if hasattr(field, "_south_introspects") and field._south_introspects:
+ return True
+ # Check it's an introspectable field
+ full_name = "%s.%s" % (field.__class__.__module__, field.__class__.__name__)
+ for regex in allowed_fields:
+ if re.match(regex, full_name):
+ return True
+ return False
+
+
+def matching_details(field):
+ """
+ Returns the union of all matching entries in introspection_details for the field.
+ """
+ our_args = []
+ our_kwargs = {}
+ for classes, args, kwargs in introspection_details:
+ if any([isinstance(field, x) for x in classes]):
+ our_args.extend(args)
+ our_kwargs.update(kwargs)
+ return our_args, our_kwargs
+
+
+class IsDefault(Exception):
+ """
+ Exception for when a field contains its default value.
+ """
+
+
+def get_value(field, descriptor):
+ """
+ Gets an attribute value from a Field instance and formats it.
+ """
+ attrname, options = descriptor
+ # If the options say it's not a attribute name but a real value, use that.
+ if options.get('is_value', False):
+ value = attrname
+ else:
+ try:
+ value = get_attribute(field, attrname)
+ except AttributeError:
+ if options.get("ignore_missing", False):
+ raise IsDefault
+ else:
+ raise
+
+ # Lazy-eval functions get eval'd.
+ if isinstance(value, Promise):
+ value = text_type(value)
+ # If the value is the same as the default, omit it for clarity
+ if "default" in options and value == options['default']:
+ raise IsDefault
+ # If there's an ignore_if, use it
+ if "ignore_if" in options:
+ if get_attribute(field, options['ignore_if']):
+ raise IsDefault
+ # If there's an ignore_if_auto_through which is True, use it
+ if options.get("ignore_if_auto_through", False):
+ if auto_through(field):
+ raise IsDefault
+ # Some default values need to be gotten from an attribute too.
+ if "default_attr" in options:
+ default_value = get_attribute(field, options['default_attr'])
+ if value == default_value:
+ raise IsDefault
+ # Some are made from a formatting string and several attrs (e.g. db_table)
+ if "default_attr_concat" in options:
+ format, attrs = options['default_attr_concat'][0], options['default_attr_concat'][1:]
+ default_value = format % tuple(map(lambda x: get_attribute(field, x), attrs))
+ if value == default_value:
+ raise IsDefault
+ # Clean and return the value
+ return value_clean(value, options)
+
+
+def value_clean(value, options={}):
+    "Takes a value and cleans it up (so that, e.g., timezones are handled correctly)"
+ # Lazy-eval functions get eval'd.
+ if isinstance(value, Promise):
+ value = text_type(value)
+ # Callables get called.
+ if not options.get('is_django_function', False) and callable(value) and not isinstance(value, ModelBase):
+        # datetime.datetime.now is special, as we can access it from the eval
+ # context (and because it changes all the time; people will file bugs otherwise).
+ if value == datetime.datetime.now:
+ return "datetime.datetime.now"
+ elif value == datetime.datetime.utcnow:
+ return "datetime.datetime.utcnow"
+ elif value == datetime.date.today:
+ return "datetime.date.today"
+ # In case we use Django's own now function, revert to datetime's
+ # original one since we'll deal with timezones on our own.
+ elif timezone and value == timezone.now:
+ return "datetime.datetime.now"
+ # All other callables get called.
+ value = value()
+ # Models get their own special repr()
+ if isinstance(value, ModelBase):
+ # If it's a proxy model, follow it back to its non-proxy parent
+ if getattr(value._meta, "proxy", False):
+ value = value._meta.proxy_for_model
+ return "orm['%s.%s']" % (value._meta.app_label, value._meta.object_name)
+ # As do model instances
+ if isinstance(value, Model):
+ if options.get("ignore_dynamics", False):
+ raise IsDefault
+ return "orm['%s.%s'].objects.get(pk=%r)" % (value.__class__._meta.app_label, value.__class__._meta.object_name, value.pk)
+ # Make sure Decimal is converted down into a string
+ if isinstance(value, decimal.Decimal):
+ value = str(value)
+ # in case the value is timezone aware
+ datetime_types = (
+ datetime.datetime,
+ datetime.time,
+ datetime_safe.datetime,
+ )
+ if (timezone and isinstance(value, datetime_types) and
+ getattr(settings, 'USE_TZ', False) and
+ value is not None and timezone.is_aware(value)):
+ default_timezone = timezone.get_default_timezone()
+ value = timezone.make_naive(value, default_timezone)
+ # datetime_safe has an improper repr value
+ if isinstance(value, datetime_safe.datetime):
+ value = datetime.datetime(*value.utctimetuple()[:7])
+ # converting a date value to a datetime to be able to handle
+ # timezones later gracefully
+ elif isinstance(value, (datetime.date, datetime_safe.date)):
+ value = datetime.datetime(*value.timetuple()[:3])
+ # Now, apply the converter func if there is one
+ if "converter" in options:
+ value = options['converter'](value)
+ # Return the final value
+ if options.get('is_django_function', False):
+ return value
+ else:
+ return repr(value)
+
+
+def introspector(field):
+ """
+ Given a field, introspects its definition triple.
+ """
+ arg_defs, kwarg_defs = matching_details(field)
+ args = []
+ kwargs = {}
+ # For each argument, use the descriptor to get the real value.
+ for defn in arg_defs:
+ try:
+ args.append(get_value(field, defn))
+ except IsDefault:
+ pass
+ for kwd, defn in kwarg_defs.items():
+ try:
+ kwargs[kwd] = get_value(field, defn)
+ except IsDefault:
+ pass
+ return args, kwargs
+
+
+def get_model_fields(model, m2m=False):
+ """
+ Given a model class, returns a dict of {field_name: field_triple} defs.
+ """
+
+ field_defs = SortedDict()
+ inherited_fields = {}
+
+ # Go through all bases (that are themselves models, but not Model)
+ for base in model.__bases__:
+ if hasattr(base, '_meta') and issubclass(base, models.Model):
+ if not base._meta.abstract:
+ # Looks like we need their fields, Ma.
+ inherited_fields.update(get_model_fields(base))
+
+ # Now, go through all the fields and try to get their definition
+ source = model._meta.local_fields[:]
+ if m2m:
+ source += model._meta.local_many_to_many
+
+ for field in source:
+ # Can we ignore it completely?
+ if can_ignore(field):
+ continue
+ # Does it define a south_field_triple method?
+ if hasattr(field, "south_field_triple"):
+ if NOISY:
+ print(" ( Nativing field: %s" % field.name)
+ field_defs[field.name] = field.south_field_triple()
+ # Can we introspect it?
+ elif can_introspect(field):
+ # Get the full field class path.
+ field_class = field.__class__.__module__ + "." + field.__class__.__name__
+ # Run this field through the introspector
+ args, kwargs = introspector(field)
+ # Workaround for Django bug #13987
+ if model._meta.pk.column == field.column and 'primary_key' not in kwargs:
+ kwargs['primary_key'] = True
+ # That's our definition!
+ field_defs[field.name] = (field_class, args, kwargs)
+ # Shucks, no definition!
+ else:
+ if NOISY:
+ print(" ( Nodefing field: %s" % field.name)
+ field_defs[field.name] = None
+
+ # If they've used the horrific hack that is order_with_respect_to, deal with
+ # it.
+ if model._meta.order_with_respect_to:
+ field_defs['_order'] = ("django.db.models.fields.IntegerField", [], {"default": "0"})
+
+ return field_defs
+
+
+def get_model_meta(model):
+ """
+    Given a model class, returns a dict representing its Meta class options.
+ """
+
+ # Get the introspected attributes
+ meta_def = {}
+ for kwd, defn in meta_details.items():
+ try:
+ meta_def[kwd] = get_value(model._meta, defn)
+ except IsDefault:
+ pass
+
+ # Also, add on any non-abstract model base classes.
+ # This is called _ormbases as the _bases variable was previously used
+ # for a list of full class paths to bases, so we can't conflict.
+ for base in model.__bases__:
+ if hasattr(base, '_meta') and issubclass(base, models.Model):
+ if not base._meta.abstract:
+ # OK, that matches our terms.
+ if "_ormbases" not in meta_def:
+ meta_def['_ormbases'] = []
+ meta_def['_ormbases'].append("%s.%s" % (
+ base._meta.app_label,
+ base._meta.object_name,
+ ))
+
+ return meta_def
+
+
+# Now, load the built-in South introspection plugins
+import south.introspection_plugins
diff --git a/lib/python2.7/site-packages/south/orm.py b/lib/python2.7/site-packages/south/orm.py
new file mode 100644
index 0000000..8d46ee7
--- /dev/null
+++ b/lib/python2.7/site-packages/south/orm.py
@@ -0,0 +1,407 @@
+"""
+South's fake ORM; lets you not have to write SQL inside migrations.
+Roughly emulates the real Django ORM, to a point.
+"""
+
+from __future__ import print_function
+
+import inspect
+
+from django.db import models
+from django.db.models.loading import cache
+from django.core.exceptions import ImproperlyConfigured
+
+from south.db import db
+from south.utils import ask_for_it_by_name, datetime_utils
+from south.hacks import hacks
+from south.exceptions import UnfreezeMeLater, ORMBaseNotIncluded, ImpossibleORMUnfreeze
+from south.utils.py3 import string_types
+
+
class ModelsLocals(object):

    """
    Dictionary-like object intended to serve as locals();
    a lookup that misses is retried with the key lowercased,
    because model names are stored in lowercase.
    """

    def __init__(self, data):
        self.data = data

    def __getitem__(self, key):
        mapping = self.data
        if key in mapping:
            return mapping[key]
        # Fall back to the lowercase form of the name.
        return mapping[key.lower()]
+
+
# Stores already-created ORMs.
_orm_cache = {}

def FakeORM(*args):
    """
    Creates a Fake Django ORM.
    This is actually a memoised constructor; the real class is _FakeORM,
    and identical argument tuples always return the same cached instance.
    """
    # Idiom fix: 'args not in' instead of the awkward 'not args in'.
    if args not in _orm_cache:
        _orm_cache[args] = _FakeORM(*args)
    return _orm_cache[args]
+
+
class LazyFakeORM(object):
    """
    Memoises the ORM call and also defers it: assign an instance of this
    to a class attribute (for example .orm), and the ORM is only built
    the first time that attribute is read, via the descriptor protocol.
    """

    def __init__(self, *args):
        # Remember the construction arguments; build nothing yet.
        self._args = args
        self.orm = None

    def __get__(self, obj, type=None):
        # Build (and cache) the real ORM on first access.
        if not self.orm:
            self.orm = FakeORM(*self._args)
        return self.orm
+
+
class _FakeORM(object):

    """
    Simulates the Django ORM at some point in time,
    using a frozen definition on the Migration class.

    Models are unfrozen in dependency order (inheritance bases first);
    fields that fail to evaluate on the first pass are retried once all
    models exist, which resolves circular/backwards references.
    """

    def __init__(self, cls, app):
        # cls: the Migration class carrying the frozen 'models' dict.
        # app: the default app label used to resolve unqualified model names.
        self.default_app = app
        self.cls = cls
        # Try loading the models off the migration class; default to no models.
        self.models = {}
        try:
            self.models_source = cls.models
        except AttributeError:
            return

        # Start a 'new' AppCache
        hacks.clear_app_cache()

        # Now, make each model's data into a FakeModel
        # We first make entries for each model that are just its name
        # This allows us to have circular model dependency loops
        model_names = []
        for name, data in self.models_source.items():
            # Make sure there's some kind of Meta
            if "Meta" not in data:
                data['Meta'] = {}
            try:
                app_label, model_name = name.split(".", 1)
            except ValueError:
                # No dot in the name: assume the default app.
                app_label = self.default_app
                model_name = name

            # If there's an object_name in the Meta, use it and remove it
            if "object_name" in data['Meta']:
                model_name = data['Meta']['object_name']
                del data['Meta']['object_name']

            name = "%s.%s" % (app_label, model_name)
            # Placeholder entry (just the name string) so make_model can
            # detect not-yet-unfrozen models and postpone itself.
            self.models[name.lower()] = name
            model_names.append((name.lower(), app_label, model_name, data))

        # Loop until model_names is empty, or hasn't shrunk in size since
        # last iteration.
        # The make_model method can ask to postpone a model; it's then pushed
        # to the back of the queue. Because this is currently only used for
        # inheritance, it should thus theoretically always decrease by one.
        last_size = None
        while model_names:
            # First, make sure we've shrunk.
            if len(model_names) == last_size:
                raise ImpossibleORMUnfreeze()
            last_size = len(model_names)
            # Make one run through
            postponed_model_names = []
            for name, app_label, model_name, data in model_names:
                try:
                    self.models[name] = self.make_model(app_label, model_name, data)
                except UnfreezeMeLater:
                    postponed_model_names.append((name, app_label, model_name, data))
            # Reset
            model_names = postponed_model_names

        # And perform the second run to iron out any circular/backwards depends.
        self.retry_failed_fields()

        # Force evaluation of relations on the models now
        for model in self.models.values():
            model._meta.get_all_field_names()

        # Reset AppCache
        hacks.unclear_app_cache()


    def __iter__(self):
        # Iterate over the unfrozen model classes.
        return iter(self.models.values())


    def __getattr__(self, key):
        # orm.ModelName attribute access: resolve within the default app.
        fullname = (self.default_app+"."+key).lower()
        try:
            return self.models[fullname]
        except KeyError:
            raise AttributeError("The model '%s' from the app '%s' is not available in this migration. (Did you use orm.ModelName, not orm['app.ModelName']?)" % (key, self.default_app))


    def __getitem__(self, key):
        # orm['app.Model'] returns the model; orm['app.Model:field'] the field.
        # Detect if they asked for a field on a model or not.
        if ":" in key:
            key, fname = key.split(":")
        else:
            fname = None
        # Now, try getting the model
        key = key.lower()
        try:
            model = self.models[key]
        except KeyError:
            try:
                app, model = key.split(".", 1)
            except ValueError:
                raise KeyError("The model '%s' is not in appname.modelname format." % key)
            else:
                raise KeyError("The model '%s' from the app '%s' is not available in this migration." % (model, app))
        # If they asked for a field, get it.
        if fname:
            return model._meta.get_field_by_name(fname)[0]
        else:
            return model


    def eval_in_context(self, code, app, extra_imports={}):
        "Evaluates the given code in the context of the migration file."
        # NOTE: the mutable default is tolerable here because extra_imports
        # is only ever read in this method, never mutated.

        # Drag in the migration module's locals (hopefully including models.py)
        # excluding all models from that (i.e. from modern models.py), to stop pollution
        fake_locals = dict(
            (key, value)
            for key, value in inspect.getmodule(self.cls).__dict__.items()
            if not (
                isinstance(value, type)
                and issubclass(value, models.Model)
                and hasattr(value, "_meta")
            )
        )

        # We add our models into the locals for the eval
        fake_locals.update(dict([
            (name.split(".")[-1], model)
            for name, model in self.models.items()
        ]))

        # Make sure the ones for this app override.
        fake_locals.update(dict([
            (name.split(".")[-1], model)
            for name, model in self.models.items()
            if name.split(".")[0] == app
        ]))

        # Ourselves as orm, to allow non-fail cross-app referencing
        fake_locals['orm'] = self

        # And a fake _ function
        fake_locals['_'] = lambda x: x

        # Datetime; there should be no datetime direct accesses
        fake_locals['datetime'] = datetime_utils

        # Now, go through the requested imports and import them.
        for name, value in extra_imports.items():
            # First, try getting it out of locals.
            parts = value.split(".")
            try:
                obj = fake_locals[parts[0]]
                for part in parts[1:]:
                    obj = getattr(obj, part)
            except (KeyError, AttributeError):
                pass
            else:
                fake_locals[name] = obj
                continue
            # OK, try to import it directly
            try:
                fake_locals[name] = ask_for_it_by_name(value)
            except ImportError:
                # The field class itself is mandatory; everything else is
                # best-effort.
                if name == "SouthFieldClass":
                    raise ValueError("Cannot import the required field '%s'" % value)
                else:
                    print("WARNING: Cannot import '%s'" % value)

        # Use ModelsLocals to make lookups work right for CapitalisedModels
        fake_locals = ModelsLocals(fake_locals)

        return eval(code, globals(), fake_locals)


    def make_meta(self, app, model, data, stub=False):
        "Makes a Meta class out of a dict of eval-able arguments."
        results = {'app_label': app}
        for key, code in data.items():
            # Some things we never want to use.
            if key in ["_bases", "_ormbases"]:
                continue
            # Some things we don't want with stubs.
            if stub and key in ["order_with_respect_to"]:
                continue
            # OK, add it.
            try:
                results[key] = self.eval_in_context(code, app)
            except (NameError, AttributeError) as e:
                raise ValueError("Cannot successfully create meta field '%s' for model '%s.%s': %s." % (
                    key, app, model, e
                ))
        return type("Meta", tuple(), results)


    def make_model(self, app, name, data):
        "Makes a Model class out of the given app name, model name and pickled data."

        # Extract any bases out of Meta
        if "_ormbases" in data['Meta']:
            # Make sure everything we depend on is done already; otherwise, wait.
            for key in data['Meta']['_ormbases']:
                key = key.lower()
                if key not in self.models:
                    raise ORMBaseNotIncluded("Cannot find ORM base %s" % key)
                elif isinstance(self.models[key], string_types):
                    # Then the other model hasn't been unfrozen yet.
                    # We postpone ourselves; the situation will eventually resolve.
                    raise UnfreezeMeLater()
            bases = [self.models[key.lower()] for key in data['Meta']['_ormbases']]
        # Perhaps the old style?
        elif "_bases" in data['Meta']:
            # On Python 3 map() returns an iterator, which is fine: it is
            # consumed exactly once by tuple(bases) below.
            bases = map(ask_for_it_by_name, data['Meta']['_bases'])
        # Ah, bog standard, then.
        else:
            bases = [models.Model]

        # Turn the Meta dict into a basic class
        meta = self.make_meta(app, name, data['Meta'], data.get("_stub", False))

        failed_fields = {}
        fields = {}
        stub = False

        # Now, make some fields!
        for fname, params in data.items():
            # If it's the stub marker, ignore it.
            if fname == "_stub":
                stub = bool(params)
                continue
            elif fname == "Meta":
                continue
            elif not params:
                raise ValueError("Field '%s' on model '%s.%s' has no definition." % (fname, app, name))
            elif isinstance(params, string_types):
                # It's a premade definition string! Let's hope it works...
                code = params
                extra_imports = {}
            else:
                # If there's only one parameter (backwards compat), make it 3.
                if len(params) == 1:
                    params = (params[0], [], {})
                # There should be 3 parameters. Code is a tuple of (code, what-to-import)
                if len(params) == 3:
                    code = "SouthFieldClass(%s)" % ", ".join(
                        params[1] +
                        ["%s=%s" % (n, v) for n, v in params[2].items()]
                    )
                    extra_imports = {"SouthFieldClass": params[0]}
                else:
                    raise ValueError("Field '%s' on model '%s.%s' has a weird definition length (should be 1 or 3 items)." % (fname, app, name))

            try:
                # Execute it in a probably-correct context.
                field = self.eval_in_context(code, app, extra_imports)
            except (NameError, AttributeError, AssertionError, KeyError):
                # It might rely on other models being around. Add it to the
                # model for the second pass.
                failed_fields[fname] = (code, extra_imports)
            else:
                fields[fname] = field

        # Find the app in the Django core, and get its module
        more_kwds = {}
        try:
            app_module = models.get_app(app)
            more_kwds['__module__'] = app_module.__name__
        except ImproperlyConfigured:
            # The app this belonged to has vanished, but thankfully we can still
            # make a mock model, so ignore the error.
            more_kwds['__module__'] = '_south_mock'

        more_kwds['Meta'] = meta

        # Make our model
        fields.update(more_kwds)

        model = type(
            str(name),
            tuple(bases),
            fields,
        )

        # If this is a stub model, change Objects to a whiny class
        if stub:
            model.objects = WhinyManager()
            # Also, make sure they can't instantiate it
            model.__init__ = whiny_method
        else:
            model.objects = NoDryRunManager(model.objects)

        if failed_fields:
            model._failed_fields = failed_fields

        return model

    def retry_failed_fields(self):
        "Tries to re-evaluate the _failed_fields for each model."
        for modelkey, model in self.models.items():
            app, modelname = modelkey.split(".", 1)
            if hasattr(model, "_failed_fields"):
                for fname, (code, extra_imports) in model._failed_fields.items():
                    try:
                        field = self.eval_in_context(code, app, extra_imports)
                    except (NameError, AttributeError, AssertionError, KeyError) as e:
                        # It's failed again. Complain.
                        raise ValueError("Cannot successfully create field '%s' for model '%s': %s." % (
                            fname, modelname, e
                        ))
                    else:
                        # Startup that field.
                        model.add_to_class(fname, field)
+
+
class WhinyManager(object):
    """Stand-in manager for stub models: every attribute access raises."""

    def __getattr__(self, name):
        raise AttributeError("You cannot use items from a stub model.")
+
+
class NoDryRunManager(object):
    """
    Proxy manager: delegates every attribute lookup to the real manager,
    except while a dry run is in progress, when access is an error.
    """

    def __init__(self, real):
        # The real manager we delegate to outside dry runs.
        self.real = real

    def __getattr__(self, name):
        if not db.dry_run:
            return getattr(self.real, name)
        raise AttributeError("You are in a dry run, and cannot access the ORM.\nWrap ORM sections in 'if not db.dry_run:', or if the whole migration is only a data migration, set no_dry_run = True on the Migration class.")
+
+
def whiny_method(*args, **kwargs):
    """Replacement __init__ for stub models: instantiation is forbidden."""
    raise ValueError("You cannot instantiate a stub model.")
diff --git a/lib/python2.7/site-packages/south/signals.py b/lib/python2.7/site-packages/south/signals.py
new file mode 100644
index 0000000..12a1362
--- /dev/null
+++ b/lib/python2.7/site-packages/south/signals.py
@@ -0,0 +1,24 @@
+"""
+South-specific signals
+"""
+
+from django.dispatch import Signal
+from django.conf import settings
+
# Sent at the start of the migration of an app.
pre_migrate = Signal(providing_args=["app", "verbosity", "interactive", "db"])

# Sent after each successful migration of an app.
post_migrate = Signal(providing_args=["app", "verbosity", "interactive", "db"])

# Sent after each run of a particular migration in a direction;
# carries the migration itself and the method ("forwards"/"backwards") run.
ran_migration = Signal(providing_args=["app", "migration", "method", "verbosity", "interactive", "db"])
+
+# Compatibility code for django.contrib.auth
+# Is causing strange errors, removing for now (we might need to fix up orm first)
+#if 'django.contrib.auth' in settings.INSTALLED_APPS:
+ #def create_permissions_compat(app, **kwargs):
+ #from django.db.models import get_app
+ #from django.contrib.auth.management import create_permissions
+ #create_permissions(get_app(app), (), 0)
+ #post_migrate.connect(create_permissions_compat)
diff --git a/lib/python2.7/site-packages/south/test_shim.py b/lib/python2.7/site-packages/south/test_shim.py
new file mode 100644
index 0000000..fdf39f3
--- /dev/null
+++ b/lib/python2.7/site-packages/south/test_shim.py
@@ -0,0 +1,6 @@
+"""
+This file is needed as 1.6 only finds tests in files labelled test_*,
+and ignores tests/__init__.py.
+"""
+
+from south.tests import *
diff --git a/lib/python2.7/site-packages/south/tests/__init__.py b/lib/python2.7/site-packages/south/tests/__init__.py
new file mode 100644
index 0000000..26779e3
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/__init__.py
@@ -0,0 +1,109 @@
+from __future__ import print_function
+
+#import unittest
+import os
+import sys
+from functools import wraps
+from django.conf import settings
+from south.hacks import hacks
+
+# Make sure skipping tests is available.
+try:
+ # easiest and best is unittest included in Django>=1.3
+ from django.utils import unittest
+except ImportError:
+ # earlier django... use unittest from stdlib
+ import unittest
+# however, skipUnless was only added in Python 2.7;
+# if not available, we need to do something else
try:
    # Use the real decorator when the unittest in use provides it.
    skipUnless = unittest.skipUnless #@UnusedVariable
except AttributeError:
    # Fallback for unittest without skip support (pre-Python-2.7 stdlib):
    # run the test only when the condition holds, otherwise print a notice.
    def skipUnless(condition, message):
        def decorator(testfunc):
            @wraps(testfunc)
            def wrapper(self):
                if condition:
                    # Apply method
                    testfunc(self)
                else:
                    # The skip exceptions are not available either...
                    print("Skipping", testfunc.__name__,"--", message)
            return wrapper
        return decorator

# ditto for skipIf
try:
    skipIf = unittest.skipIf #@UnusedVariable
except AttributeError:
    # Fallback: the inverse of skipUnless above.
    def skipIf(condition, message):
        def decorator(testfunc):
            @wraps(testfunc)
            def wrapper(self):
                if condition:
                    print("Skipping", testfunc.__name__,"--", message)
                else:
                    # Apply method
                    testfunc(self)
            return wrapper
        return decorator
+
+# Add the tests directory so fakeapp is on sys.path
+test_root = os.path.dirname(__file__)
+sys.path.append(test_root)
+
+# Note: the individual test files are imported below this.
+
class Monkeypatcher(unittest.TestCase):

    """
    Base test class for tests that play with the INSTALLED_APPS setting at runtime.

    Subclasses set an 'installed_apps' attribute; setUp/tearDown then swap
    that list in and restore the original app-cache state afterwards.
    """

    def create_fake_app(self, name):
        # Build a minimal stand-in for an app module: just a __name__ and,
        # when importable, its migrations package.

        class Fake:
            pass

        fake = Fake()
        fake.__name__ = name
        try:
            fake.migrations = __import__(name + ".migrations", {}, {}, ['migrations'])
        except ImportError:
            # No migrations package for this fake app; that's fine.
            pass
        return fake

    def setUp(self):
        """
        Changes the Django environment so we can run tests against our test apps.
        """
        if hasattr(self, 'installed_apps'):
            hacks.store_app_cache_state()
            hacks.set_installed_apps(self.installed_apps)
            # Make sure dependencies are calculated for new apps
            # NOTE(review): 'Migrations' is not among this module's visible
            # imports -- confirm it is brought into scope elsewhere.
            Migrations._dependencies_done = False

    def tearDown(self):
        """
        Undoes what setUp did.
        """
        if hasattr(self, 'installed_apps'):
            hacks.reset_installed_apps()
            hacks.restore_app_cache_state()
+
+
# Try importing all tests if asked for (then we can run 'em)
try:
    skiptest = settings.SKIP_SOUTH_TESTS
except Exception:
    # The flag may be absent, or settings may be entirely unconfigured
    # (ImproperlyConfigured); default to skipping the suite.
    # (Was a bare 'except:', which would also have swallowed SystemExit
    # and KeyboardInterrupt.)
    skiptest = True

if not skiptest:
    from south.tests.db import *
    from south.tests.db_mysql import *
    from south.tests.db_firebird import *
    from south.tests.logic import *
    from south.tests.autodetection import *
    from south.tests.logger import *
    from south.tests.inspector import *
    from south.tests.freezer import *
diff --git a/lib/python2.7/site-packages/south/tests/autodetection.py b/lib/python2.7/site-packages/south/tests/autodetection.py
new file mode 100644
index 0000000..c320d3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/autodetection.py
@@ -0,0 +1,360 @@
+from south.tests import unittest
+
+from south.creator.changes import AutoChanges, InitialChanges
+from south.migration.base import Migrations
+from south.tests import Monkeypatcher
+from south.creator import freezer
+from south.orm import FakeORM
+from south.v2 import SchemaMigration
+
+try:
+ from django.utils.six.moves import reload_module
+except ImportError:
+ # Older django, no python3 support
+ reload_module = reload
+
class TestComparison(unittest.TestCase):

    """
    Tests the comparison methods of startmigration.
    """

    def check_difference(self, old, new, expected):
        # One table row per scenario: compare two frozen field triples.
        self.assertEqual(AutoChanges.different_attributes(old, new), expected)

    def test_no_change(self):
        "Test with a completely unchanged definition."
        unchanged = [
            (
                ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['southdemo.Lizard']"}),
                ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['southdemo.Lizard']"}),
            ),
            # Keyword order must not matter.
            (
                ('django.db.models.fields.related.ForeignKey', ['ohhai', 'there'], {'to': "somewhere", "from": "there"}),
                ('django.db.models.fields.related.ForeignKey', ['ohhai', 'there'], {"from": "there", 'to': "somewhere"}),
            ),
        ]
        for old, new in unchanged:
            self.check_difference(old, new, False)

    def test_pos_change(self):
        "Test with a changed positional argument."
        cases = [
            (('django.db.models.fields.CharField', ['hi'], {'to': "foo"}),
             ('django.db.models.fields.CharField', [], {'to': "foo"}), True),
            (('django.db.models.fields.CharField', [], {'to': "foo"}),
             ('django.db.models.fields.CharField', ['bye'], {'to': "foo"}), True),
            (('django.db.models.fields.CharField', ['pi'], {'to': "foo"}),
             ('django.db.models.fields.CharField', ['pi'], {'to': "foo"}), False),
            (('django.db.models.fields.CharField', ['pisdadad'], {'to': "foo"}),
             ('django.db.models.fields.CharField', ['pi'], {'to': "foo"}), True),
            (('django.db.models.fields.CharField', ['hi'], {}),
             ('django.db.models.fields.CharField', [], {}), True),
            (('django.db.models.fields.CharField', [], {}),
             ('django.db.models.fields.CharField', ['bye'], {}), True),
            (('django.db.models.fields.CharField', ['pi'], {}),
             ('django.db.models.fields.CharField', ['pi'], {}), False),
            (('django.db.models.fields.CharField', ['pi'], {}),
             ('django.db.models.fields.CharField', ['45fdfdf'], {}), True),
        ]
        for old, new, expected in cases:
            self.check_difference(old, new, expected)

    def test_kwd_change(self):
        "Test a changed keyword argument"
        cases = [
            (('django.db.models.fields.CharField', ['pi'], {'to': "foo"}),
             ('django.db.models.fields.CharField', ['pi'], {'to': "blue"}), True),
            (('django.db.models.fields.CharField', [], {'to': "foo"}),
             ('django.db.models.fields.CharField', [], {'to': "blue"}), True),
            (('django.db.models.fields.CharField', ['b'], {'to': "foo"}),
             ('django.db.models.fields.CharField', ['b'], {'to': "blue"}), True),
            (('django.db.models.fields.CharField', [], {'to': "foo"}),
             ('django.db.models.fields.CharField', [], {}), True),
            (('django.db.models.fields.CharField', ['a'], {'to': "foo"}),
             ('django.db.models.fields.CharField', ['a'], {}), True),
            (('django.db.models.fields.CharField', [], {}),
             ('django.db.models.fields.CharField', [], {'to': "foo"}), True),
            (('django.db.models.fields.CharField', ['a'], {}),
             ('django.db.models.fields.CharField', ['a'], {'to': "foo"}), True),
        ]
        for old, new, expected in cases:
            self.check_difference(old, new, expected)

    def test_backcompat_nochange(self):
        "Test that the backwards-compatable comparison is working"
        cases = [
            (('models.CharField', [], {}),
             ('django.db.models.fields.CharField', [], {}), False),
            (('models.CharField', ['ack'], {}),
             ('django.db.models.fields.CharField', ['ack'], {}), False),
            (('models.CharField', [], {'to':'b'}),
             ('django.db.models.fields.CharField', [], {'to':'b'}), False),
            (('models.CharField', ['hah'], {'to':'you'}),
             ('django.db.models.fields.CharField', ['hah'], {'to':'you'}), False),
            (('models.CharField', ['hah'], {'to':'you'}),
             ('django.db.models.fields.CharField', ['hah'], {'to':'heh'}), True),
            # A positional 'to' matching an orm[...] keyword counts as equal.
            (('models.CharField', ['hah'], {}),
             ('django.db.models.fields.CharField', [], {'to':"orm['appname.hah']"}), False),
            (('models.CharField', ['hah'], {}),
             ('django.db.models.fields.CharField', [], {'to':'hah'}), True),
            (('models.CharField', ['hah'], {}),
             ('django.db.models.fields.CharField', [], {'to':'rrr'}), True),
            (('models.CharField', ['hah'], {}),
             ('django.db.models.fields.IntField', [], {'to':'hah'}), True),
        ]
        for old, new, expected in cases:
            self.check_difference(old, new, expected)
+
class TestNonManagedIgnored(Monkeypatcher):

    """
    Checks that a model whose frozen Meta carries managed = False is
    invisible to the change detectors: never created, deleted or altered.
    """

    installed_apps = ["non_managed"]

    # Frozen definition of the non-managed model, including all its fields.
    full_defs = {
        'non_managed.legacy': {
            'Meta': {'object_name': 'Legacy', 'db_table': "'legacy_table'", 'managed': 'False'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True'}),
            'size': ('django.db.models.fields.IntegerField', [], {})
        }
    }

    def test_not_added_init(self):

        # An initial migration for the app must produce no changes.
        migrations = Migrations("non_managed")
        changes = InitialChanges(migrations)
        change_list = changes.get_changes()
        if list(change_list):
            self.fail("Initial migration creates table for non-managed model")

    def test_not_added_auto(self):

        # Going from no models to the non-managed model must produce nothing.
        empty_defs = { }
        class EmptyMigration(SchemaMigration):
            "Serves as fake previous migration"

            def forwards(self, orm):
                pass

            def backwards(self, orm):
                pass

            models = empty_defs

            complete_apps = ['non_managed']

        migrations = Migrations("non_managed")
        empty_orm = FakeORM(EmptyMigration, "non_managed")
        changes = AutoChanges(
            migrations = migrations,
            old_defs = empty_defs,
            old_orm = empty_orm,
            new_defs = self.full_defs,
        )
        change_list = changes.get_changes()
        if list(change_list):
            self.fail("Auto migration creates table for non-managed model")

    def test_not_deleted_auto(self):

        # Going from the non-managed model to no models must produce nothing.
        empty_defs = { }
        # NOTE(review): old_defs is assigned but unused below (old_defs is
        # passed self.full_defs instead) -- confirm this is intentional.
        old_defs = freezer.freeze_apps(["non_managed"])
        class InitialMigration(SchemaMigration):
            "Serves as fake previous migration"

            def forwards(self, orm):
                pass

            def backwards(self, orm):
                pass

            models = self.full_defs

            complete_apps = ['non_managed']

        migrations = Migrations("non_managed")
        initial_orm = FakeORM(InitialMigration, "non_managed")
        changes = AutoChanges(
            migrations = migrations,
            old_defs = self.full_defs,
            old_orm = initial_orm,
            new_defs = empty_defs,
        )
        change_list = changes.get_changes()
        if list(change_list):
            self.fail("Auto migration deletes table for non-managed model")

    def test_not_modified_auto(self):

        # A field added to the non-managed model must also be ignored.
        fake_defs = {
            'non_managed.legacy': {
                'Meta': {'object_name': 'Legacy', 'db_table': "'legacy_table'", 'managed': 'False'},
                'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
                'name': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True'}),
                #'size': ('django.db.models.fields.IntegerField', [], {}) # The "change" is the addition of this field
            }
        }
        class InitialMigration(SchemaMigration):
            "Serves as fake previous migration"

            def forwards(self, orm):
                pass

            def backwards(self, orm):
                pass

            models = fake_defs

            complete_apps = ['non_managed']

        from non_managed import models as dummy_import_to_force_loading_models # TODO: Does needing this indicate a bug in MokeyPatcher?
        reload_module(dummy_import_to_force_loading_models) # really force...

        migrations = Migrations("non_managed")
        initial_orm = FakeORM(InitialMigration, "non_managed")
        changes = AutoChanges(
            migrations = migrations,
            old_defs = fake_defs,
            old_orm = initial_orm,
            new_defs = self.full_defs
        )
        change_list = changes.get_changes()
        if list(change_list):
            self.fail("Auto migration changes table for non-managed model")
diff --git a/lib/python2.7/site-packages/south/tests/brokenapp/__init__.py b/lib/python2.7/site-packages/south/tests/brokenapp/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/brokenapp/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/brokenapp/migrations/0001_depends_on_unmigrated.py b/lib/python2.7/site-packages/south/tests/brokenapp/migrations/0001_depends_on_unmigrated.py
new file mode 100644
index 0000000..d53f836
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/brokenapp/migrations/0001_depends_on_unmigrated.py
@@ -0,0 +1,13 @@
+from south.db import db
+from django.db import models
+
class Migration:
    # Test fixture: a no-op migration that depends on an app ('unknown')
    # that has no migrations at all.

    depends_on = [('unknown', '0001_initial')]

    def forwards(self):
        pass

    def backwards(self):
        pass
+
diff --git a/lib/python2.7/site-packages/south/tests/brokenapp/migrations/0002_depends_on_unknown.py b/lib/python2.7/site-packages/south/tests/brokenapp/migrations/0002_depends_on_unknown.py
new file mode 100644
index 0000000..389af80
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/brokenapp/migrations/0002_depends_on_unknown.py
@@ -0,0 +1,13 @@
+from south.db import db
+from django.db import models
+
class Migration:
    # Test fixture: a no-op migration that depends on a migration number
    # ('9999_unknown') that does not exist in fakeapp.

    depends_on = [('fakeapp', '9999_unknown')]

    def forwards(self):
        pass

    def backwards(self):
        pass
+
diff --git a/lib/python2.7/site-packages/south/tests/brokenapp/migrations/0003_depends_on_higher.py b/lib/python2.7/site-packages/south/tests/brokenapp/migrations/0003_depends_on_higher.py
new file mode 100644
index 0000000..319069b
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/brokenapp/migrations/0003_depends_on_higher.py
@@ -0,0 +1,13 @@
+from south.db import db
+from django.db import models
+
class Migration:
    # Test fixture: a no-op migration that depends on a later
    # (higher-numbered) migration within its own app.

    depends_on = [('brokenapp', '0004_higher')]

    def forwards(self):
        pass

    def backwards(self):
        pass
+
diff --git a/lib/python2.7/site-packages/south/tests/brokenapp/migrations/0004_higher.py b/lib/python2.7/site-packages/south/tests/brokenapp/migrations/0004_higher.py
new file mode 100644
index 0000000..d27ed3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/brokenapp/migrations/0004_higher.py
@@ -0,0 +1,11 @@
+from south.db import db
+from django.db import models
+
class Migration:
    # Test fixture: plain no-op migration that 0003 depends on.

    def forwards(self):
        pass

    def backwards(self):
        pass
+
diff --git a/lib/python2.7/site-packages/south/tests/brokenapp/migrations/__init__.py b/lib/python2.7/site-packages/south/tests/brokenapp/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/brokenapp/migrations/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/brokenapp/models.py b/lib/python2.7/site-packages/south/tests/brokenapp/models.py
new file mode 100644
index 0000000..a7d84dc
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/brokenapp/models.py
@@ -0,0 +1,55 @@
+# -*- coding: UTF-8 -*-
+
+from django.db import models
+from django.contrib.auth.models import User as UserAlias
+
def default_func():
    """Callable used as a field default (see HorribleModel.func)."""
    return "yays"
+
# An empty case: a model with no fields of its own (only the implicit pk).
class Other1(models.Model): pass
+
+# Nastiness.
class HorribleModel(models.Model):
    "A model to test the edge cases of model parsing"
    # Deliberately pathological fixture: unusual defaults, Unicode,
    # non-field class attributes and odd source layout.

    ZERO, ONE = range(2)

    # First, some nice fields
    name = models.CharField(max_length=255)
    short_name = models.CharField(max_length=50)
    slug = models.SlugField(unique=True)

    # A ForeignKey, to a model above, and then below
    o1 = models.ForeignKey(Other1)
    o2 = models.ForeignKey('Other2')

    # Now to something outside
    user = models.ForeignKey(UserAlias, related_name="horribles")

    # Unicode!
    code = models.CharField(max_length=25, default="↑↑↓↓←→←→BA")

    # Odd defaults! (a class attribute, and a callable)
    class_attr = models.IntegerField(default=ZERO)
    func = models.CharField(max_length=25, default=default_func)

    # Time to get nasty. Define a non-field choices, and use it
    choices = [('hello', '1'), ('world', '2')]
    choiced = models.CharField(max_length=20, choices=choices)

    class Meta:
        db_table = "my_fave"
        # verbose_name built from concatenated string literals, spanning lines
        verbose_name = "Dr. Strangelove," + \
        """or how I learned to stop worrying
and love the bomb"""

    # Now spread over multiple lines
    multiline = \
        models.TextField(
        )
+
# Special case.
class Other2(models.Model):
    # Try loading a field without a newline after it (inspect hates this);
    # the file deliberately ends with no trailing newline.
    close_but_no_cigar = models.PositiveIntegerField(primary_key=True)
diff --git a/lib/python2.7/site-packages/south/tests/circular_a/__init__.py b/lib/python2.7/site-packages/south/tests/circular_a/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/circular_a/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/circular_a/migrations/0001_first.py b/lib/python2.7/site-packages/south/tests/circular_a/migrations/0001_first.py
new file mode 100644
index 0000000..b0d90eb
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/circular_a/migrations/0001_first.py
@@ -0,0 +1,13 @@
+from south.db import db
+from django.db import models
+
class Migration:
    # Test fixture: half of a circular dependency pair with circular_b.

    depends_on = [('circular_b', '0001_first')]

    def forwards(self):
        pass

    def backwards(self):
        pass
+
diff --git a/lib/python2.7/site-packages/south/tests/circular_a/migrations/__init__.py b/lib/python2.7/site-packages/south/tests/circular_a/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/circular_a/migrations/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/circular_a/models.py b/lib/python2.7/site-packages/south/tests/circular_a/models.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/circular_a/models.py
diff --git a/lib/python2.7/site-packages/south/tests/circular_b/__init__.py b/lib/python2.7/site-packages/south/tests/circular_b/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/circular_b/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/circular_b/migrations/0001_first.py b/lib/python2.7/site-packages/south/tests/circular_b/migrations/0001_first.py
new file mode 100644
index 0000000..b11b120
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/circular_b/migrations/0001_first.py
@@ -0,0 +1,13 @@
+from south.db import db
+from django.db import models
+
+class Migration:
+
+ depends_on = [('circular_a', '0001_first')]
+
+ def forwards(self):
+ pass
+
+ def backwards(self):
+ pass
+
diff --git a/lib/python2.7/site-packages/south/tests/circular_b/migrations/__init__.py b/lib/python2.7/site-packages/south/tests/circular_b/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/circular_b/migrations/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/circular_b/models.py b/lib/python2.7/site-packages/south/tests/circular_b/models.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/circular_b/models.py
diff --git a/lib/python2.7/site-packages/south/tests/db.py b/lib/python2.7/site-packages/south/tests/db.py
new file mode 100644
index 0000000..e63c563
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/db.py
@@ -0,0 +1,1060 @@
+import datetime
+from warnings import filterwarnings
+
+from south.db import db, generic
+from django.db import connection, models, IntegrityError as DjangoIntegrityError
+
+from south.tests import unittest, skipIf, skipUnless
+from south.utils.py3 import text_type, with_metaclass
+
+# Create a list of error classes from the various database libraries
+errors = []
+try:
+ from psycopg2 import ProgrammingError
+ errors.append(ProgrammingError)
+except ImportError:
+ pass
+errors = tuple(errors)
+
+# On SQL Server, the backend's IntegrityError is not (a subclass of) Django's.
+try:
+ from sql_server.pyodbc.base import IntegrityError as SQLServerIntegrityError
+ IntegrityError = (DjangoIntegrityError, SQLServerIntegrityError)
+except ImportError:
+ IntegrityError = DjangoIntegrityError
+
+try:
+ from south.db import mysql
+except ImportError:
+ mysql = None
+
+
+class TestOperations(unittest.TestCase):
+
+ """
+ Tests if the various DB abstraction calls work.
+ Can only test a limited amount due to DB differences.
+ """
+
+ def setUp(self):
+ db.debug = False
+ try:
+ import MySQLdb
+ except ImportError:
+ pass
+ else:
+ filterwarnings('ignore', category=MySQLdb.Warning)
+ db.clear_deferred_sql()
+ db.start_transaction()
+
+ def tearDown(self):
+ db.rollback_transaction()
+
+ def test_create(self):
+ """
+ Test creation of tables.
+ """
+ cursor = connection.cursor()
+ # It needs to take at least 2 args
+ self.assertRaises(TypeError, db.create_table)
+ self.assertRaises(TypeError, db.create_table, "test1")
+ # Empty tables (i.e. no columns) are not fine, so make at least 1
+ db.create_table("test1", [('email_confirmed', models.BooleanField(default=False))])
+ # And should exist
+ cursor.execute("SELECT * FROM test1")
+ # Make sure we can't do the same query on an empty table
+ try:
+ cursor.execute("SELECT * FROM nottheretest1")
+ except:
+ pass
+ else:
+ self.fail("Non-existent table could be selected!")
+
+ @skipUnless(db.raises_default_errors, 'This database does not raise errors on missing defaults.')
+ def test_create_default(self):
+ """
+ Test creation of tables, make sure defaults are not left in the database
+ """
+ db.create_table("test_create_default", [('a', models.IntegerField()),
+ ('b', models.IntegerField(default=17))])
+ cursor = connection.cursor()
+ self.assertRaises(IntegrityError, cursor.execute, "INSERT INTO test_create_default(a) VALUES (17)")
+
+ def test_delete(self):
+ """
+ Test deletion of tables.
+ """
+ cursor = connection.cursor()
+ db.create_table("test_deltable", [('email_confirmed', models.BooleanField(default=False))])
+ db.delete_table("test_deltable")
+ # Make sure it went
+ try:
+ cursor.execute("SELECT * FROM test_deltable")
+ except:
+ pass
+ else:
+ self.fail("Just-deleted table could be selected!")
+
+ def test_nonexistent_delete(self):
+ """
+ Test deletion of nonexistent tables.
+ """
+ try:
+ db.delete_table("test_nonexistdeltable")
+ except:
+ pass
+ else:
+ self.fail("Non-existent table could be deleted!")
+
+ def test_foreign_keys(self):
+ """
+ Tests foreign key creation, especially uppercase (see #61)
+ """
+ Test = db.mock_model(model_name='Test', db_table='test5a',
+ db_tablespace='', pk_field_name='ID',
+ pk_field_type=models.AutoField, pk_field_args=[])
+ db.create_table("test5a", [('ID', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True))])
+ db.create_table("test5b", [
+ ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
+ ('UNIQUE', models.ForeignKey(Test)),
+ ])
+ db.execute_deferred_sql()
+
+ @skipUnless(db.supports_foreign_keys, 'Foreign keys can only be deleted on '
+ 'engines that support them.')
+ def test_recursive_foreign_key_delete(self):
+ """
+ Test that recursive foreign keys are deleted correctly (see #1065)
+ """
+ Test = db.mock_model(model_name='Test', db_table='test_rec_fk_del',
+ db_tablespace='', pk_field_name='id',
+ pk_field_type=models.AutoField, pk_field_args=[])
+ db.create_table('test_rec_fk_del', [
+ ('id', models.AutoField(primary_key=True, auto_created=True)),
+ ('fk', models.ForeignKey(Test)),
+ ])
+ db.execute_deferred_sql()
+ db.delete_foreign_key('test_rec_fk_del', 'fk_id')
+
+ def test_rename(self):
+ """
+ Test column renaming
+ """
+ cursor = connection.cursor()
+ db.create_table("test_rn", [('spam', models.BooleanField(default=False))])
+ # Make sure we can select the column
+ cursor.execute("SELECT spam FROM test_rn")
+ # Rename it
+ db.rename_column("test_rn", "spam", "eggs")
+ cursor.execute("SELECT eggs FROM test_rn")
+ db.commit_transaction()
+ db.start_transaction()
+ try:
+ cursor.execute("SELECT spam FROM test_rn")
+ except:
+ pass
+ else:
+ self.fail("Just-renamed column could be selected!")
+ db.rollback_transaction()
+ db.delete_table("test_rn")
+ db.start_transaction()
+
+ def test_dry_rename(self):
+ """
+ Test column renaming while --dry-run is turned on (should do nothing)
+ See ticket #65
+ """
+ cursor = connection.cursor()
+ db.create_table("test_drn", [('spam', models.BooleanField(default=False))])
+ # Make sure we can select the column
+ cursor.execute("SELECT spam FROM test_drn")
+ # Rename it
+ db.dry_run = True
+ db.rename_column("test_drn", "spam", "eggs")
+ db.dry_run = False
+ cursor.execute("SELECT spam FROM test_drn")
+ db.commit_transaction()
+ db.start_transaction()
+ try:
+ cursor.execute("SELECT eggs FROM test_drn")
+ except:
+ pass
+ else:
+ self.fail("Dry-renamed new column could be selected!")
+ db.rollback_transaction()
+ db.delete_table("test_drn")
+ db.start_transaction()
+
+ def test_table_rename(self):
+ """
+ Test column renaming
+ """
+ cursor = connection.cursor()
+ db.create_table("testtr", [('spam', models.BooleanField(default=False))])
+ # Make sure we can select the column
+ cursor.execute("SELECT spam FROM testtr")
+ # Rename it
+ db.rename_table("testtr", "testtr2")
+ cursor.execute("SELECT spam FROM testtr2")
+ db.commit_transaction()
+ db.start_transaction()
+ try:
+ cursor.execute("SELECT spam FROM testtr")
+ except:
+ pass
+ else:
+ self.fail("Just-renamed column could be selected!")
+ db.rollback_transaction()
+ db.delete_table("testtr2")
+ db.start_transaction()
+
+ def test_percents_in_defaults(self):
+ """
+ Test that % in a default gets escaped to %%.
+ """
+ try:
+ db.create_table("testpind", [('cf', models.CharField(max_length=255, default="It should be 2%!"))])
+ except IndexError:
+ self.fail("% was not properly escaped in column SQL.")
+ db.delete_table("testpind")
+
+ def test_index(self):
+ """
+ Test the index operations
+ """
+ db.create_table("test3", [
+ ('SELECT', models.BooleanField(default=False)),
+ ('eggs', models.IntegerField(unique=True)),
+ ])
+ db.execute_deferred_sql()
+ # Add an index on that column
+ db.create_index("test3", ["SELECT"])
+ # Add another index on two columns
+ db.create_index("test3", ["SELECT", "eggs"])
+ # Delete them both
+ db.delete_index("test3", ["SELECT"])
+ db.delete_index("test3", ["SELECT", "eggs"])
+ # Delete the unique index/constraint
+ if db.backend_name != "sqlite3":
+ db.delete_unique("test3", ["eggs"])
+ db.delete_table("test3")
+
+ def test_primary_key(self):
+ """
+ Test the primary key operations
+ """
+
+ db.create_table("test_pk", [
+ ('id', models.IntegerField(primary_key=True)),
+ ('new_pkey', models.IntegerField()),
+ ('eggs', models.IntegerField(unique=True)),
+ ])
+ db.execute_deferred_sql()
+ # Remove the default primary key, and make eggs it
+ db.delete_primary_key("test_pk")
+ db.create_primary_key("test_pk", "new_pkey")
+ # Try inserting a now-valid row pair
+ db.execute("INSERT INTO test_pk (id, new_pkey, eggs) VALUES (1, 2, 3)")
+ db.execute("INSERT INTO test_pk (id, new_pkey, eggs) VALUES (1, 3, 4)")
+ db.delete_table("test_pk")
+
+ def test_primary_key_implicit(self):
+ """
+ Tests that changing primary key implicitly fails.
+ """
+ db.create_table("test_pki", [
+ ('id', models.IntegerField(primary_key=True)),
+ ('new_pkey', models.IntegerField()),
+ ('eggs', models.IntegerField(unique=True)),
+ ])
+ db.execute_deferred_sql()
+ # Fiddle with alter_column to attempt to make it remove the primary key
+ db.alter_column("test_pki", "id", models.IntegerField())
+ db.alter_column("test_pki", "new_pkey", models.IntegerField(primary_key=True))
+ # Try inserting a should-be-valid row pair
+ db.execute("INSERT INTO test_pki (id, new_pkey, eggs) VALUES (1, 2, 3)")
+ db.execute("INSERT INTO test_pki (id, new_pkey, eggs) VALUES (2, 2, 4)")
+ db.delete_table("test_pki")
+
+ def test_add_columns(self):
+ """
+ Test adding columns
+ """
+ db.create_table("test_addc", [
+ ('spam', models.BooleanField(default=False)),
+ ('eggs', models.IntegerField()),
+ ])
+ # Add a column
+ db.add_column("test_addc", "add1", models.IntegerField(default=3))
+ User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
+ # insert some data so we can test the default value of the added fkey
+ db.execute("INSERT INTO test_addc (spam, eggs, add1) VALUES (%s, 1, 2)", [False])
+ db.add_column("test_addc", "user", models.ForeignKey(User, null=True))
+ db.execute_deferred_sql()
+ # try selecting from the user_id column to make sure it was actually created
+ val = db.execute("SELECT user_id FROM test_addc")[0][0]
+ self.assertEquals(val, None)
+ db.delete_column("test_addc", "add1")
+ # make sure adding an indexed field works
+ db.add_column("test_addc", "add2", models.CharField(max_length=15, db_index=True, default='pi'))
+ db.execute_deferred_sql()
+ db.delete_table("test_addc")
+
+ def test_delete_columns(self):
+ """
+ Test deleting columns
+ """
+ db.create_table("test_delc", [
+ ('spam', models.BooleanField(default=False)),
+ ('eggs', models.IntegerField(db_index=True, unique=True)),
+ ])
+ db.delete_column("test_delc", "eggs")
+
+ def test_add_nullbool_column(self):
+ """
+ Test adding NullBoolean columns
+ """
+ db.create_table("test_addnbc", [
+ ('spam', models.BooleanField(default=False)),
+ ('eggs', models.IntegerField()),
+ ])
+ # Add a column
+ db.add_column("test_addnbc", "add1", models.NullBooleanField())
+ # Add a column with a default
+ db.add_column("test_addnbc", "add2", models.NullBooleanField(default=True))
+ # insert some data so we can test the default values of the added column
+ db.execute("INSERT INTO test_addnbc (spam, eggs) VALUES (%s, 1)", [False])
+ # try selecting from the new columns to make sure they were properly created
+ false, null1, null2 = db.execute("SELECT spam,add1,add2 FROM test_addnbc")[0][0:3]
+ self.assertIsNone(null1, "Null boolean field with no value inserted returns non-null")
+ self.assertIsNone(null2, "Null boolean field (added with default) with no value inserted returns non-null")
+ self.assertEquals(false, False)
+ db.delete_table("test_addnbc")
+
+ def test_alter_columns(self):
+ """
+ Test altering columns
+ """
+ db.create_table("test_alterc", [
+ ('spam', models.BooleanField(default=False)),
+ ('eggs', models.IntegerField()),
+ ])
+ db.execute_deferred_sql()
+ # Change eggs to be a FloatField
+ db.alter_column("test_alterc", "eggs", models.FloatField())
+ db.execute_deferred_sql()
+ db.delete_table("test_alterc")
+ db.execute_deferred_sql()
+
+ def test_alter_char_default(self):
+ """
+ Test altering column defaults with char fields
+ """
+ db.create_table("test_altercd", [
+ ('spam', models.CharField(max_length=30)),
+ ('eggs', models.IntegerField()),
+ ])
+ # Change spam default
+ db.alter_column("test_altercd", "spam", models.CharField(max_length=30, default="loof", null=True))
+ # Assert the default is not in the database
+ db.execute("INSERT INTO test_altercd (eggs) values (12)")
+ null = db.execute("SELECT spam FROM test_altercd")[0][0]
+ self.assertFalse(null, "Default for char field was installed into database")
+
+ # Change again to a column with default and not null
+ db.alter_column("test_altercd", "spam", models.CharField(max_length=30, default="loof", null=False))
+ # Assert the default is not in the database
+ if 'oracle' in db.backend_name:
+ # Oracle special treatment -- nulls are always allowed in char columns, so
+ # inserting doesn't raise an integrity error; so we check again as above
+ db.execute("DELETE FROM test_altercd")
+ db.execute("INSERT INTO test_altercd (eggs) values (12)")
+ null = db.execute("SELECT spam FROM test_altercd")[0][0]
+ self.assertFalse(null, "Default for char field was installed into database")
+ else:
+ # For other backends, insert should now just fail
+ self.assertRaises(IntegrityError,
+ db.execute, "INSERT INTO test_altercd (eggs) values (12)")
+
+    @skipIf('oracle' in db.backend_name, "Oracle does not differentiate empty strings from null")
+ def test_default_empty_string(self):
+ """
+ Test altering column defaults with char fields
+ """
+ db.create_table("test_cd_empty", [
+ ('spam', models.CharField(max_length=30, default='')),
+ ('eggs', models.CharField(max_length=30)),
+ ])
+ # Create a record
+ db.execute("INSERT INTO test_cd_empty (spam, eggs) values ('1','2')")
+ # Add a column
+ db.add_column("test_cd_empty", "ham", models.CharField(max_length=30, default=''))
+
+ empty = db.execute("SELECT ham FROM test_cd_empty")[0][0]
+ self.assertEquals(empty, "", "Empty Default for char field isn't empty string")
+
+    @skipUnless('oracle' in db.backend_name, "Oracle does not differentiate empty strings from null")
+ def test_oracle_strings_null(self):
+ """
+        Test that under Oracle, CharFields are created as null even when specified not-null,
+ because otherwise they would not be able to hold empty strings (which Oracle equates
+ with nulls).
+ Verify fix of #1269.
+ """
+ db.create_table("test_ora_char_nulls", [
+ ('spam', models.CharField(max_length=30, null=True)),
+ ('eggs', models.CharField(max_length=30)),
+ ])
+ db.add_column("test_ora_char_nulls", "ham", models.CharField(max_length=30))
+ db.alter_column("test_ora_char_nulls", "spam", models.CharField(max_length=30, null=False))
+ # So, by the look of it, we should now have three not-null columns
+ db.execute("INSERT INTO test_ora_char_nulls VALUES (NULL, NULL, NULL)")
+
+
+ def test_mysql_defaults(self):
+ """
+ Test MySQL default handling for BLOB and TEXT.
+ """
+ db.create_table("test_altermyd", [
+ ('spam', models.BooleanField(default=False)),
+ ('eggs', models.TextField()),
+ ])
+ # Change eggs to be a FloatField
+ db.alter_column("test_altermyd", "eggs", models.TextField(null=True))
+ db.delete_table("test_altermyd")
+
+ def test_alter_column_postgres_multiword(self):
+ """
+ Tests altering columns with multiple words in Postgres types (issue #125)
+ e.g. 'datetime with time zone', look at django/db/backends/postgresql/creation.py
+ """
+ db.create_table("test_multiword", [
+ ('col_datetime', models.DateTimeField(null=True)),
+ ('col_integer', models.PositiveIntegerField(null=True)),
+ ('col_smallint', models.PositiveSmallIntegerField(null=True)),
+ ('col_float', models.FloatField(null=True)),
+ ])
+
+ # test if 'double precision' is preserved
+ db.alter_column('test_multiword', 'col_float', models.FloatField('float', null=True))
+
+ # test if 'CHECK ("%(column)s" >= 0)' is stripped
+ db.alter_column('test_multiword', 'col_integer', models.PositiveIntegerField(null=True))
+ db.alter_column('test_multiword', 'col_smallint', models.PositiveSmallIntegerField(null=True))
+
+ # test if 'with timezone' is preserved
+ if db.backend_name == "postgres":
+ db.execute("INSERT INTO test_multiword (col_datetime) VALUES ('2009-04-24 14:20:55+02')")
+ db.alter_column('test_multiword', 'col_datetime', models.DateTimeField(auto_now=True))
+ assert db.execute("SELECT col_datetime = '2009-04-24 14:20:55+02' FROM test_multiword")[0][0]
+
+ db.delete_table("test_multiword")
+
+ @skipUnless(db.has_check_constraints, 'Only applies to databases that '
+ 'support CHECK constraints.')
+ def test_alter_constraints(self):
+ """
+        Tests that going from a PositiveIntegerField to an IntegerField drops
+ the constraint on the database.
+ """
+ # Make the test table
+ db.create_table("test_alterc", [
+ ('num', models.PositiveIntegerField()),
+ ])
+ db.execute_deferred_sql()
+ # Add in some test values
+ db.execute("INSERT INTO test_alterc (num) VALUES (1)")
+ db.execute("INSERT INTO test_alterc (num) VALUES (2)")
+ # Ensure that adding a negative number is bad
+ db.commit_transaction()
+ db.start_transaction()
+ try:
+ db.execute("INSERT INTO test_alterc (num) VALUES (-3)")
+ except:
+ db.rollback_transaction()
+ else:
+ self.fail("Could insert a negative integer into a PositiveIntegerField.")
+ # Alter it to a normal IntegerField
+ db.alter_column("test_alterc", "num", models.IntegerField())
+ db.execute_deferred_sql()
+ # It should now work
+ db.execute("INSERT INTO test_alterc (num) VALUES (-3)")
+ db.delete_table("test_alterc")
+ # We need to match up for tearDown
+ db.start_transaction()
+
+ @skipIf(db.backend_name == "sqlite3", "SQLite backend doesn't support this "
+ "yet.")
+ def test_unique(self):
+ """
+ Tests creating/deleting unique constraints.
+ """
+ db.create_table("test_unique2", [
+ ('id', models.AutoField(primary_key=True)),
+ ])
+ db.create_table("test_unique", [
+ ('spam', models.BooleanField(default=False)),
+ ('eggs', models.IntegerField()),
+ ('ham', models.ForeignKey(db.mock_model('Unique2', 'test_unique2'))),
+ ])
+ db.execute_deferred_sql()
+ # Add a constraint
+ db.create_unique("test_unique", ["spam"])
+ db.execute_deferred_sql()
+ # Shouldn't do anything during dry-run
+ db.dry_run = True
+ db.delete_unique("test_unique", ["spam"])
+ db.dry_run = False
+ db.delete_unique("test_unique", ["spam"])
+ db.create_unique("test_unique", ["spam"])
+ # Special preparations for Sql Server
+ if db.backend_name == "pyodbc":
+ db.execute("SET IDENTITY_INSERT test_unique2 ON;")
+ db.execute("INSERT INTO test_unique2 (id) VALUES (1)")
+ db.execute("INSERT INTO test_unique2 (id) VALUES (2)")
+ db.commit_transaction()
+ db.start_transaction()
+
+
+ # Test it works
+ TRUE = (True,)
+ FALSE = (False,)
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 2)", FALSE)
+ try:
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 2, 1)", FALSE)
+ except:
+ db.rollback_transaction()
+ else:
+ self.fail("Could insert non-unique item.")
+
+ # Drop that, add one only on eggs
+ db.delete_unique("test_unique", ["spam"])
+ db.execute("DELETE FROM test_unique")
+ db.create_unique("test_unique", ["eggs"])
+ db.start_transaction()
+
+ # Test similarly
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 2)", FALSE)
+ try:
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 1)", TRUE)
+ except:
+ db.rollback_transaction()
+ else:
+ self.fail("Could insert non-unique item.")
+
+ # Drop those, test combined constraints
+ db.delete_unique("test_unique", ["eggs"])
+ db.execute("DELETE FROM test_unique")
+ db.create_unique("test_unique", ["spam", "eggs", "ham_id"])
+ db.start_transaction()
+ # Test similarly
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 1)", FALSE)
+ try:
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
+ except:
+ db.rollback_transaction()
+ else:
+ self.fail("Could insert non-unique pair.")
+ db.delete_unique("test_unique", ["spam", "eggs", "ham_id"])
+ db.start_transaction()
+
+ def test_alter_unique(self):
+ """
+ Tests that unique constraints are not affected when
+ altering columns (that's handled by create_/delete_unique)
+ """
+ db.create_table("test_alter_unique", [
+ ('spam', models.IntegerField()),
+ ('eggs', models.IntegerField(unique=True)),
+ ])
+ db.execute_deferred_sql()
+
+ # Make sure the unique constraint is created
+ db.execute('INSERT INTO test_alter_unique (spam, eggs) VALUES (0, 42)')
+ db.commit_transaction()
+ db.start_transaction()
+ try:
+ db.execute("INSERT INTO test_alter_unique (spam, eggs) VALUES (1, 42)")
+ except:
+ pass
+ else:
+ self.fail("Could insert the same integer twice into a unique field.")
+ db.rollback_transaction()
+
+ # Alter without unique=True (should not affect anything)
+ db.alter_column("test_alter_unique", "eggs", models.IntegerField())
+
+ # Insertion should still fail
+ db.start_transaction()
+ try:
+ db.execute("INSERT INTO test_alter_unique (spam, eggs) VALUES (1, 42)")
+ except:
+ pass
+ else:
+ self.fail("Could insert the same integer twice into a unique field after alter_column with unique=False.")
+ db.rollback_transaction()
+
+ # Delete the unique index/constraint
+ if db.backend_name != "sqlite3":
+ db.delete_unique("test_alter_unique", ["eggs"])
+ db.delete_table("test_alter_unique")
+ db.start_transaction()
+
+ # Test multi-field constraint
+ db.create_table("test_alter_unique2", [
+ ('spam', models.IntegerField()),
+ ('eggs', models.IntegerField()),
+ ])
+ db.create_unique('test_alter_unique2', ('spam', 'eggs'))
+ db.execute_deferred_sql()
+ db.execute('INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 42)')
+ db.commit_transaction()
+ # Verify that constraint works
+ db.start_transaction()
+ try:
+ db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (1, 42)")
+ except:
+ self.fail("Looks like multi-field unique constraint applied to only one field.")
+ db.rollback_transaction()
+ db.start_transaction()
+ try:
+ db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 43)")
+ except:
+ self.fail("Looks like multi-field unique constraint applied to only one field.")
+ db.rollback_transaction()
+ db.start_transaction()
+ try:
+ db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 42)")
+ except:
+ pass
+ else:
+ self.fail("Could insert the same pair twice into unique-together fields.")
+ db.rollback_transaction()
+ # Altering one column should not drop or modify multi-column constraint
+ db.alter_column("test_alter_unique2", "eggs", models.PositiveIntegerField())
+ db.start_transaction()
+ try:
+ db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (1, 42)")
+ except:
+            self.fail("Altering one column broke the multi-column unique constraint.")
+ db.rollback_transaction()
+ db.start_transaction()
+ try:
+ db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 43)")
+ except:
+            self.fail("Altering one column broke the multi-column unique constraint.")
+ db.rollback_transaction()
+ db.start_transaction()
+ try:
+ db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 42)")
+ except:
+ pass
+ else:
+ self.fail("Could insert the same pair twice into unique-together fields after alter_column with unique=False.")
+ db.rollback_transaction()
+ db.delete_table("test_alter_unique2")
+ db.start_transaction()
+
+ def test_capitalised_constraints(self):
+ """
+ Under PostgreSQL at least, capitalised constraints must be quoted.
+ """
+ db.create_table("test_capconst", [
+ ('SOMECOL', models.PositiveIntegerField(primary_key=True)),
+ ])
+ # Alter it so it's not got the check constraint
+ db.alter_column("test_capconst", "SOMECOL", models.IntegerField())
+
+ def test_text_default(self):
+ """
+ MySQL cannot have blank defaults on TEXT columns.
+ """
+ db.create_table("test_textdef", [
+ ('textcol', models.TextField(blank=True)),
+ ])
+
+ def test_text_to_char(self):
+ """
+ On Oracle, you can't simply ALTER TABLE MODIFY a textfield to a charfield
+ """
+ value = "kawabanga"
+ db.create_table("test_text_to_char", [
+ ('textcol', models.TextField()),
+ ])
+ db.execute_deferred_sql()
+ db.execute("INSERT INTO test_text_to_char VALUES (%s)", [value])
+ db.alter_column("test_text_to_char", "textcol", models.CharField(max_length=100))
+ db.execute_deferred_sql()
+ after = db.execute("select * from test_text_to_char")[0][0]
+ self.assertEqual(value, after, "Change from text to char altered value [ %r != %r ]" % (value, after))
+
+ def test_char_to_text(self):
+ """
+ On Oracle, you can't simply ALTER TABLE MODIFY a charfield to a textfield either
+ """
+ value = "agnabawak"
+ db.create_table("test_char_to_text", [
+ ('textcol', models.CharField(max_length=100)),
+ ])
+ db.execute_deferred_sql()
+ db.execute("INSERT INTO test_char_to_text VALUES (%s)", [value])
+ db.alter_column("test_char_to_text", "textcol", models.TextField())
+ db.execute_deferred_sql()
+ after = db.execute("select * from test_char_to_text")[0][0]
+ after = text_type(after) # Oracle text fields return a sort of lazy string -- force evaluation
+ self.assertEqual(value, after, "Change from char to text altered value [ %r != %r ]" % (value, after))
+
+ @skipUnless(db.raises_default_errors, 'This database does not raise errors on missing defaults.')
+ def test_datetime_default(self):
+ """
+ Test that defaults are correctly not created for datetime columns
+ """
+ end_of_world = datetime.datetime(2012, 12, 21, 0, 0, 1)
+
+ try:
+ from django.utils import timezone
+ except ImportError:
+ pass
+ else:
+ from django.conf import settings
+ if getattr(settings, 'USE_TZ', False):
+ end_of_world = end_of_world.replace(tzinfo=timezone.utc)
+
+ db.create_table("test_datetime_def", [
+ ('col0', models.IntegerField(null=True)),
+ ('col1', models.DateTimeField(default=end_of_world)),
+ ('col2', models.DateTimeField(null=True)),
+ ])
+ db.execute_deferred_sql()
+ # insert a row
+ db.execute("INSERT INTO test_datetime_def (col0, col1, col2) values (null,%s,null)", [end_of_world])
+ db.alter_column("test_datetime_def", "col2", models.DateTimeField(default=end_of_world))
+ db.add_column("test_datetime_def", "col3", models.DateTimeField(default=end_of_world))
+ db.execute_deferred_sql()
+ db.commit_transaction()
+ # In the single existing row, we now expect col1=col2=col3=end_of_world...
+ db.start_transaction()
+ ends = db.execute("select col1,col2,col3 from test_datetime_def")[0]
+ self.failUnlessEqual(len(ends), 3)
+ for e in ends:
+ self.failUnlessEqual(e, end_of_world)
+ db.commit_transaction()
+ # ...but there should not be a default in the database for col1 or col3
+ for cols in ["col1,col2", "col2,col3"]:
+ db.start_transaction()
+ statement = "insert into test_datetime_def (col0,%s) values (null,%%s,%%s)" % cols
+ self.assertRaises(
+ IntegrityError,
+ db.execute, statement, [end_of_world, end_of_world]
+ )
+ db.rollback_transaction()
+
+ db.start_transaction() # To preserve the sanity and semantics of this test class
+
+ def test_add_unique_fk(self):
+ """
+ Test adding a ForeignKey with unique=True or a OneToOneField
+ """
+ db.create_table("test_add_unique_fk", [
+ ('spam', models.BooleanField(default=False))
+ ])
+
+ db.add_column("test_add_unique_fk", "mock1", models.ForeignKey(db.mock_model('User', 'auth_user'), null=True, unique=True))
+ db.add_column("test_add_unique_fk", "mock2", models.OneToOneField(db.mock_model('User', 'auth_user'), null=True))
+ db.execute_deferred_sql()
+
+ db.delete_table("test_add_unique_fk")
+
+ @skipUnless(db.has_check_constraints, 'Only applies to databases that '
+ 'support CHECK constraints.')
+ def test_column_constraint(self):
+ """
+ Tests that the value constraint of PositiveIntegerField is enforced on
+ the database level.
+ """
+ db.create_table("test_column_constraint", [
+ ('spam', models.PositiveIntegerField()),
+ ])
+ db.execute_deferred_sql()
+
+ # Make sure we can't insert negative values
+ db.commit_transaction()
+ db.start_transaction()
+ try:
+ db.execute("INSERT INTO test_column_constraint VALUES (-42)")
+ except:
+ pass
+ else:
+ self.fail("Could insert a negative value into a PositiveIntegerField.")
+ db.rollback_transaction()
+
+ # remove constraint
+ db.alter_column("test_column_constraint", "spam", models.IntegerField())
+ db.execute_deferred_sql()
+ # make sure the insertion works now
+ db.execute('INSERT INTO test_column_constraint VALUES (-42)')
+ db.execute('DELETE FROM test_column_constraint')
+
+ # add it back again
+ db.alter_column("test_column_constraint", "spam", models.PositiveIntegerField())
+ db.execute_deferred_sql()
+ # it should fail again
+ db.start_transaction()
+ try:
+ db.execute("INSERT INTO test_column_constraint VALUES (-42)")
+ except:
+ pass
+ else:
+ self.fail("Could insert a negative value after changing an IntegerField to a PositiveIntegerField.")
+ db.rollback_transaction()
+
+ db.delete_table("test_column_constraint")
+ db.start_transaction()
+
+ def test_sql_defaults(self):
+ """
+ Test that sql default value is correct for non-string field types.
+ Datetimes are handled in test_datetime_default.
+ """
+
+ class CustomField(with_metaclass(models.SubfieldBase, models.CharField)):
+ description = 'CustomField'
+ def get_default(self):
+ if self.has_default():
+ if callable(self.default):
+ return self.default()
+ return self.default
+ return super(CustomField, self).get_default()
+ def get_prep_value(self, value):
+ if not value:
+ return value
+ return ','.join(map(str, value))
+ def to_python(self, value):
+ if not value or isinstance(value, list):
+ return value
+ return list(map(int, value.split(',')))
+
+ false_value = db.has_booleans and 'False' or '0'
+ defaults = (
+ (models.CharField(default='sukasuka'), 'DEFAULT \'sukasuka'),
+ (models.BooleanField(default=False), 'DEFAULT %s' % false_value),
+ (models.IntegerField(default=42), 'DEFAULT 42'),
+ (CustomField(default=[2012, 2018, 2021, 2036]), 'DEFAULT \'2012,2018,2021,2036')
+ )
+ for field, sql_test_str in defaults:
+ sql = db.column_sql('fish', 'YAAAAAAZ', field)
+ if sql_test_str not in sql:
+ self.fail("default sql value was not properly generated for field %r.\nSql was %s" % (field, sql))
+
+ def test_make_added_foreign_key_not_null(self):
+ # Table for FK to target
+ User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
+ # Table with no foreign key
+ db.create_table("test_fk", [
+ ('eggs', models.IntegerField()),
+ ])
+ db.execute_deferred_sql()
+
+ # Add foreign key
+ db.add_column("test_fk", 'foreik', models.ForeignKey(User, null=True))
+ db.execute_deferred_sql()
+
+ # Make the FK not null
+ db.alter_column("test_fk", "foreik_id", models.ForeignKey(User))
+ db.execute_deferred_sql()
+
+ def test_make_foreign_key_null(self):
+ # Table for FK to target
+ User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
+ # Table with no foreign key
+ db.create_table("test_make_fk_null", [
+ ('eggs', models.IntegerField()),
+ ('foreik', models.ForeignKey(User))
+ ])
+ db.execute_deferred_sql()
+
+ # Make the FK null
+ db.alter_column("test_make_fk_null", "foreik_id", models.ForeignKey(User, null=True))
+ db.execute_deferred_sql()
+
    def test_change_foreign_key_target(self):
        """Re-point an existing FK column at a different table, then prove the
        new constraint holds by inserting an id that is valid only in the new
        target table (it would violate an FK still aimed at auth_user)."""
        # Tables for FK to target
        User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
        db.create_table("test_fk_changed_target", [
            ('eggs', models.IntegerField(primary_key=True)),
        ])
        Egg = db.mock_model(model_name='Egg', db_table='test_fk_changed_target', db_tablespace='', pk_field_name='eggs', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
        # Table with a foreign key to the wrong table
        db.create_table("test_fk_changing", [
            ('egg', models.ForeignKey(User, null=True)),
        ])
        db.execute_deferred_sql()

        # Change foreign key pointing
        db.alter_column("test_fk_changing", "egg_id", models.ForeignKey(Egg, null=True))
        db.execute_deferred_sql()

        # Test that it is pointing at the right table now: pick an id that is
        # guaranteed not to exist in auth_user (MAX(id) + 1).
        try:
            non_user_id = db.execute("SELECT MAX(id) FROM auth_user")[0][0] + 1
        except (TypeError, IndexError):
            # MAX(id) came back as NULL or there were no rows; any id works then.
            non_user_id = 17
        db.execute("INSERT INTO test_fk_changed_target (eggs) VALUES (%s)", [non_user_id])
        db.execute("INSERT INTO test_fk_changing (egg_id) VALUES (%s)", [non_user_id])
        db.commit_transaction()
        db.start_transaction() # The test framework expects tests to end in transaction
+
+ def test_alter_double_indexed_column(self):
+ # Table for FK to target
+ User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
+ # Table with no foreign key
+ db.create_table("test_2indexed", [
+ ('eggs', models.IntegerField()),
+ ('foreik', models.ForeignKey(User))
+ ])
+ db.create_unique("test_2indexed", ["eggs", "foreik_id"])
+ db.execute_deferred_sql()
+
+ # Make the FK null
+ db.alter_column("test_2indexed", "foreik_id", models.ForeignKey(User, null=True))
+ db.execute_deferred_sql()
+
class TestCacheGeneric(unittest.TestCase):
    """Exercises the constraint-cache decorators on generic.DatabaseOperations.

    setUp builds a stub subclass (CacheOps) whose _fill_constraint_cache is
    replaced by a counter, so each test can observe exactly when the cache is
    (re)filled without touching a real database connection.
    """
    # Subclasses (e.g. the MySQL variant) override this to test their backend.
    base_ops_cls = generic.DatabaseOperations
    def setUp(self):
        class CacheOps(self.base_ops_cls):
            def __init__(self):
                # Deliberately skips the real __init__: no db connection here.
                self._constraint_cache = {}
                self.cache_filled = 0
                self.settings = {'NAME': 'db'}

            def _fill_constraint_cache(self, db, table):
                # Count the fill, then vivify empty (db, table) cache slots
                # the same way the real implementation would.
                self.cache_filled += 1
                self._constraint_cache.setdefault(db, {})
                self._constraint_cache[db].setdefault(table, {})

            # No-op operations: only the decorators' cache bookkeeping runs.
            @generic.invalidate_table_constraints
            def clear_con(self, table):
                pass

            @generic.copy_column_constraints
            def cp_column(self, table, column_old, column_new):
                pass

            @generic.delete_column_constraints
            def rm_column(self, table, column):
                pass

            @generic.copy_column_constraints
            @generic.delete_column_constraints
            def mv_column(self, table, column_old, column_new):
                pass

            def _get_setting(self, attr):
                return self.settings[attr]
        self.CacheOps = CacheOps

    def test_cache(self):
        """Lookups fill the cache once; invalidation forces a refill."""
        ops = self.CacheOps()
        self.assertEqual(0, ops.cache_filled)
        self.assertFalse(ops.lookup_constraint('db', 'table'))
        self.assertEqual(1, ops.cache_filled)
        # Second lookup is served from cache -- no refill.
        self.assertFalse(ops.lookup_constraint('db', 'table'))
        self.assertEqual(1, ops.cache_filled)
        ops.clear_con('table')
        self.assertEqual(1, ops.cache_filled)
        # After invalidation the next lookup refills.
        self.assertFalse(ops.lookup_constraint('db', 'table'))
        self.assertEqual(2, ops.cache_filled)
        self.assertFalse(ops.lookup_constraint('db', 'table', 'column'))
        self.assertEqual(2, ops.cache_filled)

        # Seed a constraint directly into the cache and read it back both ways.
        cache = ops._constraint_cache
        cache['db']['table']['column'] = 'constraint'
        self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column'))
        self.assertEqual([('column', 'constraint')], ops.lookup_constraint('db', 'table'))
        self.assertEqual(2, ops.cache_filled)

        # invalidate_table_constraints: clearing another table must not
        # invalidate this one.
        ops.clear_con('new_table')
        self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column'))
        self.assertEqual(2, ops.cache_filled)

        self.assertFalse(ops.lookup_constraint('db', 'new_table'))
        self.assertEqual(3, ops.cache_filled)

        # delete_column_constraints drops just that column's entry.
        cache['db']['table']['column'] = 'constraint'
        self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column'))
        ops.rm_column('table', 'column')
        self.assertEqual([], ops.lookup_constraint('db', 'table', 'column'))
        self.assertEqual([], ops.lookup_constraint('db', 'table', 'noexist_column'))

        # copy_column_constraints duplicates the entry under the new name.
        cache['db']['table']['column'] = 'constraint'
        self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column'))
        ops.cp_column('table', 'column', 'column_new')
        self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column_new'))
        self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column'))

        # copy + delete stacked = a move.
        cache['db']['table']['column'] = 'constraint'
        self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column'))
        ops.mv_column('table', 'column', 'column_new')
        self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column_new'))
        self.assertEqual([], ops.lookup_constraint('db', 'table', 'column'))

    def test_valid(self):
        """Only a real lookup marks a table's cache valid."""
        ops = self.CacheOps()
        # none of these should vivify a table into a valid state
        self.assertFalse(ops._is_valid_cache('db', 'table'))
        self.assertFalse(ops._is_valid_cache('db', 'table'))
        ops.clear_con('table')
        self.assertFalse(ops._is_valid_cache('db', 'table'))
        ops.rm_column('table', 'column')
        self.assertFalse(ops._is_valid_cache('db', 'table'))

        # these should change the cache state
        ops.lookup_constraint('db', 'table')
        self.assertTrue(ops._is_valid_cache('db', 'table'))
        ops.lookup_constraint('db', 'table', 'column')
        self.assertTrue(ops._is_valid_cache('db', 'table'))
        ops.clear_con('table')
        self.assertFalse(ops._is_valid_cache('db', 'table'))

    def test_valid_implementation(self):
        # generic fills the cache on a per-table basis: looking up one table
        # must not validate any other.
        ops = self.CacheOps()
        self.assertFalse(ops._is_valid_cache('db', 'table'))
        self.assertFalse(ops._is_valid_cache('db', 'other_table'))
        ops.lookup_constraint('db', 'table')
        self.assertTrue(ops._is_valid_cache('db', 'table'))
        self.assertFalse(ops._is_valid_cache('db', 'other_table'))
        ops.lookup_constraint('db', 'other_table')
        self.assertTrue(ops._is_valid_cache('db', 'table'))
        self.assertTrue(ops._is_valid_cache('db', 'other_table'))
        ops.clear_con('table')
        self.assertFalse(ops._is_valid_cache('db', 'table'))
        self.assertTrue(ops._is_valid_cache('db', 'other_table'))
+
if mysql:
    class TestCacheMysql(TestCacheGeneric):
        """Re-runs the generic constraint-cache tests against the MySQL
        operations class (only defined when the mysql backend imported)."""
        base_ops_cls = mysql.DatabaseOperations

        def test_valid_implementation(self):
            # mysql fills the cache on a per-db basis, so (unlike generic)
            # looking up one table also validates the db's other tables.
            ops = self.CacheOps()
            self.assertFalse(ops._is_valid_cache('db', 'table'))
            self.assertFalse(ops._is_valid_cache('db', 'other_table'))
            ops.lookup_constraint('db', 'table')
            self.assertTrue(ops._is_valid_cache('db', 'table'))
            self.assertTrue(ops._is_valid_cache('db', 'other_table'))
            ops.lookup_constraint('db', 'other_table')
            self.assertTrue(ops._is_valid_cache('db', 'table'))
            self.assertTrue(ops._is_valid_cache('db', 'other_table'))
            ops.clear_con('table')
            self.assertFalse(ops._is_valid_cache('db', 'table'))
            self.assertTrue(ops._is_valid_cache('db', 'other_table'))
diff --git a/lib/python2.7/site-packages/south/tests/db_firebird.py b/lib/python2.7/site-packages/south/tests/db_firebird.py
new file mode 100644
index 0000000..2b6bd53
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/db_firebird.py
@@ -0,0 +1,39 @@
+from django.db import models
+
+from south.db import db
+from south.tests import unittest, skipUnless
+
+
class FirebirdTests(unittest.TestCase):

    """
    Tests firebird related issues
    """

    def setUp(self):
        # Noisy visual separators around each Firebird test run.
        print('=' * 80)
        print('Begin Firebird test')

    def tearDown(self):
        print('End Firebird test')
        print('=' * 80)

    @skipUnless(db.backend_name == "firebird", "Firebird-only test")
    def test_firebird_double_index_creation_1317(self):
        """
        Tests foreign key creation, especially uppercase (see #61)
        """
        # 'ID' (uppercase) and 'UNIQUE' (a reserved word) are deliberately
        # awkward identifiers to stress the backend's quoting when the FK
        # and its index are created.
        Test = db.mock_model(model_name='Test',
                db_table='test5a',
                db_tablespace='',
                pk_field_name='ID',
                pk_field_type=models.AutoField,
                pk_field_args=[]
            )
        db.create_table("test5a", [('ID', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True))])
        db.create_table("test5b", [
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('UNIQUE', models.ForeignKey(Test)),
        ])
        db.execute_deferred_sql()
+
diff --git a/lib/python2.7/site-packages/south/tests/db_mysql.py b/lib/python2.7/site-packages/south/tests/db_mysql.py
new file mode 100644
index 0000000..e83596c
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/db_mysql.py
@@ -0,0 +1,164 @@
+# Additional MySQL-specific tests
+# Written by: F. Gabriel Gosselin <gabrielNOSPAM@evidens.ca>
+# Based on tests by: aarranz
+from south.tests import unittest, skipUnless
+
+
+from south.db import db, generic, mysql
+from django.db import connection, models
+
+from south.utils.py3 import with_metaclass
+
+
+# A class decoration may be used in lieu of this when Python 2.5 is the
+# minimum.
class TestMySQLOperationsMeta(type):
    """Metaclass that wraps every test* method in a MySQL-only skip guard."""

    def __new__(mcs, name, bases, dict_):
        # Skip all tests unless the active backend is MySQL.
        mysql_only = skipUnless(db.backend_name == "mysql", 'MySQL-specific tests')

        namespace = {
            attr: mysql_only(value) if attr.startswith('test') else value
            for attr, value in dict_.items()
        }

        return type.__new__(mcs, name, bases, namespace)
+
class TestMySQLOperations(with_metaclass(TestMySQLOperationsMeta, unittest.TestCase)):
    """MySQL-specific tests for constraint introspection and the rename/delete
    operations that rely on it.

    Every test* method is wrapped by TestMySQLOperationsMeta, so the whole
    class is skipped unless the active backend is MySQL.

    Fix: ``assertEquals`` is a deprecated alias of ``assertEqual`` (removed in
    Python 3.12); all assertions now use ``assertEqual``.
    """

    def setUp(self):
        # Keep output quiet and make sure no deferred SQL leaks in from a
        # previous test.
        db.debug = False
        db.clear_deferred_sql()

    def tearDown(self):
        pass

    def _create_foreign_tables(self, main_name, reference_name):
        """Create ``reference_name`` (id-only table) and ``main_name`` with a
        ForeignKey to it; return the mocked referenced model."""
        # Create foreign table and model
        Foreign = db.mock_model(model_name='Foreign', db_table=reference_name,
                                db_tablespace='', pk_field_name='id',
                                pk_field_type=models.AutoField,
                                pk_field_args=[])
        db.create_table(reference_name, [
            ('id', models.AutoField(primary_key=True)),
        ])
        # Create table with foreign key
        db.create_table(main_name, [
            ('id', models.AutoField(primary_key=True)),
            ('foreign', models.ForeignKey(Foreign)),
        ])
        return Foreign

    def test_constraint_references(self):
        """Tests that referred table is reported accurately"""
        main_table = 'test_cns_ref'
        reference_table = 'test_cr_foreign'
        db.start_transaction()
        self._create_foreign_tables(main_table, reference_table)
        db.execute_deferred_sql()
        constraint = db._find_foreign_constraints(main_table, 'foreign_id')[0]
        references = db._lookup_constraint_references(main_table, constraint)
        self.assertEqual((reference_table, 'id'), references)
        db.delete_table(main_table)
        db.delete_table(reference_table)

    def test_reverse_column_constraint(self):
        """Tests that referred column in a foreign key (ex. id) is found"""
        main_table = 'test_reverse_ref'
        reference_table = 'test_rr_foreign'
        db.start_transaction()
        self._create_foreign_tables(main_table, reference_table)
        db.execute_deferred_sql()
        inverse = db._lookup_reverse_constraint(reference_table, 'id')
        (cname, rev_table, rev_column) = inverse[0]
        self.assertEqual(main_table, rev_table)
        self.assertEqual('foreign_id', rev_column)
        db.delete_table(main_table)
        db.delete_table(reference_table)

    def test_delete_fk_column(self):
        """Dropping an FK column also drops its foreign-key constraint."""
        main_table = 'test_drop_foreign'
        ref_table = 'test_df_ref'
        self._create_foreign_tables(main_table, ref_table)
        db.execute_deferred_sql()
        constraints = db._find_foreign_constraints(main_table, 'foreign_id')
        self.assertEqual(len(constraints), 1)
        db.delete_column(main_table, 'foreign_id')
        constraints = db._find_foreign_constraints(main_table, 'foreign_id')
        self.assertEqual(len(constraints), 0)
        db.delete_table(main_table)
        db.delete_table(ref_table)

    def test_rename_fk_column(self):
        """Renaming an FK column recreates its constraint under the new name."""
        main_table = 'test_rename_foreign'
        ref_table = 'test_rf_ref'
        self._create_foreign_tables(main_table, ref_table)
        db.execute_deferred_sql()
        constraints = db._find_foreign_constraints(main_table, 'foreign_id')
        self.assertEqual(len(constraints), 1)
        db.rename_column(main_table, 'foreign_id', 'reference_id')
        db.execute_deferred_sql()  # Create constraints
        constraints = db._find_foreign_constraints(main_table, 'reference_id')
        self.assertEqual(len(constraints), 1)
        db.delete_table(main_table)
        db.delete_table(ref_table)

    def test_rename_fk_inbound(self):
        """
        Tests that the column referred to by an external column can be renamed.
        Edge case, but also useful as stepping stone to renaming tables.
        """
        main_table = 'test_rename_fk_inbound'
        ref_table = 'test_rfi_ref'
        self._create_foreign_tables(main_table, ref_table)
        db.execute_deferred_sql()
        constraints = db._lookup_reverse_constraint(ref_table, 'id')
        self.assertEqual(len(constraints), 1)
        db.rename_column(ref_table, 'id', 'rfi_id')
        db.execute_deferred_sql()  # Create constraints
        constraints = db._lookup_reverse_constraint(ref_table, 'rfi_id')
        self.assertEqual(len(constraints), 1)
        # The inbound FK must now reference the renamed column.
        cname = db._find_foreign_constraints(main_table, 'foreign_id')[0]
        (rtable, rcolumn) = db._lookup_constraint_references(main_table, cname)
        self.assertEqual(rcolumn, 'rfi_id')
        db.delete_table(main_table)
        db.delete_table(ref_table)

    def test_rename_constrained_table(self):
        """Renames a table with a foreign key column (towards another table)"""
        main_table = 'test_rn_table'
        ref_table = 'test_rt_ref'
        renamed_table = 'test_renamed_table'
        self._create_foreign_tables(main_table, ref_table)
        db.execute_deferred_sql()
        constraints = db._find_foreign_constraints(main_table, 'foreign_id')
        self.assertEqual(len(constraints), 1)
        db.rename_table(main_table, renamed_table)
        db.execute_deferred_sql()  # Create constraints
        constraints = db._find_foreign_constraints(renamed_table, 'foreign_id')
        self.assertEqual(len(constraints), 1)
        (rtable, rcolumn) = db._lookup_constraint_references(
            renamed_table, constraints[0])
        self.assertEqual(rcolumn, 'id')
        db.delete_table(renamed_table)
        db.delete_table(ref_table)

    def test_renamed_referenced_table(self):
        """Rename a table referred to in a foreign key"""
        main_table = 'test_rn_refd_table'
        ref_table = 'test_rrt_ref'
        renamed_table = 'test_renamed_ref'
        self._create_foreign_tables(main_table, ref_table)
        db.execute_deferred_sql()
        constraints = db._lookup_reverse_constraint(ref_table)
        self.assertEqual(len(constraints), 1)
        db.rename_table(ref_table, renamed_table)
        db.execute_deferred_sql()  # Create constraints
        constraints = db._find_foreign_constraints(main_table, 'foreign_id')
        self.assertEqual(len(constraints), 1)
        (rtable, rcolumn) = db._lookup_constraint_references(
            main_table, constraints[0])
        self.assertEqual(renamed_table, rtable)
        db.delete_table(main_table)
        db.delete_table(renamed_table)
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_a/__init__.py b/lib/python2.7/site-packages/south/tests/deps_a/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_a/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/deps_a/migrations/0001_a.py b/lib/python2.7/site-packages/south/tests/deps_a/migrations/0001_a.py
new file mode 100644
index 0000000..d27ed3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_a/migrations/0001_a.py
@@ -0,0 +1,11 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Inert migration fixture (deps_a/0001); only its series position matters."""

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_a/migrations/0002_a.py b/lib/python2.7/site-packages/south/tests/deps_a/migrations/0002_a.py
new file mode 100644
index 0000000..d27ed3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_a/migrations/0002_a.py
@@ -0,0 +1,11 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Inert migration fixture (deps_a/0002); only its series position matters."""

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_a/migrations/0003_a.py b/lib/python2.7/site-packages/south/tests/deps_a/migrations/0003_a.py
new file mode 100644
index 0000000..d27ed3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_a/migrations/0003_a.py
@@ -0,0 +1,11 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Inert migration fixture (deps_a/0003); only its series position matters."""

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_a/migrations/0004_a.py b/lib/python2.7/site-packages/south/tests/deps_a/migrations/0004_a.py
new file mode 100644
index 0000000..e5c2977
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_a/migrations/0004_a.py
@@ -0,0 +1,13 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """No-op fixture (deps_a/0004) that declares a cross-app dependency."""

    # Must not run before deps_b's 0003_b migration.
    depends_on = [('deps_b', '0003_b')]

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_a/migrations/0005_a.py b/lib/python2.7/site-packages/south/tests/deps_a/migrations/0005_a.py
new file mode 100644
index 0000000..d27ed3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_a/migrations/0005_a.py
@@ -0,0 +1,11 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Inert migration fixture (deps_a/0005); only its series position matters."""

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_a/migrations/__init__.py b/lib/python2.7/site-packages/south/tests/deps_a/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_a/migrations/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/deps_a/models.py b/lib/python2.7/site-packages/south/tests/deps_a/models.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_a/models.py
diff --git a/lib/python2.7/site-packages/south/tests/deps_b/__init__.py b/lib/python2.7/site-packages/south/tests/deps_b/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_b/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/deps_b/migrations/0001_b.py b/lib/python2.7/site-packages/south/tests/deps_b/migrations/0001_b.py
new file mode 100644
index 0000000..d27ed3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_b/migrations/0001_b.py
@@ -0,0 +1,11 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Inert migration fixture (deps_b/0001); only its series position matters."""

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_b/migrations/0002_b.py b/lib/python2.7/site-packages/south/tests/deps_b/migrations/0002_b.py
new file mode 100644
index 0000000..459ea5d
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_b/migrations/0002_b.py
@@ -0,0 +1,13 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """No-op fixture (deps_b/0002) that declares a cross-app dependency."""

    # Must not run before deps_a's 0002_a migration.
    depends_on = [('deps_a', '0002_a')]

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_b/migrations/0003_b.py b/lib/python2.7/site-packages/south/tests/deps_b/migrations/0003_b.py
new file mode 100644
index 0000000..1692888
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_b/migrations/0003_b.py
@@ -0,0 +1,13 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """No-op fixture (deps_b/0003) that declares a cross-app dependency."""

    # Must not run before deps_a's 0003_a migration.
    depends_on = [('deps_a', '0003_a')]

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_b/migrations/0004_b.py b/lib/python2.7/site-packages/south/tests/deps_b/migrations/0004_b.py
new file mode 100644
index 0000000..d27ed3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_b/migrations/0004_b.py
@@ -0,0 +1,11 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Inert migration fixture (deps_b/0004); only its series position matters."""

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_b/migrations/0005_b.py b/lib/python2.7/site-packages/south/tests/deps_b/migrations/0005_b.py
new file mode 100644
index 0000000..d27ed3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_b/migrations/0005_b.py
@@ -0,0 +1,11 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Inert migration fixture (deps_b/0005); only its series position matters."""

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_b/migrations/__init__.py b/lib/python2.7/site-packages/south/tests/deps_b/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_b/migrations/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/deps_b/models.py b/lib/python2.7/site-packages/south/tests/deps_b/models.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_b/models.py
diff --git a/lib/python2.7/site-packages/south/tests/deps_c/__init__.py b/lib/python2.7/site-packages/south/tests/deps_c/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_c/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/deps_c/migrations/0001_c.py b/lib/python2.7/site-packages/south/tests/deps_c/migrations/0001_c.py
new file mode 100644
index 0000000..d27ed3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_c/migrations/0001_c.py
@@ -0,0 +1,11 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Inert migration fixture (deps_c/0001); only its series position matters."""

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_c/migrations/0002_c.py b/lib/python2.7/site-packages/south/tests/deps_c/migrations/0002_c.py
new file mode 100644
index 0000000..d27ed3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_c/migrations/0002_c.py
@@ -0,0 +1,11 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Inert migration fixture (deps_c/0002); only its series position matters."""

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_c/migrations/0003_c.py b/lib/python2.7/site-packages/south/tests/deps_c/migrations/0003_c.py
new file mode 100644
index 0000000..d27ed3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_c/migrations/0003_c.py
@@ -0,0 +1,11 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Inert migration fixture (deps_c/0003); only its series position matters."""

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_c/migrations/0004_c.py b/lib/python2.7/site-packages/south/tests/deps_c/migrations/0004_c.py
new file mode 100644
index 0000000..d27ed3a
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_c/migrations/0004_c.py
@@ -0,0 +1,11 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Inert migration fixture (deps_c/0004); only its series position matters."""

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_c/migrations/0005_c.py b/lib/python2.7/site-packages/south/tests/deps_c/migrations/0005_c.py
new file mode 100644
index 0000000..459ea5d
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_c/migrations/0005_c.py
@@ -0,0 +1,13 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """No-op fixture (deps_c/0005) that declares a cross-app dependency."""

    # Must not run before deps_a's 0002_a migration.
    depends_on = [('deps_a', '0002_a')]

    def forwards(self):
        """Apply nothing."""

    def backwards(self):
        """Revert nothing."""
+
diff --git a/lib/python2.7/site-packages/south/tests/deps_c/migrations/__init__.py b/lib/python2.7/site-packages/south/tests/deps_c/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_c/migrations/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/deps_c/models.py b/lib/python2.7/site-packages/south/tests/deps_c/models.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/deps_c/models.py
diff --git a/lib/python2.7/site-packages/south/tests/emptyapp/__init__.py b/lib/python2.7/site-packages/south/tests/emptyapp/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/emptyapp/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/emptyapp/migrations/__init__.py b/lib/python2.7/site-packages/south/tests/emptyapp/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/emptyapp/migrations/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/emptyapp/models.py b/lib/python2.7/site-packages/south/tests/emptyapp/models.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/emptyapp/models.py
diff --git a/lib/python2.7/site-packages/south/tests/fakeapp/__init__.py b/lib/python2.7/site-packages/south/tests/fakeapp/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/fakeapp/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/fakeapp/migrations/0001_spam.py b/lib/python2.7/site-packages/south/tests/fakeapp/migrations/0001_spam.py
new file mode 100644
index 0000000..9739648
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/fakeapp/migrations/0001_spam.py
@@ -0,0 +1,17 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Creates the southtest_spam table (fakeapp model 'Spam')."""

    def forwards(self):
        # Model 'Spam'
        db.create_table("southtest_spam", (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('weight', models.FloatField()),
            ('expires', models.DateTimeField()),
            ('name', models.CharField(max_length=255))
        ))

    def backwards(self):
        db.delete_table("southtest_spam")
+
diff --git a/lib/python2.7/site-packages/south/tests/fakeapp/migrations/0002_eggs.py b/lib/python2.7/site-packages/south/tests/fakeapp/migrations/0002_eggs.py
new file mode 100644
index 0000000..3ec8399
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/fakeapp/migrations/0002_eggs.py
@@ -0,0 +1,20 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Creates southtest_eggs with a ForeignKey to the Spam table from 0001."""

    def forwards(self):

        # Mock model: enough metadata for the FK without importing real Spam.
        Spam = db.mock_model(model_name='Spam', db_table='southtest_spam', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField)

        db.create_table("southtest_eggs", (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('size', models.FloatField()),
            ('quantity', models.IntegerField()),
            ('spam', models.ForeignKey(Spam)),
        ))

    def backwards(self):

        db.delete_table("southtest_eggs")
+
diff --git a/lib/python2.7/site-packages/south/tests/fakeapp/migrations/0003_alter_spam.py b/lib/python2.7/site-packages/south/tests/fakeapp/migrations/0003_alter_spam.py
new file mode 100644
index 0000000..39126c2
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/fakeapp/migrations/0003_alter_spam.py
@@ -0,0 +1,18 @@
+from south.db import db
+from django.db import models
+
class Migration:
    """Makes southtest_spam.weight nullable (and restores NOT NULL on revert)."""

    def forwards(self):

        db.alter_column("southtest_spam", 'weight', models.FloatField(null=True))

    def backwards(self):

        db.alter_column("southtest_spam", 'weight', models.FloatField())

    # Frozen-models snapshot. NOTE(review): inside the class body this name
    # shadows the module-level 'models' import; presumably read as
    # Migration.models by the migration machinery -- verify against callers.
    models = {
        "fakeapp.bug135": {
            'date': ('models.DateTimeField', [], {'default': 'datetime.datetime(2009, 5, 6, 15, 33, 15, 780013)'}),
        }
    }
diff --git a/lib/python2.7/site-packages/south/tests/fakeapp/migrations/__init__.py b/lib/python2.7/site-packages/south/tests/fakeapp/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/fakeapp/migrations/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/fakeapp/models.py b/lib/python2.7/site-packages/south/tests/fakeapp/models.py
new file mode 100644
index 0000000..cc39eb7
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/fakeapp/models.py
@@ -0,0 +1,111 @@
+# -*- coding: UTF-8 -*-
+
+from django.db import models
+from django.contrib.auth.models import User as UserAlias
+
+from south.modelsinspector import add_introspection_rules
+
+on_delete_is_available = hasattr(models, "PROTECT") # models here is django.db.models
+
def default_func():
    """Zero-argument callable used as a field default on HorribleModel.func."""
    value = "yays"
    return value
+
# An empty case: a model with no explicit fields at all.
class Other1(models.Model): pass
+
# Another empty model; the on_delete target/sentinel used below.
class Other3(models.Model): pass
def get_sentinel_object():
    """
    A function to return the object to be used in place of any deleted object,
    when using the SET option for on_delete.
    """
    # Create a new one, so we always have an instance to test with. Can't
    # actually work at runtime (the instance is never saved) -- it only needs
    # to exist for the parsing/introspection tests.
    return Other3()
+
+# Nastiness.
class HorribleModel(models.Model):
    "A model to test the edge cases of model parsing"

    # Non-field class attributes, later used as a field default.
    ZERO, ONE = 0, 1

    # First, some nice fields
    name = models.CharField(max_length=255)
    short_name = models.CharField(max_length=50)
    slug = models.SlugField(unique=True)

    # A ForeignKey, to a model above, and then below (string reference)
    o1 = models.ForeignKey(Other1)
    o2 = models.ForeignKey('Other2')

    # Every on_delete handler variant, guarded for Django versions that
    # predate models.PROTECT (see on_delete_is_available above).
    if on_delete_is_available:
        o_set_null_on_delete = models.ForeignKey('Other3', null=True, on_delete=models.SET_NULL)
        o_cascade_delete = models.ForeignKey('Other3', null=True, on_delete=models.CASCADE, related_name="cascademe")
        o_protect = models.ForeignKey('Other3', null=True, on_delete=models.PROTECT, related_name="dontcascademe")
        o_default_on_delete = models.ForeignKey('Other3', null=True, default=1, on_delete=models.SET_DEFAULT, related_name="setmedefault")
        o_set_on_delete_function = models.ForeignKey('Other3', null=True, default=1, on_delete=models.SET(get_sentinel_object), related_name="setsentinel")
        o_set_on_delete_value = models.ForeignKey('Other3', null=True, default=1, on_delete=models.SET(get_sentinel_object()), related_name="setsentinelwithactualvalue") # dubious case
        o_no_action_on_delete = models.ForeignKey('Other3', null=True, default=1, on_delete=models.DO_NOTHING, related_name="deletemeatyourperil")


    # Now to something outside
    user = models.ForeignKey(UserAlias, related_name="horribles")

    # Unicode!
    code = models.CharField(max_length=25, default="↑↑↓↓←→←→BA")

    # Odd defaults: a class attribute and a callable
    class_attr = models.IntegerField(default=ZERO)
    func = models.CharField(max_length=25, default=default_func)

    # Time to get nasty. Define a non-field choices, and use it
    choices = [('hello', '1'), ('world', '2')]
    choiced = models.CharField(max_length=20, choices=choices)

    class Meta:
        db_table = "my_fave"
        # String concatenation with a line continuation and an embedded
        # triple-quoted literal -- deliberately hard to parse.
        verbose_name = "Dr. Strangelove," + \
            """or how I learned to stop worrying
and love the bomb"""

    # Now spread over multiple lines
    multiline = \
        models.TextField(
        )
+
# Special case.
class Other2(models.Model):
    # Try loading a field without a newline after it (inspect hates this)
    close_but_no_cigar = models.PositiveIntegerField(primary_key=True)
+
class CustomField(models.IntegerField):
    """IntegerField taking one extra, non-field constructor argument."""
    def __init__(self, an_other_model, **kwargs):
        super(CustomField, self).__init__(**kwargs)
        # Stored so the introspection rule registered below can re-freeze it.
        self.an_other_model = an_other_model
+
# Teach South how to freeze CustomField: 'an_other_model' comes from the
# field attribute of the same name, with no extra keyword handling.
# The pattern is a raw string so '\.' is a literal-dot regex escape rather
# than an invalid string escape (SyntaxWarning on modern Python); the
# runtime value is unchanged.
add_introspection_rules([
    (
        [CustomField],
        [],
        {'an_other_model': ('an_other_model', {})},
    ),
], [r'^south\.tests\.fakeapp\.models\.CustomField'])
+
class BaseModel(models.Model):
    # Concrete base for SubModel; exercises inheritance in dependency walks.
    pass
+
class SubModel(BaseModel):
    # Depends on BaseModel (inheritance), Other1 (M2M) and Other2 (via the
    # custom field argument) -- see TestFreezer.test_dependencies.
    others = models.ManyToManyField(Other1)
    custom = CustomField(Other2)
+
class CircularA(models.Model):
    # A -> C: first edge of the A -> C -> B -> A reference cycle.
    c = models.ForeignKey('CircularC')
+
class CircularB(models.Model):
    # B -> A: closes the reference cycle.
    a = models.ForeignKey(CircularA)
+
class CircularC(models.Model):
    # C -> B: middle edge of the cycle.
    b = models.ForeignKey(CircularB)
+
class Recursive(models.Model):
    # Self-referential FK; its dependency set must be just {Recursive}.
    self = models.ForeignKey('self')
diff --git a/lib/python2.7/site-packages/south/tests/freezer.py b/lib/python2.7/site-packages/south/tests/freezer.py
new file mode 100644
index 0000000..82c4402
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/freezer.py
@@ -0,0 +1,15 @@
+from south.tests import unittest
+
+from south.creator.freezer import model_dependencies
+from south.tests.fakeapp import models
+
class TestFreezer(unittest.TestCase):
    """Checks model_dependencies() across inheritance, M2M/custom-field
    references, mutual cycles, and self-reference."""

    def test_dependencies(self):
        cases = [
            (models.SubModel,
             {models.BaseModel, models.Other1, models.Other2}),
            (models.CircularA,
             {models.CircularA, models.CircularB, models.CircularC}),
            (models.Recursive,
             {models.Recursive}),
        ]
        for model, expected in cases:
            self.assertEqual(set(model_dependencies(model)), expected)
diff --git a/lib/python2.7/site-packages/south/tests/inspector.py b/lib/python2.7/site-packages/south/tests/inspector.py
new file mode 100644
index 0000000..dcd6d57
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/inspector.py
@@ -0,0 +1,109 @@
+
+from south.tests import Monkeypatcher, skipUnless
+from south.modelsinspector import (convert_on_delete_handler, get_value,
+ IsDefault, models, value_clean)
+
+from fakeapp.models import HorribleModel, get_sentinel_object
+
+
# Django grew on_delete handlers (PROTECT, SET_NULL, ...) in 1.3; detect
# their presence so the on_delete tests can be skipped on older versions.
on_delete_is_available = hasattr(models, "PROTECT") # models here is django.db.models
skipUnlessOnDeleteAvailable = skipUnless(on_delete_is_available, "not testing on_delete -- not available on Django<1.3")
+
class TestModelInspector(Monkeypatcher):

    """
    Exercises the model-inspector helpers: plain value retrieval, default
    suppression, and on_delete handler conversion.
    """

    def test_get_value(self):
        # Shorthand for field lookup on the fixture model.
        field = HorribleModel._meta.get_field_by_name
        name = field("name")[0]
        slug = field("slug")[0]
        user = field("user")[0]

        # Integers come back as their string representation.
        self.assertEqual("255", get_value(name, ["max_length", {}]))

        # Booleans likewise.
        self.assertEqual("True", get_value(slug, ["unique", {}]))

        # Strings are rendered repr-style, quotes included.
        self.assertEqual("'horribles'", get_value(user, ["rel.related_name", {}]))

        # A non-matching default does not suppress the value...
        self.assertEqual("True", get_value(slug, ["unique", {"default": False}]))
        # ...but a matching one raises IsDefault so it can be omitted.
        self.assertRaises(IsDefault, get_value, slug, ["unique", {"default": True}])

    @skipUnlessOnDeleteAvailable
    def test_get_value_on_delete(self):
        field = HorribleModel._meta.get_field_by_name
        # TODO this is repeated from the introspection_details in
        # modelsinspector: better to refactor that so we can reference these
        # settings, in case they must change at some point.
        on_delete = ["rel.on_delete", {"default": models.CASCADE, "is_django_function": True, "converter": convert_on_delete_handler, }]

        # CASCADE is the default, so it is suppressed entirely.
        self.assertRaises(
            IsDefault,
            get_value,
            field("o_cascade_delete")[0],
            on_delete,
        )
        # Every other handler renders as its models.* name.
        for field_name, rendered in [
            ("o_protect", "models.PROTECT"),
            ("o_no_action_on_delete", "models.DO_NOTHING"),
            ("o_set_null_on_delete", "models.SET_NULL"),
            ("o_default_on_delete", "models.SET_DEFAULT"),
        ]:
            self.assertEqual(rendered,
                             get_value(field(field_name)[0], on_delete))
        # For now SET(callable) cannot be frozen and raises instead of
        # rendering "models.SET(get_sentinel_object)"; see modelsinspector.
        self.assertRaises(
            ValueError,
            get_value,
            field("o_set_on_delete_function")[0],
            on_delete,
        )
        # SET(value) freezes to a SET(...) call around the cleaned value.
        self.assertEqual(
            "models.SET(%s)" % value_clean(get_sentinel_object()),
            get_value(field("o_set_on_delete_value")[0], on_delete),
        )
+ \ No newline at end of file
diff --git a/lib/python2.7/site-packages/south/tests/logger.py b/lib/python2.7/site-packages/south/tests/logger.py
new file mode 100644
index 0000000..78d159d
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/logger.py
@@ -0,0 +1,82 @@
+import io
+import logging
+import os
+import tempfile
+from south.tests import unittest
+import sys
+
+from django.conf import settings
+from django.db import connection, models
+
+from south.db import db
+from south.logger import close_logger
+
class TestLogger(unittest.TestCase):

    """
    Tests if the logging is working reasonably. Some tests ignored if you don't
    have write permission to the disk.
    """

    def setUp(self):
        db.debug = False
        # mkstemp() returns an *open* OS-level file descriptor as well as the
        # path; close it immediately so each test doesn't leak a descriptor.
        handle, self.test_path = tempfile.mkstemp(suffix=".south.log")
        os.close(handle)

    def tearDown(self):
        # Best-effort removal of the per-test log file.
        try:
            os.remove(self.test_path)
        except OSError:
            pass

    def test_db_execute_logging_nofile(self):
        "Does logging degrade nicely if SOUTH_LOGGING_ON not set?"
        settings.SOUTH_LOGGING_ON = False # this needs to be set to False
                                          # to avoid issues where other tests
                                          # set this to True. settings is shared
                                          # between these tests.
        db.create_table("test9", [('email_confirmed', models.BooleanField(default=False))])

    def test_db_execute_logging_off_with_basic_config(self):
        """
        Does the south logger avoid outputing debug information with
        south logging turned off and python logging configured with
        a basic config?"
        """
        settings.SOUTH_LOGGING_ON = False

        # Set root logger to capture WARNING and worse
        logging_stream = io.StringIO()
        logging.basicConfig(stream=logging_stream, level=logging.WARNING)

        db.create_table("test12", [('email_confirmed', models.BooleanField(default=False))])

        # since south logging is off, and our root logger is at WARNING
        # we should not find DEBUG info in the log
        self.assertEqual(logging_stream.getvalue(), '')

    def test_db_execute_logging_validfile(self):
        "Does logging work when passing in a valid file?"
        settings.SOUTH_LOGGING_ON = True
        settings.SOUTH_LOGGING_FILE = self.test_path
        # Check to see if we can make the logfile
        try:
            fh = open(self.test_path, "w")
        except IOError:
            # Permission was denied, ignore the test.
            return
        else:
            fh.close()
        # Do an action which logs
        db.create_table("test10", [('email_confirmed', models.BooleanField(default=False))])
        # Close the logged file
        close_logger()
        try:
            os.remove(self.test_path)
        except OSError:
            # It's a tempfile, it's not vital we remove it.  Catch only
            # OSError: the old bare except here also swallowed things like
            # KeyboardInterrupt.
            pass

    def test_db_execute_logging_missingfilename(self):
        "Does logging raise an error if there is a missing filename?"
        settings.SOUTH_LOGGING_ON = True
        settings.SOUTH_LOGGING_FILE = None
        self.assertRaises(
            IOError,
            db.create_table,
            "test11",
            [('email_confirmed', models.BooleanField(default=False))],
        )
diff --git a/lib/python2.7/site-packages/south/tests/logic.py b/lib/python2.7/site-packages/south/tests/logic.py
new file mode 100644
index 0000000..2b21cef
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/logic.py
@@ -0,0 +1,902 @@
+from south.tests import unittest
+
+import datetime
+import sys
+try:
+ set # builtin, python >=2.6
+except NameError:
+ from sets import Set as set # in stdlib, python >=2.3
+
+from south import exceptions
+from south.migration import migrate_app
+from south.migration.base import all_migrations, Migrations
+from south.creator.changes import ManualChanges
+from south.migration.utils import depends, flatten, get_app_label
+from south.models import MigrationHistory
+from south.tests import Monkeypatcher
+from south.db import db
+
+
+
class TestBrokenMigration(Monkeypatcher):
    # brokenapp's migrations declare dependencies on apps/migrations that
    # don't exist; recalculating must surface that.
    installed_apps = ["fakeapp", "otherfakeapp", "brokenapp"]

    def test_broken_dependencies(self):
        # Forcing recalculation with brokenapp installed must raise on its
        # dependency on an unmigrated application.
        self.assertRaises(
            exceptions.DependsOnUnmigratedApplication,
            Migrations.calculate_dependencies,
            force=True,
        )
        #depends_on_unknown = self.brokenapp['0002_depends_on_unknown']
        #self.assertRaises(exceptions.DependsOnUnknownMigration,
        #                  depends_on_unknown.dependencies)
        #depends_on_higher = self.brokenapp['0003_depends_on_higher']
        #self.assertRaises(exceptions.DependsOnHigherMigration,
        #                  depends_on_higher.dependencies)
+
+
class TestMigration(Monkeypatcher):
    # Exercises the per-Migration API (naming, ordering, dependencies and
    # forward plans) over the fakeapp/otherfakeapp fixtures.
    installed_apps = ["fakeapp", "otherfakeapp"]

    def setUp(self):
        super(TestMigration, self).setUp()
        self.fakeapp = Migrations('fakeapp')
        self.otherfakeapp = Migrations('otherfakeapp')
        Migrations.calculate_dependencies(force=True)

    def test_str(self):
        # str() renders as "app:migration_name".
        migrations = [str(m) for m in self.fakeapp]
        self.assertEqual(['fakeapp:0001_spam',
                          'fakeapp:0002_eggs',
                          'fakeapp:0003_alter_spam'],
                         migrations)

    def test_repr(self):
        migrations = [repr(m) for m in self.fakeapp]
        self.assertEqual(['<Migration: fakeapp:0001_spam>',
                          '<Migration: fakeapp:0002_eggs>',
                          '<Migration: fakeapp:0003_alter_spam>'],
                         migrations)

    def test_app_label(self):
        self.assertEqual(['fakeapp', 'fakeapp', 'fakeapp'],
                         [m.app_label() for m in self.fakeapp])

    def test_name(self):
        self.assertEqual(['0001_spam', '0002_eggs', '0003_alter_spam'],
                         [m.name() for m in self.fakeapp])

    def test_full_name(self):
        self.assertEqual(['fakeapp.migrations.0001_spam',
                          'fakeapp.migrations.0002_eggs',
                          'fakeapp.migrations.0003_alter_spam'],
                         [m.full_name() for m in self.fakeapp])

    def test_migration(self):
        # Can't use vanilla import, modules beginning with numbers aren't in grammar
        M1 = __import__("fakeapp.migrations.0001_spam", {}, {}, ['Migration']).Migration
        M2 = __import__("fakeapp.migrations.0002_eggs", {}, {}, ['Migration']).Migration
        M3 = __import__("fakeapp.migrations.0003_alter_spam", {}, {}, ['Migration']).Migration
        self.assertEqual([M1, M2, M3],
                         [m.migration().Migration for m in self.fakeapp])
        # Asking for a migration that doesn't exist on disk must raise.
        self.assertRaises(exceptions.UnknownMigration,
                          self.fakeapp['9999_unknown'].migration)

    def test_previous(self):
        # The first migration has no predecessor.
        self.assertEqual([None,
                          self.fakeapp['0001_spam'],
                          self.fakeapp['0002_eggs']],
                         [m.previous() for m in self.fakeapp])

    def test_dependencies(self):
        "Test that the dependency detection works."
        self.assertEqual([
                set([]),
                set([self.fakeapp['0001_spam']]),
                set([self.fakeapp['0002_eggs']])
            ],
            [m.dependencies for m in self.fakeapp],
        )
        # otherfakeapp's migrations depend across apps (via depends_on).
        self.assertEqual([
                set([self.fakeapp['0001_spam']]),
                set([self.otherfakeapp['0001_first']]),
                set([
                    self.otherfakeapp['0002_second'],
                    self.fakeapp['0003_alter_spam'],
                ])
            ],
            [m.dependencies for m in self.otherfakeapp],
        )

    def test_forwards_plan(self):
        # A forwards plan is the ordered list of migrations (dependencies
        # first) needed to reach each migration.
        self.assertEqual([
                [self.fakeapp['0001_spam']],
                [
                    self.fakeapp['0001_spam'],
                    self.fakeapp['0002_eggs']
                ],
                [
                    self.fakeapp['0001_spam'],
                    self.fakeapp['0002_eggs'],
                    self.fakeapp['0003_alter_spam'],
                ]
            ],
            [m.forwards_plan() for m in self.fakeapp],
        )
        self.assertEqual([
                [
                    self.fakeapp['0001_spam'],
                    self.otherfakeapp['0001_first']
                ],
                [
                    self.fakeapp['0001_spam'],
                    self.otherfakeapp['0001_first'],
                    self.otherfakeapp['0002_second']
                ],
                [
                    self.fakeapp['0001_spam'],
                    self.otherfakeapp['0001_first'],
                    self.otherfakeapp['0002_second'],
                    self.fakeapp['0002_eggs'],
                    self.fakeapp['0003_alter_spam'],
                    self.otherfakeapp['0003_third'],
                ]
            ],
            [m.forwards_plan() for m in self.otherfakeapp],
        )

    def test_is_before(self):
        # Within one app ordering is total; across apps it is undefined
        # (is_before returns None).
        F1 = self.fakeapp['0001_spam']
        F2 = self.fakeapp['0002_eggs']
        F3 = self.fakeapp['0003_alter_spam']
        O1 = self.otherfakeapp['0001_first']
        O2 = self.otherfakeapp['0002_second']
        O3 = self.otherfakeapp['0003_third']
        self.assertTrue(F1.is_before(F2))
        self.assertTrue(F1.is_before(F3))
        self.assertTrue(F2.is_before(F3))
        self.assertEqual(O3.is_before(O1), False)
        self.assertEqual(O3.is_before(O2), False)
        self.assertEqual(O2.is_before(O2), False)
        self.assertEqual(O2.is_before(O1), False)
        self.assertEqual(F2.is_before(O1), None)
        self.assertEqual(F2.is_before(O2), None)
        self.assertEqual(F2.is_before(O3), None)
+
+
class TestMigrationDependencies(Monkeypatcher):
    # deps_a/deps_b/deps_c are fixture apps whose migrations depend on each
    # other in various patterns; these tests pin the exact dependency,
    # dependent, and plan orderings South computes for them.
    installed_apps = ['deps_a', 'deps_b', 'deps_c']

    def setUp(self):
        super(TestMigrationDependencies, self).setUp()
        self.deps_a = Migrations('deps_a')
        self.deps_b = Migrations('deps_b')
        self.deps_c = Migrations('deps_c')
        Migrations.calculate_dependencies(force=True)

    def test_dependencies(self):
        # Direct (declared + implicit previous-migration) dependencies.
        self.assertEqual(
            [
                set([]),
                set([self.deps_a['0001_a']]),
                set([self.deps_a['0002_a']]),
                set([
                    self.deps_a['0003_a'],
                    self.deps_b['0003_b'],
                ]),
                set([self.deps_a['0004_a']]),
            ],
            [m.dependencies for m in self.deps_a],
        )
        self.assertEqual(
            [
                set([]),
                set([
                    self.deps_b['0001_b'],
                    self.deps_a['0002_a']
                ]),
                set([
                    self.deps_b['0002_b'],
                    self.deps_a['0003_a']
                ]),
                set([self.deps_b['0003_b']]),
                set([self.deps_b['0004_b']]),
            ],
            [m.dependencies for m in self.deps_b],
        )
        self.assertEqual(
            [
                set([]),
                set([self.deps_c['0001_c']]),
                set([self.deps_c['0002_c']]),
                set([self.deps_c['0003_c']]),
                set([
                    self.deps_c['0004_c'],
                    self.deps_a['0002_a']
                ]),
            ],
            [m.dependencies for m in self.deps_c],
        )

    def test_dependents(self):
        # Reverse edges of the graph above.
        self.assertEqual([set([self.deps_a['0002_a']]),
                          set([self.deps_c['0005_c'],
                               self.deps_b['0002_b'],
                               self.deps_a['0003_a']]),
                          set([self.deps_b['0003_b'],
                               self.deps_a['0004_a']]),
                          set([self.deps_a['0005_a']]),
                          set([])],
                         [m.dependents for m in self.deps_a])
        self.assertEqual([set([self.deps_b['0002_b']]),
                          set([self.deps_b['0003_b']]),
                          set([self.deps_b['0004_b'],
                               self.deps_a['0004_a']]),
                          set([self.deps_b['0005_b']]),
                          set([])],
                         [m.dependents for m in self.deps_b])
        self.assertEqual([set([self.deps_c['0002_c']]),
                          set([self.deps_c['0003_c']]),
                          set([self.deps_c['0004_c']]),
                          set([self.deps_c['0005_c']]),
                          set([])],
                         [m.dependents for m in self.deps_c])

    def test_forwards_plan(self):
        # Plans interleave apps where cross-app dependencies require it.
        self.assertEqual([[self.deps_a['0001_a']],
                          [self.deps_a['0001_a'],
                           self.deps_a['0002_a']],
                          [self.deps_a['0001_a'],
                           self.deps_a['0002_a'],
                           self.deps_a['0003_a']],
                          [self.deps_b['0001_b'],
                           self.deps_a['0001_a'],
                           self.deps_a['0002_a'],
                           self.deps_b['0002_b'],
                           self.deps_a['0003_a'],
                           self.deps_b['0003_b'],
                           self.deps_a['0004_a']],
                          [self.deps_b['0001_b'],
                           self.deps_a['0001_a'],
                           self.deps_a['0002_a'],
                           self.deps_b['0002_b'],
                           self.deps_a['0003_a'],
                           self.deps_b['0003_b'],
                           self.deps_a['0004_a'],
                           self.deps_a['0005_a']]],
                         [m.forwards_plan() for m in self.deps_a])
        self.assertEqual([[self.deps_b['0001_b']],
                          [self.deps_b['0001_b'],
                           self.deps_a['0001_a'],
                           self.deps_a['0002_a'],
                           self.deps_b['0002_b']],
                          [self.deps_b['0001_b'],
                           self.deps_a['0001_a'],
                           self.deps_a['0002_a'],
                           self.deps_b['0002_b'],
                           self.deps_a['0003_a'],
                           self.deps_b['0003_b']],
                          [self.deps_b['0001_b'],
                           self.deps_a['0001_a'],
                           self.deps_a['0002_a'],
                           self.deps_b['0002_b'],
                           self.deps_a['0003_a'],
                           self.deps_b['0003_b'],
                           self.deps_b['0004_b']],
                          [self.deps_b['0001_b'],
                           self.deps_a['0001_a'],
                           self.deps_a['0002_a'],
                           self.deps_b['0002_b'],
                           self.deps_a['0003_a'],
                           self.deps_b['0003_b'],
                           self.deps_b['0004_b'],
                           self.deps_b['0005_b']]],
                         [m.forwards_plan() for m in self.deps_b])
        self.assertEqual([[self.deps_c['0001_c']],
                          [self.deps_c['0001_c'],
                           self.deps_c['0002_c']],
                          [self.deps_c['0001_c'],
                           self.deps_c['0002_c'],
                           self.deps_c['0003_c']],
                          [self.deps_c['0001_c'],
                           self.deps_c['0002_c'],
                           self.deps_c['0003_c'],
                           self.deps_c['0004_c']],
                          [self.deps_c['0001_c'],
                           self.deps_c['0002_c'],
                           self.deps_c['0003_c'],
                           self.deps_c['0004_c'],
                           self.deps_a['0001_a'],
                           self.deps_a['0002_a'],
                           self.deps_c['0005_c']]],
                         [m.forwards_plan() for m in self.deps_c])

    def test_backwards_plan(self):
        # Backwards plans unwind dependents before the migration itself.
        self.assertEqual([
            [
                self.deps_c['0005_c'],
                self.deps_b['0005_b'],
                self.deps_b['0004_b'],
                self.deps_a['0005_a'],
                self.deps_a['0004_a'],
                self.deps_b['0003_b'],
                self.deps_b['0002_b'],
                self.deps_a['0003_a'],
                self.deps_a['0002_a'],
                self.deps_a['0001_a'],
            ],
            [
                self.deps_c['0005_c'],
                self.deps_b['0005_b'],
                self.deps_b['0004_b'],
                self.deps_a['0005_a'],
                self.deps_a['0004_a'],
                self.deps_b['0003_b'],
                self.deps_b['0002_b'],
                self.deps_a['0003_a'],
                self.deps_a['0002_a'],
            ],
            [
                self.deps_b['0005_b'],
                self.deps_b['0004_b'],
                self.deps_a['0005_a'],
                self.deps_a['0004_a'],
                self.deps_b['0003_b'],
                self.deps_a['0003_a'],
            ],
            [
                self.deps_a['0005_a'],
                self.deps_a['0004_a'],
            ],
            [
                self.deps_a['0005_a'],
            ]
        ], [m.backwards_plan() for m in self.deps_a])
        self.assertEqual([
            [
                self.deps_b['0005_b'],
                self.deps_b['0004_b'],
                self.deps_a['0005_a'],
                self.deps_a['0004_a'],
                self.deps_b['0003_b'],
                self.deps_b['0002_b'],
                self.deps_b['0001_b'],
            ],
            [
                self.deps_b['0005_b'],
                self.deps_b['0004_b'],
                self.deps_a['0005_a'],
                self.deps_a['0004_a'],
                self.deps_b['0003_b'],
                self.deps_b['0002_b'],
            ],
            [
                self.deps_b['0005_b'],
                self.deps_b['0004_b'],
                self.deps_a['0005_a'],
                self.deps_a['0004_a'],
                self.deps_b['0003_b'],
            ],
            [
                self.deps_b['0005_b'],
                self.deps_b['0004_b'],
            ],
            [
                self.deps_b['0005_b'],
            ],
        ], [m.backwards_plan() for m in self.deps_b])
        self.assertEqual([
            [
                self.deps_c['0005_c'],
                self.deps_c['0004_c'],
                self.deps_c['0003_c'],
                self.deps_c['0002_c'],
                self.deps_c['0001_c'],
            ],
            [
                self.deps_c['0005_c'],
                self.deps_c['0004_c'],
                self.deps_c['0003_c'],
                self.deps_c['0002_c'],
            ],
            [
                self.deps_c['0005_c'],
                self.deps_c['0004_c'],
                self.deps_c['0003_c'],
            ],
            [
                self.deps_c['0005_c'],
                self.deps_c['0004_c'],
            ],
            [self.deps_c['0005_c']]
        ], [m.backwards_plan() for m in self.deps_c])
+
+
class TestCircularDependencies(Monkeypatcher):
    """Migrations in circular_a/circular_b depend on each other; every plan
    computation must raise CircularDependency."""
    installed_apps = ["circular_a", "circular_b"]

    def test_plans(self):
        Migrations.calculate_dependencies(force=True)
        circular_a = Migrations('circular_a')
        circular_b = Migrations('circular_b')
        # Same order as before: both forwards plans, then both backwards.
        plans = (
            circular_a[-1].forwards_plan,
            circular_b[-1].forwards_plan,
            circular_a[-1].backwards_plan,
            circular_b[-1].backwards_plan,
        )
        for plan in plans:
            self.assertRaises(exceptions.CircularDependency, plan)
+
+
class TestMigrations(Monkeypatcher):
    """Exercises the Migrations collection: discovery, identity across
    lookup styles, and prefix-based guessing."""
    installed_apps = ["fakeapp", "otherfakeapp"]

    def test_all(self):
        # all_migrations() yields one Migrations object per installed app,
        # in installation order.
        fake = Migrations(__import__("fakeapp", {}, {}, ['']))
        other = Migrations(__import__("otherfakeapp", {}, {}, ['']))
        self.assertEqual([fake, other], list(all_migrations()))

    def test(self):
        # The same Migrations is found by module, by name and by fake app.
        by_module = Migrations(__import__("fakeapp", {}, {}, ['']))
        self.assertEqual(by_module, Migrations("fakeapp"))
        self.assertEqual(by_module, Migrations(self.create_fake_app("fakeapp")))

    def test_application(self):
        # A Migrations object keeps a reference to its application module.
        fakeapp = Migrations("fakeapp")
        self.assertEqual(__import__("fakeapp", {}, {}, ['']),
                         fakeapp.application)

    def test_migration(self):
        # Can't use vanilla import, modules beginning with numbers aren't in grammar
        migration = Migrations('fakeapp')
        for name in ("0001_spam", "0002_eggs"):
            klass = __import__("fakeapp.migrations.%s" % name,
                               {}, {}, ['Migration']).Migration
            self.assertEqual(klass, migration[name].migration().Migration)
        self.assertRaises(exceptions.UnknownMigration,
                          migration['0001_jam'].migration)

    def test_guess_migration(self):
        # Can't use vanilla import, modules beginning with numbers aren't in grammar
        M1 = __import__("fakeapp.migrations.0001_spam", {}, {}, ['Migration']).Migration
        migration = Migrations('fakeapp')
        # Every unambiguous prefix resolves to 0001_spam.
        for prefix in ("0001_spam", "0001_spa", "0001_sp",
                       "0001_s", "0001_", "0001"):
            self.assertEqual(
                M1, migration.guess_migration(prefix).migration().Migration)
        # Unknown or ambiguous prefixes raise the appropriate exception.
        for prefix, exc in (
            ("0001-spam", exceptions.UnknownMigration),
            ("000", exceptions.MultiplePrefixMatches),
            ("", exceptions.MultiplePrefixMatches),
            ("0001_spams", exceptions.UnknownMigration),
            ("0001_jam", exceptions.UnknownMigration),
        ):
            self.assertRaises(exc, migration.guess_migration, prefix)

    def test_app_label(self):
        names = ['fakeapp', 'otherfakeapp']
        self.assertEqual(names,
                         [Migrations(n).app_label() for n in names])

    def test_full_name(self):
        names = ['fakeapp', 'otherfakeapp']
        self.assertEqual([n + '.migrations' for n in names],
                         [Migrations(n).full_name() for n in names])
+
+
class TestMigrationLogic(Monkeypatcher):

    """
    Tests if the various logic functions in migration actually work.
    Uses assertTrue/assertFalse throughout: assert_ and failIf are
    deprecated unittest aliases (removed in modern Pythons).
    """

    installed_apps = ["fakeapp", "otherfakeapp"]

    def setUp(self):
        super(TestMigrationLogic, self).setUp()
        MigrationHistory.objects.all().delete()

    def assertListEqual(self, list1, list2, msg=None):
        # NOTE: shadows unittest.TestCase.assertListEqual on purpose --
        # here we compare as *sets*, ignoring order and duplicates, since
        # MigrationHistory row order is not significant.
        list1 = set(list1)
        list2 = set(list2)
        return self.assertTrue(list1 == list2, "%s is not equal to %s" % (list1, list2))

    def test_find_ghost_migrations(self):
        pass

    def test_apply_migrations(self):
        migrations = Migrations("fakeapp")

        # We should start with no migrations
        self.assertEqual(list(MigrationHistory.objects.all()), [])

        # Apply them normally
        migrate_app(migrations, target_name=None, fake=False,
                    load_initial_data=True)

        # We should finish with all migrations
        self.assertListEqual(
            (("fakeapp", "0001_spam"),
             ("fakeapp", "0002_eggs"),
             ("fakeapp", "0003_alter_spam"),),
            MigrationHistory.objects.values_list("app_name", "migration"),
        )

        # Now roll them backwards
        migrate_app(migrations, target_name="zero", fake=False)

        # Finish with none
        self.assertEqual(list(MigrationHistory.objects.all()), [])


    def test_migration_merge_forwards(self):
        migrations = Migrations("fakeapp")

        # We should start with no migrations
        self.assertEqual(list(MigrationHistory.objects.all()), [])

        # Insert one in the wrong order
        MigrationHistory.objects.create(app_name = "fakeapp",
                                        migration = "0002_eggs",
                                        applied = datetime.datetime.now())

        # Did it go in?
        self.assertListEqual(
            (("fakeapp", "0002_eggs"),),
            MigrationHistory.objects.values_list("app_name", "migration"),
        )

        # Apply them normally
        self.assertRaises(exceptions.InconsistentMigrationHistory,
                          migrate_app,
                          migrations, target_name=None, fake=False)
        self.assertRaises(exceptions.InconsistentMigrationHistory,
                          migrate_app,
                          migrations, target_name='zero', fake=False)
        # The exception should identify exactly which pair is out of order.
        try:
            migrate_app(migrations, target_name=None, fake=False)
        except exceptions.InconsistentMigrationHistory as e:
            self.assertEqual(
                [
                    (
                        migrations['0002_eggs'],
                        migrations['0001_spam'],
                    )
                ],
                e.problems,
            )
        try:
            migrate_app(migrations, target_name="zero", fake=False)
        except exceptions.InconsistentMigrationHistory as e:
            self.assertEqual(
                [
                    (
                        migrations['0002_eggs'],
                        migrations['0001_spam'],
                    )
                ],
                e.problems,
            )

        # Nothing should have changed (no merge mode!)
        self.assertListEqual(
            (("fakeapp", "0002_eggs"),),
            MigrationHistory.objects.values_list("app_name", "migration"),
        )

        # Apply with merge
        migrate_app(migrations, target_name=None, merge=True, fake=False)

        # We should finish with all migrations
        self.assertListEqual(
            (("fakeapp", "0001_spam"),
             ("fakeapp", "0002_eggs"),
             ("fakeapp", "0003_alter_spam"),),
            MigrationHistory.objects.values_list("app_name", "migration"),
        )

        # Now roll them backwards
        migrate_app(migrations, target_name="0002", fake=False)
        migrate_app(migrations, target_name="0001", fake=True)
        migrate_app(migrations, target_name="zero", fake=False)

        # Finish with none
        self.assertEqual(list(MigrationHistory.objects.all()), [])

    def test_alter_column_null(self):

        def null_ok(eat_exception=True):
            # Returns True iff the DB currently accepts NULL in
            # southtest_spam.weight.  Probes by inserting a row with a NULL.
            from django.db import connection, transaction
            # the DBAPI introspection module fails on postgres NULLs.
            cursor = connection.cursor()

            # SQLite has weird now()
            if db.backend_name == "sqlite3":
                now_func = "DATETIME('NOW')"
            # So does SQLServer... should we be using a backend attribute?
            elif db.backend_name == "pyodbc":
                now_func = "GETDATE()"
            elif db.backend_name == "oracle":
                now_func = "SYSDATE"
            else:
                now_func = "NOW()"

            try:
                if db.backend_name == "pyodbc":
                    cursor.execute("SET IDENTITY_INSERT southtest_spam ON;")
                cursor.execute("INSERT INTO southtest_spam (id, weight, expires, name) VALUES (100, NULL, %s, 'whatever');" % now_func)
            except Exception:
                # Narrowed from a bare "except:" so KeyboardInterrupt and
                # friends still propagate; DB backends raise various
                # Exception subclasses here.
                if eat_exception:
                    transaction.rollback()
                    return False
                else:
                    raise
            else:
                cursor.execute("DELETE FROM southtest_spam")
                transaction.commit()
                return True

        MigrationHistory.objects.all().delete()
        migrations = Migrations("fakeapp")

        # by default name is NOT NULL
        migrate_app(migrations, target_name="0002", fake=False)
        self.assertFalse(null_ok())
        self.assertListEqual(
            (("fakeapp", "0001_spam"),
             ("fakeapp", "0002_eggs"),),
            MigrationHistory.objects.values_list("app_name", "migration"),
        )

        # after 0003, it should be NULL
        migrate_app(migrations, target_name="0003", fake=False)
        self.assertTrue(null_ok(False))
        self.assertListEqual(
            (("fakeapp", "0001_spam"),
             ("fakeapp", "0002_eggs"),
             ("fakeapp", "0003_alter_spam"),),
            MigrationHistory.objects.values_list("app_name", "migration"),
        )

        # make sure it is NOT NULL again
        migrate_app(migrations, target_name="0002", fake=False)
        self.assertFalse(null_ok(), 'weight not null after migration')
        self.assertListEqual(
            (("fakeapp", "0001_spam"),
             ("fakeapp", "0002_eggs"),),
            MigrationHistory.objects.values_list("app_name", "migration"),
        )

        # finish with no migrations, otherwise other tests fail...
        migrate_app(migrations, target_name="zero", fake=False)
        self.assertEqual(list(MigrationHistory.objects.all()), [])

    def test_dependencies(self):

        fakeapp = Migrations("fakeapp")
        otherfakeapp = Migrations("otherfakeapp")

        # Test a simple path
        self.assertEqual([fakeapp['0001_spam'],
                          fakeapp['0002_eggs'],
                          fakeapp['0003_alter_spam']],
                         fakeapp['0003_alter_spam'].forwards_plan())

        # And a complex one.
        self.assertEqual(
            [
                fakeapp['0001_spam'],
                otherfakeapp['0001_first'],
                otherfakeapp['0002_second'],
                fakeapp['0002_eggs'],
                fakeapp['0003_alter_spam'],
                otherfakeapp['0003_third']
            ],
            otherfakeapp['0003_third'].forwards_plan(),
        )
+
+
class TestMigrationUtils(Monkeypatcher):
    installed_apps = ["fakeapp", "otherfakeapp"]

    def test_get_app_label(self):
        # get_app_label() returns the package that directly contains the
        # models module, however deeply nested.
        cases = [
            ("southtest.models", "southtest"),
            ("foo.bar.baz.models", "baz"),
        ]
        for module_path, label in cases:
            self.assertEqual(
                label,
                get_app_label(self.create_fake_app(module_path)),
            )
+
class TestUtils(unittest.TestCase):
    # Pure-function tests for migration.utils helpers: flatten() and the
    # topological-sort core depends().

    def test_flatten(self):
        # flatten() recursively flattens nested iterables into one stream.
        self.assertEqual([], list(flatten(iter([]))))
        self.assertEqual([], list(flatten(iter([iter([]), ]))))
        self.assertEqual([1], list(flatten(iter([1]))))
        self.assertEqual([1, 2], list(flatten(iter([1, 2]))))
        self.assertEqual([1, 2], list(flatten(iter([iter([1]), 2]))))
        self.assertEqual([1, 2], list(flatten(iter([iter([1, 2])]))))
        self.assertEqual([1, 2, 3], list(flatten(iter([iter([1, 2]), 3]))))
        self.assertEqual([1, 2, 3],
                         list(flatten(iter([iter([1]), iter([2]), 3]))))
        self.assertEqual([1, 2, 3],
                         list(flatten([[1], [2], 3])))

    def test_depends(self):
        # depends(target, edges) returns targets's dependency closure in
        # dependency-first order, ending with the target itself.
        graph = {'A1': []}
        self.assertEqual(['A1'],
                         depends('A1', lambda n: graph[n]))
        graph = {'A1': [],
                 'A2': ['A1'],
                 'A3': ['A2']}
        self.assertEqual(['A1', 'A2', 'A3'],
                         depends('A3', lambda n: graph[n]))
        graph = {'A1': [],
                 'A2': ['A1'],
                 'A3': ['A2', 'A1']}
        self.assertEqual(['A1', 'A2', 'A3'],
                         depends('A3', lambda n: graph[n]))
        graph = {'A1': [],
                 'A2': ['A1'],
                 'A3': ['A2', 'A1', 'B1'],
                 'B1': []}
        self.assertEqual(
            ['B1', 'A1', 'A2', 'A3'],
            depends('A3', lambda n: graph[n]),
        )
        graph = {'A1': [],
                 'A2': ['A1'],
                 'A3': ['A2', 'A1', 'B2'],
                 'B1': [],
                 'B2': ['B1']}
        self.assertEqual(
            ['B1', 'B2', 'A1', 'A2', 'A3'],
            depends('A3', lambda n: graph[n]),
        )
        graph = {'A1': [],
                 'A2': ['A1', 'B1'],
                 'A3': ['A2'],
                 'B1': ['A1']}
        self.assertEqual(['A1', 'B1', 'A2', 'A3'],
                         depends('A3', lambda n: graph[n]))
        graph = {'A1': [],
                 'A2': ['A1'],
                 'A3': ['A2', 'A1', 'B2'],
                 'B1': [],
                 'B2': ['B1', 'C1'],
                 'C1': ['B1']}
        self.assertEqual(
            ['B1', 'C1', 'B2', 'A1', 'A2', 'A3'],
            depends('A3', lambda n: graph[n]),
        )
        graph = {'A1': [],
                 'A2': ['A1'],
                 'A3': ['A2', 'B2', 'A1', 'C1'],
                 'B1': ['A1'],
                 'B2': ['B1', 'C2', 'A1'],
                 'C1': ['B1'],
                 'C2': ['C1', 'A1'],
                 'C3': ['C2']}
        self.assertEqual(
            ['A1', 'B1', 'C1', 'C2', 'B2', 'A2', 'A3'],
            depends('A3', lambda n: graph[n]),
        )

    def assertCircularDependency(self, trace, target, graph):
        "Custom assertion that checks a circular dependency is detected correctly."
        # Checks both that CircularDependency is raised and that the cycle
        # it reports (e.trace) is exactly the expected one.
        self.assertRaises(
            exceptions.CircularDependency,
            depends,
            target,
            lambda n: graph[n],
        )
        try:
            depends(target, lambda n: graph[n])
        except exceptions.CircularDependency as e:
            self.assertEqual(trace, e.trace)

    def test_depends_cycle(self):
        # Self-loop on the target itself.
        graph = {'A1': ['A1']}
        self.assertCircularDependency(
            ['A1', 'A1'],
            'A1',
            graph,
        )
        graph = {'A1': [],
                 'A2': ['A1', 'A2'],
                 'A3': ['A2']}
        self.assertCircularDependency(
            ['A2', 'A2'],
            'A3',
            graph,
        )
        graph = {'A1': [],
                 'A2': ['A1'],
                 'A3': ['A2', 'A3'],
                 'A4': ['A3']}
        self.assertCircularDependency(
            ['A3', 'A3'],
            'A4',
            graph,
        )
        # Two-node mutual cycle.
        graph = {'A1': ['B1'],
                 'B1': ['A1']}
        self.assertCircularDependency(
            ['A1', 'B1', 'A1'],
            'A1',
            graph,
        )
        graph = {'A1': [],
                 'A2': ['A1', 'B2'],
                 'A3': ['A2'],
                 'B1': [],
                 'B2': ['B1', 'A2'],
                 'B3': ['B2']}
        self.assertCircularDependency(
            ['A2', 'B2', 'A2'],
            'A3',
            graph,
        )
        graph = {'A1': [],
                 'A2': ['A1', 'B3'],
                 'A3': ['A2'],
                 'B1': [],
                 'B2': ['B1', 'A2'],
                 'B3': ['B2']}
        self.assertCircularDependency(
            ['A2', 'B3', 'B2', 'A2'],
            'A3',
            graph,
        )
        graph = {'A1': [],
                 'A2': ['A1'],
                 'A3': ['A2', 'B2'],
                 'A4': ['A3'],
                 'B1': ['A3'],
                 'B2': ['B1']}
        self.assertCircularDependency(
            ['A3', 'B2', 'B1', 'A3'],
            'A4',
            graph,
        )
+
class TestManualChanges(Monkeypatcher):
    """ManualChanges must derive a readable migration name from the
    added-field / added-index specifiers it is given."""
    installed_apps = ["fakeapp", "otherfakeapp"]

    def test_suggest_name(self):
        migrations = Migrations('fakeapp')
        # Adding a field suggests an add_field_* name.
        # (assertEqual, not the deprecated assertEquals alias.)
        change = ManualChanges(migrations,
                               [],
                               ['fakeapp.slug'],
                               [])
        self.assertEqual(change.suggest_name(),
                         'add_field_fakeapp_slug')

        # Adding an index suggests an add_index_* name.
        change = ManualChanges(migrations,
                               [],
                               [],
                               ['fakeapp.slug'])
        self.assertEqual(change.suggest_name(),
                         'add_index_fakeapp_slug')
diff --git a/lib/python2.7/site-packages/south/tests/non_managed/__init__.py b/lib/python2.7/site-packages/south/tests/non_managed/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/non_managed/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/non_managed/migrations/__init__.py b/lib/python2.7/site-packages/south/tests/non_managed/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/non_managed/migrations/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/non_managed/models.py b/lib/python2.7/site-packages/south/tests/non_managed/models.py
new file mode 100644
index 0000000..e520d94
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/non_managed/models.py
@@ -0,0 +1,16 @@
+# -*- coding: UTF-8 -*-
+
+"""
+An app with a model that is not managed for testing that South does
+not try to manage it in any way
+"""
+from django.db import models
+
class Legacy(models.Model):
    # Simple legacy schema fixture; South must leave this table alone
    # because Meta.managed is False (see module docstring).
    name = models.CharField(max_length=10)
    size = models.IntegerField()

    class Meta:
        # Points at a pre-existing table that Django/South does not create.
        db_table = "legacy_table"
        # Unmanaged: South should not generate migrations for this model.
        managed = False
diff --git a/lib/python2.7/site-packages/south/tests/otherfakeapp/__init__.py b/lib/python2.7/site-packages/south/tests/otherfakeapp/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/otherfakeapp/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/0001_first.py b/lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/0001_first.py
new file mode 100644
index 0000000..ad9c095
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/0001_first.py
@@ -0,0 +1,15 @@
+from south.db import db
+from django.db import models
+
class Migration:
    # Old-style (v1 API) migration fixture: deliberately no base class --
    # South identifies v1 migrations by the absence of a v2 base class.

    # Must run after fakeapp's 0001_spam (cross-app dependency fixture).
    depends_on = (
        ("fakeapp", "0001_spam"),
    )

    def forwards(self):
        # Intentionally empty: only the dependency graph matters in tests.
        pass

    def backwards(self):
        # Intentionally empty: only the dependency graph matters in tests.
        pass
+
diff --git a/lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/0002_second.py b/lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/0002_second.py
new file mode 100644
index 0000000..7c0fb0c
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/0002_second.py
@@ -0,0 +1,11 @@
+from south.db import db
+from django.db import models
+
class Migration:
    # Old-style (v1 API) migration fixture: deliberately no base class --
    # South identifies v1 migrations by the absence of a v2 base class.

    def forwards(self):
        # Intentionally empty: only migration ordering matters in tests.
        pass

    def backwards(self):
        # Intentionally empty: only migration ordering matters in tests.
        pass
+
diff --git a/lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/0003_third.py b/lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/0003_third.py
new file mode 100644
index 0000000..fa8ed97
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/0003_third.py
@@ -0,0 +1,14 @@
+from south.db import db
+from django.db import models
+
class Migration:
    # Old-style (v1 API) migration fixture: deliberately no base class --
    # South identifies v1 migrations by the absence of a v2 base class.

    # Must run after fakeapp's 0003_alter_spam (cross-app dependency fixture).
    depends_on = (
        ("fakeapp", "0003_alter_spam"),
    )

    def forwards(self):
        # Intentionally empty: only the dependency graph matters in tests.
        pass

    def backwards(self):
        # Intentionally empty: only the dependency graph matters in tests.
        pass
diff --git a/lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/__init__.py b/lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/otherfakeapp/migrations/__init__.py
diff --git a/lib/python2.7/site-packages/south/tests/otherfakeapp/models.py b/lib/python2.7/site-packages/south/tests/otherfakeapp/models.py
new file mode 100644
index 0000000..93a4b8e
--- /dev/null
+++ b/lib/python2.7/site-packages/south/tests/otherfakeapp/models.py
@@ -0,0 +1 @@
+# This file left intentionally blank. \ No newline at end of file
diff --git a/lib/python2.7/site-packages/south/utils/__init__.py b/lib/python2.7/site-packages/south/utils/__init__.py
new file mode 100644
index 0000000..8d7297e
--- /dev/null
+++ b/lib/python2.7/site-packages/south/utils/__init__.py
@@ -0,0 +1,73 @@
+"""
+Generally helpful utility functions.
+"""
+
+
+def _ask_for_it_by_name(name):
+ "Returns an object referenced by absolute path."
+ bits = str(name).split(".")
+
+ ## what if there is no absolute reference?
+ if len(bits) > 1:
+ modulename = ".".join(bits[:-1])
+ else:
+ modulename = bits[0]
+
+ module = __import__(modulename, {}, {}, bits[-1])
+
+ if len(bits) == 1:
+ return module
+ else:
+ return getattr(module, bits[-1])
+
+
def ask_for_it_by_name(name):
    "Returns an object referenced by absolute path. (Memoised outer wrapper)"
    # EAFP: a cache hit is the common case, so try the lookup first.
    cache = ask_for_it_by_name.cache
    try:
        return cache[name]
    except KeyError:
        cache[name] = _ask_for_it_by_name(name)
        return cache[name]
ask_for_it_by_name.cache = {}
+
+
def get_attribute(item, attribute):
    """
    Like getattr, but recursive (i.e. you can ask for 'foo.bar.yay'.)
    """
    # Walk the dotted path one attribute at a time.
    target = item
    for step in attribute.split("."):
        target = getattr(target, step)
    return target
+
def auto_through(field):
    "Returns if the M2M class passed in has an autogenerated through table or not."
    through = field.rel.through
    # Django 1.0/1.1: an auto-created M2M has no through model at all.
    if not through:
        return True
    # Django 1.2+: the through model exists but is flagged auto_created.
    meta = getattr(through, "_meta", None)
    return getattr(meta, "auto_created", False)
+
def auto_model(model):
    "Returns if the given model was automatically generated."
    # Absence of the _meta.auto_created flag means a hand-written model.
    meta = model._meta
    return getattr(meta, "auto_created", False)
+
def memoize(function):
    "Standard memoization decorator."
    # Cached result is stored on the *instance* under "_<funcname>".
    cache_attr = "_" + function.__name__

    def method(self):
        # EAFP: compute on first access, then reuse the stored value.
        try:
            return getattr(self, cache_attr)
        except AttributeError:
            result = function(self)
            setattr(self, cache_attr, result)
            return result

    def invalidate():
        # NOTE(review): this inspects the attribute on the wrapper function,
        # but the cached value lives on the instance -- as written this looks
        # like a no-op; confirm the intended call pattern before changing it.
        if hasattr(method, cache_attr):
            delattr(method, cache_attr)

    method.__name__ = function.__name__
    method.__doc__ = function.__doc__
    method._invalidate = invalidate
    return method
diff --git a/lib/python2.7/site-packages/south/utils/datetime_utils.py b/lib/python2.7/site-packages/south/utils/datetime_utils.py
new file mode 100644
index 0000000..a416935
--- /dev/null
+++ b/lib/python2.7/site-packages/south/utils/datetime_utils.py
@@ -0,0 +1,28 @@
+from datetime import *
+
+import django
+from django.conf import settings
+
if django.VERSION[:2] >= (1, 4) and getattr(settings, 'USE_TZ', False):
    # Only shadow datetime.datetime when Django can hand out tz-aware
    # datetimes (Django >= 1.4 with USE_TZ on); otherwise the plain class
    # from the star-import above remains in effect.
    from django.utils import timezone
    from datetime import datetime as _datetime

    class datetime(_datetime):
        """
        A custom datetime.datetime class which acts as a compatibility
        layer between South and Django 1.4's timezone aware datetime
        instances.

        It basically adds the default timezone (as configured in Django's
        settings) automatically if no tzinfo is given.
        """
        def __new__(cls, year, month, day,
                    hour=0, minute=0, second=0, microsecond=0, tzinfo=None):
            # Build a plain (possibly naive) datetime first ...
            dt = _datetime(year, month, day,
                           hour, minute, second, microsecond,
                           tzinfo=tzinfo)
            if tzinfo is None:
                # ... then localise naive values to the project default zone.
                default_timezone = timezone.get_default_timezone()
                dt = timezone.make_aware(dt, default_timezone)
            return dt
diff --git a/lib/python2.7/site-packages/south/utils/py3.py b/lib/python2.7/site-packages/south/utils/py3.py
new file mode 100644
index 0000000..732e904
--- /dev/null
+++ b/lib/python2.7/site-packages/south/utils/py3.py
@@ -0,0 +1,28 @@
+"""
+Python 2 + 3 compatibility functions. This is a very small subset of six.
+"""
+
+import sys
+
# True when running under Python 3 (used to pick the right spellings below).
PY3 = sys.version_info[0] == 3

if PY3:
    # Python 3 names for the handful of compat symbols South needs.
    string_types = str,
    text_type = str
    raw_input = input

    import io
    StringIO = io.StringIO

else:
    # Python 2 equivalents of the same symbols.
    string_types = basestring,
    text_type = unicode
    raw_input = raw_input

    import cStringIO
    StringIO = cStringIO.StringIO
+
+
def with_metaclass(meta, base=object):
    """Create a base class with a metaclass."""
    # Instantiate the metaclass directly so the result can serve as a base
    # class on both Python 2 and Python 3 (tiny subset of six.with_metaclass).
    new_base = meta("NewBase", (base,), {})
    return new_base
diff --git a/lib/python2.7/site-packages/south/v2.py b/lib/python2.7/site-packages/south/v2.py
new file mode 100644
index 0000000..22afed2
--- /dev/null
+++ b/lib/python2.7/site-packages/south/v2.py
@@ -0,0 +1,25 @@
+"""
+API versioning file; we can tell what kind of migrations things are
+by what class they inherit from (if none, it's a v1).
+"""
+
+from south.utils import ask_for_it_by_name
+
class BaseMigration(object):
    """Common base for v2-API migrations (see module docstring)."""

    def gf(self, field_name):
        "Gets a field by absolute reference."
        # Resolve the dotted path, then attach the stand-in model so the
        # field can render error messages without a real model class.
        resolved = ask_for_it_by_name(field_name)
        resolved.model = FakeModel
        return resolved
+
class SchemaMigration(BaseMigration):
    # Marker class: schema-changing migrations declare themselves by
    # inheriting from this (the module docstring explains version detection).
    pass
+
class DataMigration(BaseMigration):
    # Marker class for migrations that move data rather than schema.
    # Data migrations shouldn't be dry-run
    no_dry_run = True
+
class FakeModel(object):
    "Fake model so error messages on fields don't explode"
    # Assigned to field.model in BaseMigration.gf above; carries no behaviour.
    pass