summaryrefslogtreecommitdiff
path: root/lib/python2.7/site-packages/south/db
diff options
context:
space:
mode:
Diffstat (limited to 'lib/python2.7/site-packages/south/db')
-rw-r--r--lib/python2.7/site-packages/south/db/__init__.py83
-rw-r--r--lib/python2.7/site-packages/south/db/firebird.py362
-rw-r--r--lib/python2.7/site-packages/south/db/generic.py1164
-rw-r--r--lib/python2.7/site-packages/south/db/mysql.py290
-rw-r--r--lib/python2.7/site-packages/south/db/oracle.py345
-rw-r--r--lib/python2.7/site-packages/south/db/postgresql_psycopg2.py96
-rw-r--r--lib/python2.7/site-packages/south/db/sql_server/__init__.py0
-rw-r--r--lib/python2.7/site-packages/south/db/sql_server/pyodbc.py444
-rw-r--r--lib/python2.7/site-packages/south/db/sqlite3.py272
9 files changed, 3056 insertions, 0 deletions
diff --git a/lib/python2.7/site-packages/south/db/__init__.py b/lib/python2.7/site-packages/south/db/__init__.py
new file mode 100644
index 0000000..b9b7168
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/__init__.py
@@ -0,0 +1,83 @@

# Establish the common DatabaseOperations instance, which we call 'db'.
# Much thanks to cmkmrr for a lot of the code base here

from django.conf import settings
import sys

# A few aliases, because there's FQMNs now
# Maps each supported Django ENGINE path to the south.db backend module
# that implements its schema-alteration operations.
engine_modules = {
    'django.db.backends.postgresql_psycopg2': 'postgresql_psycopg2',
    'django.db.backends.sqlite3': 'sqlite3',
    'django.db.backends.mysql': 'mysql',
    'mysql_oursql.standard': 'mysql',
    'django.db.backends.oracle': 'oracle',
    'sql_server.pyodbc': 'sql_server.pyodbc', #django-pyodbc-azure
    'django_pyodbc': 'sql_server.pyodbc', #django-pyodbc
    'sqlserver_ado': 'sql_server.pyodbc', #django-mssql
    'firebird': 'firebird', #django-firebird
    'django.contrib.gis.db.backends.postgis': 'postgresql_psycopg2',
    'django.contrib.gis.db.backends.spatialite': 'sqlite3',
    'django.contrib.gis.db.backends.mysql': 'mysql',
    'django.contrib.gis.db.backends.oracle': 'oracle',
    'doj.backends.zxjdbc.postgresql': 'postgresql_psycopg2', #django-jython
    'doj.backends.zxjdbc.mysql': 'mysql', #django-jython
    'doj.backends.zxjdbc.oracle': 'oracle', #django-jython
}

# First, work out if we're multi-db or not, and which databases we have
try:
    from django.db import DEFAULT_DB_ALIAS
except ImportError:
    #### 1.1 or below ####
    # We'll 'fake' multi-db; set the default alias
    DEFAULT_DB_ALIAS = 'default'
    # SOUTH_DATABASE_ADAPTER is an optional override if you have a different module
    engine = getattr(settings, "SOUTH_DATABASE_ADAPTER", "south.db.%s" % settings.DATABASE_ENGINE)
    # And then, we have one database with one engine
    db_engines = {DEFAULT_DB_ALIAS: engine}
else:
    #### 1.2 or above ####
    # Loop over the defined databases, gathering up their engines
    db_engines = dict([
        # Note we check to see if contrib.gis has overridden us.
        (alias, "south.db.%s" % engine_modules[db_settings['ENGINE']])
        for alias, db_settings in settings.DATABASES.items()
        if db_settings['ENGINE'] in engine_modules
    ])
    # Update with any overrides
    # (SOUTH_DATABASE_ADAPTERS maps alias -> full module path, or None to
    # explicitly mark a backend as unsupported)
    db_engines.update(getattr(settings, "SOUTH_DATABASE_ADAPTERS", {}))
    # Check there's no None engines, or...
    for alias, engine in db_engines.items():
        if engine is None:
            # They've used a backend we don't support
            sys.stderr.write(
                (
                    "There is no South database module for your database backend '%s'. " + \
                    "Please either choose a supported database, check for " + \
                    "SOUTH_DATABASE_ADAPTER[S] settings, " + \
                    "or remove South from INSTALLED_APPS.\n"
                ) % (settings.DATABASES[alias]['ENGINE'],)
            )
            sys.exit(1)

# Now, turn that into a dict of <alias: south db module>
# Each module is imported and its DatabaseOperations instantiated per alias.
dbs = {}
try:
    for alias, module_name in db_engines.items():
        module = __import__(module_name, {}, {}, [''])
        dbs[alias] = module.DatabaseOperations(alias)
except ImportError:
    # This error should only be triggered on 1.1 and below.
    sys.stderr.write(
        (
            "There is no South database module '%s' for your database. " + \
            "Please either choose a supported database, check for " + \
            "SOUTH_DATABASE_ADAPTER[S] settings, " + \
            "or remove South from INSTALLED_APPS.\n"
        ) % (module_name,)
    )
    sys.exit(1)

# Finally, to make old migrations work, keep 'db' around as the default database
db = dbs[DEFAULT_DB_ALIAS]
diff --git a/lib/python2.7/site-packages/south/db/firebird.py b/lib/python2.7/site-packages/south/db/firebird.py
new file mode 100644
index 0000000..a079819
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/firebird.py
@@ -0,0 +1,362 @@
+# firebird
+
+from __future__ import print_function
+
+import datetime
+
+from django.db import connection, models
+from django.core.management.color import no_style
+from django.db.utils import DatabaseError
+
+from south.db import generic
+from south.utils.py3 import string_types
+
class DatabaseOperations(generic.DatabaseOperations):
    """
    Firebird implementation of South's schema-alteration operations.

    Firebird has several quirks relative to the generic backend: it cannot
    rename tables, cannot combine multiple ALTERs in one statement, has no
    native booleans, has no DROP NOT NULL (nullability is toggled by writing
    the RDB$NULL_FLAG system column directly), and requires NOT NULL to come
    after the DEFAULT clause in a column definition.
    """

    backend_name = 'firebird'
    alter_string_set_type = 'ALTER %(column)s TYPE %(type)s'
    alter_string_set_default = 'ALTER %(column)s SET DEFAULT %(default)s;'
    alter_string_drop_null = ''
    add_column_string = 'ALTER TABLE %s ADD %s;'
    delete_column_string = 'ALTER TABLE %s DROP %s;'
    rename_table_sql = ''

    # Features
    allows_combined_alters = False
    has_booleans = False

    def _fill_constraint_cache(self, db_name, table_name):
        """
        Populate the constraint cache for 'table_name' by reading the
        Firebird system tables for its NOT NULL constraints.
        """
        self._constraint_cache.setdefault(db_name, {})
        self._constraint_cache[db_name][table_name] = {}

        rows = self.execute("""
            SELECT
                rc.RDB$CONSTRAINT_NAME,
                rc.RDB$CONSTRAINT_TYPE,
                cc.RDB$TRIGGER_NAME
            FROM rdb$relation_constraints rc
            JOIN rdb$check_constraints cc
            ON rc.rdb$constraint_name = cc.rdb$constraint_name
            WHERE rc.rdb$constraint_type = 'NOT NULL'
            AND rc.rdb$relation_name = '%s'
            """ % table_name)

        for constraint, kind, column in rows:
            self._constraint_cache[db_name][table_name].setdefault(column, set())
            self._constraint_cache[db_name][table_name][column].add((kind, constraint))
        return

    def _alter_column_set_null(self, table_name, column_name, is_null):
        """
        Return SQL toggling the NOT NULL flag of a column.

        Firebird has no ALTER ... DROP NOT NULL, so we update the
        RDB$NULL_FLAG system column directly: NULL means nullable,
        1 means NOT NULL.
        """
        sql = """
        UPDATE RDB$RELATION_FIELDS SET RDB$NULL_FLAG = %(null_flag)s
        WHERE RDB$FIELD_NAME = '%(column)s'
        AND RDB$RELATION_NAME = '%(table_name)s'
        """
        null_flag = 'NULL' if is_null else '1'
        return sql % {
            'null_flag': null_flag,
            'column': column_name.upper(),
            'table_name': table_name.upper()
        }

    def _column_has_default(self, params):
        """
        Return True if the column described by params (keys 'column' and
        'table_name') currently has a database-level default.
        """
        sql = """
        SELECT a.RDB$DEFAULT_VALUE
        FROM RDB$RELATION_FIELDS a
        WHERE a.RDB$FIELD_NAME = '%(column)s'
        AND a.RDB$RELATION_NAME = '%(table_name)s'
        """
        value = self.execute(sql % params)
        return True if value else False

    def _alter_set_defaults(self, field, name, params, sqls):
        "Subcommand of alter_column that sets default values (overrideable)"
        # Historically, we used to set defaults here.
        # But since South 0.8, we don't ever set defaults on alter-column -- we only
        # use database-level defaults as scaffolding when adding columns.
        # However, we still sometimes need to remove defaults in alter-column.
        if self._column_has_default(params):
            sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), []))

    @generic.invalidate_table_constraints
    def create_table(self, table_name, fields):
        """
        Create 'table_name' from (field_name, field) pairs. For an
        AutoField, also run the generator/trigger SQL pair Firebird uses
        to emulate auto-increment.
        """
        columns = []
        autoinc_sql = ''

        for field_name, field in fields:
            # avoid default values in CREATE TABLE statements (#925)
            field._suppress_default = True

            col = self.column_sql(table_name, field_name, field)
            if not col:
                continue

            columns.append(col)
            if isinstance(field, models.AutoField):
                field_name = field.db_column or field.column
                autoinc_sql = connection.ops.autoinc_sql(table_name, field_name)

        self.execute(self.create_table_sql % {
            "table": self.quote_name(table_name),
            "columns": ', '.join([col for col in columns if col]),
        })

        if autoinc_sql:
            self.execute(autoinc_sql[0])
            self.execute(autoinc_sql[1])

    def rename_table(self, old_table_name, table_name):
        """
        Renaming tables is not supported by Firebird.
        It would involve recreating all related objects (stored procedures,
        views, triggers, etc), so this is deliberately a no-op.
        """
        pass

    @generic.invalidate_table_constraints
    def delete_table(self, table_name, cascade=False):
        """
        Deletes the table 'table_name'.
        Firebird will also delete any triggers associated with the table.
        """
        super(DatabaseOperations, self).delete_table(table_name, cascade=False)

        # Also, drop sequence if exists
        sql = connection.ops.drop_sequence_sql(table_name)
        if sql:
            try:
                self.execute(sql)
            except Exception:
                # Best-effort: the sequence may simply not exist (e.g. the
                # table had no AutoField). Was a bare "except:"; narrowed so
                # KeyboardInterrupt/SystemExit are not swallowed.
                pass

    def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
        """
        Creates the SQL snippet for a column. Used by add_column and add_table.

        Returns the rendered column definition string, or None when the
        field produces no SQL type (e.g. some GIS fields).
        """

        # If the field hasn't already been told its attribute name, do so.
        if not field_prepared:
            field.set_attributes_from_name(field_name)

        # hook for the field to do any resolution prior to it's attributes being queried
        if hasattr(field, 'south_init'):
            field.south_init()

        # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
        field = self._field_sanity(field)

        try:
            sql = field.db_type(connection=self._get_connection())
        except TypeError:
            sql = field.db_type()

        if sql:
            # Some callers, like the sqlite stuff, just want the extended type.
            if with_name:
                field_output = [self.quote_name(field.column), sql]
            else:
                field_output = [sql]

            if field.primary_key:
                field_output.append('NOT NULL PRIMARY KEY')
            elif field.unique:
                # Just use UNIQUE (no indexes any more, we have delete_unique)
                field_output.append('UNIQUE')

            sql = ' '.join(field_output)
            sqlparams = ()

            # if the field is "NOT NULL" and a default value is provided, create the column with it
            # this allows the addition of a NOT NULL field to a table with existing rows
            if not getattr(field, '_suppress_default', False):
                if field.has_default():
                    default = field.get_default()
                    # If the default is actually None, don't add a default term
                    if default is not None:
                        # If the default is a callable, then call it!
                        if callable(default):
                            default = default()
                        # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
                        if isinstance(default, string_types):
                            default = "'%s'" % default.replace("'", "''")
                        elif isinstance(default, (datetime.date, datetime.time, datetime.datetime)):
                            default = "'%s'" % default
                        elif isinstance(default, bool):
                            default = int(default)
                        # Escape any % signs in the output (bug #317)
                        if isinstance(default, string_types):
                            default = default.replace("%", "%%")
                        # Add it in
                        sql += " DEFAULT %s"
                        # BUGFIX: must be a 1-tuple -- a bare (default) is just
                        # the value itself, which breaks "sql % sqlparams"
                        # below if the default is ever a tuple or dict.
                        sqlparams = (default,)
                elif (not field.null and field.blank) or (field.get_default() == ''):
                    if field.empty_strings_allowed and self._get_connection().features.interprets_empty_strings_as_nulls:
                        sql += " DEFAULT ''"
                    # Error here would be nice, but doesn't seem to play fair.
                    #else:
                    #    raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")

            # Firebird needs NOT NULL to come after the DEFAULT clause
            if not field.primary_key and not field.null:
                sql += ' NOT NULL'

            if field.rel and self.supports_foreign_keys:
                self.add_deferred_sql(
                    self.foreign_key_sql(
                        table_name,
                        field.column,
                        field.rel.to._meta.db_table,
                        field.rel.to._meta.get_field(field.rel.field_name).column
                    )
                )

            # Things like the contrib.gis module fields have this in 1.1 and below
            if hasattr(field, 'post_create_sql'):
                for stmt in field.post_create_sql(no_style(), table_name):
                    self.add_deferred_sql(stmt)

            # Avoid double index creation (#1317)
            # Firebird creates an index implicitly for each foreign key field
            # sql_indexes_for_field tries to create an index for that field too
            if not field.rel:
                # In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
                # This also creates normal indexes in 1.1.
                if hasattr(self._get_connection().creation, "sql_indexes_for_field"):
                    # Make a fake model to pass in, with only db_table
                    model = self.mock_model("FakeModelForGISCreation", table_name)
                    for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
                        self.add_deferred_sql(stmt)

        if sql:
            return sql % sqlparams
        else:
            return None

    def _drop_constraints(self, table_name, name, field):
        """
        Drop CHECK constraints on the column, reconcile its UNIQUE
        constraint with the new field definition, and drop any foreign
        keys so alter_column can rebuild them afterwards.
        """
        if self.has_check_constraints:
            check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
            for constraint in check_constraints:
                self.execute(self.delete_check_sql % {
                    'table': self.quote_name(table_name),
                    'constraint': self.quote_name(constraint),
                })

        # Drop or add UNIQUE constraint
        unique_constraint = list(self._constraints_affecting_columns(table_name, [name], "UNIQUE"))
        if field.unique and not unique_constraint:
            self.create_unique(table_name, [name])
        elif not field.unique and unique_constraint:
            self.delete_unique(table_name, [name])

        # Drop all foreign key constraints
        try:
            self.delete_foreign_key(table_name, name)
        except ValueError:
            # There weren't any
            pass

    @generic.invalidate_table_constraints
    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
        """
        Alters the given column name so it will match the given field.
        Note that conversion between the two by the database must be possible.
        Will not automatically add _id by default; to have this behaviour, pass
        explicit_name=False.

        @param table_name: The name of the table to add the column to
        @param name: The name of the column to alter
        @param field: The new field definition to use
        """

        if self.dry_run:
            if self.debug:
                print('   - no dry run output for alter_column() due to dynamic DDL, sorry')
            return

        # hook for the field to do any resolution prior to it's attributes being queried
        if hasattr(field, 'south_init'):
            field.south_init()

        # Add _id or whatever if we need to
        field.set_attributes_from_name(name)
        if not explicit_name:
            name = field.column
        else:
            field.column = name

        if not ignore_constraints:
            # Drop all check constraints. Note that constraints will be added back
            # with self.alter_string_set_type and self.alter_string_drop_null.
            self._drop_constraints(table_name, name, field)

        # First, change the type
        params = {
            "column": self.quote_name(name),
            "type": self._db_type_for_alter_column(field),
            "table_name": table_name
        }

        # SQLs is a list of (SQL, values) pairs.
        sqls = []
        sqls_extra = []

        # Only alter the column if it has a type (Geometry ones sometimes don't)
        if params["type"] is not None:
            sqls.append((self.alter_string_set_type % params, []))

        # Add any field- and backend- specific modifications
        self._alter_add_column_mods(field, name, params, sqls)

        # Next, nullity: modified, firebird doesn't support DROP NOT NULL
        sqls_extra.append(self._alter_column_set_null(table_name, name, field.null))

        # Next, set any default
        self._alter_set_defaults(field, name, params, sqls)

        # Finally, actually change the column
        if self.allows_combined_alters:
            sqls, values = list(zip(*sqls))
            self.execute(
                "ALTER TABLE %s %s;" % (self.quote_name(table_name), ", ".join(sqls)),
                generic.flatten(values),
            )
        else:
            # Databases like e.g. MySQL don't like more than one alter at once.
            for sql, values in sqls:
                try:
                    self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), sql), values)
                except DatabaseError as e:
                    # NOTE(review): errors are reported but deliberately not
                    # re-raised so the remaining alters still run.
                    print(e)

        # Execute extra sql, which don't need ALTER TABLE statement
        for sql in sqls_extra:
            self.execute(sql)

        if not ignore_constraints:
            # Add back FK constraints if needed
            if field.rel and self.supports_foreign_keys:
                self.execute(
                    self.foreign_key_sql(
                        table_name,
                        field.column,
                        field.rel.to._meta.db_table,
                        field.rel.to._meta.get_field(field.rel.field_name).column
                    )
                )

    @generic.copy_column_constraints
    @generic.delete_column_constraints
    def rename_column(self, table_name, old, new):
        """
        Rename column 'old' to 'new' using Firebird's ALTER ... TO syntax.
        """
        if old == new:
            # Short-circuit out
            return []

        self.execute('ALTER TABLE %s ALTER %s TO %s;' % (
            self.quote_name(table_name),
            self.quote_name(old),
            self.quote_name(new),
        ))
diff --git a/lib/python2.7/site-packages/south/db/generic.py b/lib/python2.7/site-packages/south/db/generic.py
new file mode 100644
index 0000000..5c19354
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/generic.py
@@ -0,0 +1,1164 @@
+from __future__ import print_function
+
+import re
+import sys
+
+from django.core.management.color import no_style
+from django.db import transaction, models
+from django.db.utils import DatabaseError
+from django.db.backends.util import truncate_name
+from django.db.backends.creation import BaseDatabaseCreation
+from django.db.models.fields import NOT_PROVIDED
+from django.dispatch import dispatcher
+from django.conf import settings
+from django.utils.datastructures import SortedDict
try:
    from django.utils.functional import cached_property
except ImportError:
    # Fallback for Django versions that predate cached_property.
    class cached_property(object):
        """
        Decorator that converts a method with a single self argument into
        a property whose value is computed once, then cached on the
        instance for the instance's lifetime.
        """
        def __init__(self, func):
            self.func = func

        def __get__(self, instance, type=None):
            # Accessed on the class (instance is None): return the
            # descriptor itself instead of crashing on instance.__dict__.
            # This mirrors Django's own implementation.
            if instance is None:
                return self
            res = instance.__dict__[self.func.__name__] = self.func(instance)
            return res
+
+from south.logger import get_logger
+from south.utils.py3 import string_types, text_type
+
+
def alias(attrname):
    """
    Build a method that forwards every call to the attribute named
    'attrname' on the receiving instance.

    A plain ``foo = bar`` assignment would bind the target at class
    definition time and break subclass overrides; looking the attribute
    up at call time keeps aliases virtual.
    """
    def _forward(self, *args, **kwargs):
        target = getattr(self, attrname)
        return target(*args, **kwargs)
    return _forward
+
+
def invalidate_table_constraints(func):
    """
    Method decorator for table-changing operations: mark the table's
    constraint-cache entry as INVALID before delegating, so the next
    lookup re-reads the constraints from the database.
    """
    def _invalidate_then_call(self, table, *args, **kwargs):
        # The wrapped DDL is about to change this table's constraints;
        # forget whatever we knew about them first.
        self._set_cache(table, value=INVALID)
        return func(self, table, *args, **kwargs)
    return _invalidate_then_call
+
+
def delete_column_constraints(func):
    """
    Method decorator for column-removing operations: reset the cached
    constraints of 'column' to an empty list before delegating to the
    wrapped method.
    """
    def _forget_column(self, table, column, *args, **kwargs):
        self._set_cache(table, column, value=[])
        return func(self, table, column, *args, **kwargs)
    return _forget_column
+
+
def copy_column_constraints(func):
    """
    Method decorator for column-renaming operations: seed the cache
    entry of the new column name with the constraints recorded for the
    old one, then delegate to the wrapped method.
    """
    def _carry_over(self, table, column_old, column_new, *args, **kwargs):
        db_name = self._get_setting('NAME')
        inherited = self.lookup_constraint(db_name, table, column_old)
        self._set_cache(table, column_new, value=inherited)
        return func(self, table, column_old, column_new, *args, **kwargs)
    return _carry_over
+
+
class INVALID(Exception):
    # Sentinel stored in (and raised out of) the constraint cache to mark a
    # table's entry as stale; the class object itself is used as the marker.
    def __repr__(self):
        return 'INVALID'


class DryRunError(ValueError):
    # Raised when an operation cannot be meaningfully simulated in a dry run.
    pass
+
+
class DatabaseOperations(object):
    """
    Generic SQL implementation of the DatabaseOperations.
    Some of this code comes from Django Evolution.
    """

    # SQL templates, %-interpolated by the schema-alteration methods below.
    # Backend subclasses override individual templates where their dialect
    # differs.
    alter_string_set_type = 'ALTER COLUMN %(column)s TYPE %(type)s'
    alter_string_set_null = 'ALTER COLUMN %(column)s DROP NOT NULL'
    alter_string_drop_null = 'ALTER COLUMN %(column)s SET NOT NULL'
    delete_check_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
    add_column_string = 'ALTER TABLE %s ADD COLUMN %s;'
    delete_unique_sql = "ALTER TABLE %s DROP CONSTRAINT %s"
    delete_foreign_key_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
    create_table_sql = 'CREATE TABLE %(table)s (%(columns)s)'
    max_index_name_length = 63
    drop_index_string = 'DROP INDEX %(index_name)s'
    delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;'
    create_primary_key_string = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)"
    delete_primary_key_sql = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s"
    add_check_constraint_fragment = "ADD CONSTRAINT %(constraint)s CHECK (%(check)s)"
    rename_table_sql = "ALTER TABLE %s RENAME TO %s;"
    backend_name = None
    default_schema_name = "public"

    # Features
    # Capability flags consulted by the generic methods; backends flip
    # these to route around dialect limitations.
    allows_combined_alters = True
    supports_foreign_keys = True
    has_check_constraints = True
    has_booleans = True
    raises_default_errors = True
    @cached_property
    def has_ddl_transactions(self):
        """
        Tests the database using feature detection to see if it has
        transactional DDL support.

        Strategy: CREATE a throwaway table inside a transaction, roll the
        transaction back, then try the same CREATE again. If the second
        CREATE succeeds, the rollback undid the first one, i.e. DDL is
        transactional. Computed once per instance (cached_property).
        """
        self._possibly_initialise()
        connection = self._get_connection()
        if hasattr(connection.features, "confirm") and not connection.features._confirmed:
            connection.features.confirm()
        # Django 1.3's MySQLdb backend doesn't raise DatabaseError
        exceptions = (DatabaseError, )
        try:
            from MySQLdb import OperationalError
            exceptions += (OperationalError, )
        except ImportError:
            pass
        # Now do the test
        if getattr(connection.features, 'supports_transactions', True):
            cursor = connection.cursor()
            self.start_transaction()
            cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
            self.rollback_transaction()
            try:
                try:
                    cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
                except exceptions:
                    # Table survived the rollback: DDL is not transactional.
                    return False
                else:
                    return True
            finally:
                # Clean up the probe table whichever way the test went.
                cursor.execute('DROP TABLE DDL_TRANSACTION_TEST')
        else:
            return False

    def __init__(self, db_alias):
        # debug/dry_run are toggled externally by the migration runner.
        self.debug = False
        self.deferred_sql = []          # statements queued for execute_deferred_sql()
        self.dry_run = False
        self.pending_transactions = 0   # nesting depth of pending transaction requests
        self.pending_create_signals = []
        self.db_alias = db_alias        # which settings.DATABASES entry we operate on
        self._constraint_cache = {}     # {db_name: {table: {column: set((kind, name))}}}
        self._initialised = False       # connection_init() not yet run
+
    def lookup_constraint(self, db_name, table_name, column_name=None):
        """ return a set() of constraints for db_name.table_name.column_name """
        def _lookup():
            table = self._constraint_cache[db_name][table_name]
            if table is INVALID:
                # Entry was invalidated by a DDL operation; signal a refill.
                raise INVALID
            elif column_name is None:
                return list(table.items())
            else:
                return table[column_name]

        try:
            ret = _lookup()
            return ret
        except INVALID:
            # Stale entry: drop it and re-read from the database.
            del self._constraint_cache[db_name][table_name]
            self._fill_constraint_cache(db_name, table_name)
        except KeyError:
            if self._is_valid_cache(db_name, table_name):
                # Table is cached and valid, the column just has no
                # constraints recorded -- an empty answer.
                return []
            self._fill_constraint_cache(db_name, table_name)

        # Cache was (re)filled above; retry the lookup once.
        return self.lookup_constraint(db_name, table_name, column_name)

    def _set_cache(self, table_name, column_name=None, value=INVALID):
        # Write 'value' into the constraint cache at table (or column) level.
        # Silently ignores tables/databases we have never cached, and
        # column writes into a table entry that is currently INVALID.
        db_name = self._get_setting('NAME')
        try:
            if column_name is not None:
                self._constraint_cache[db_name][table_name][column_name] = value
            else:
                self._constraint_cache[db_name][table_name] = value
        except (LookupError, TypeError):
            pass
+
+ def _is_valid_cache(self, db_name, table_name):
+ # we cache per-table so if the table is there it is valid
+ try:
+ return self._constraint_cache[db_name][table_name] is not INVALID
+ except KeyError:
+ return False
+
+ def _is_multidb(self):
+ try:
+ from django.db import connections
+ connections # Prevents "unused import" warning
+ except ImportError:
+ return False
+ else:
+ return True
+
+ def _get_connection(self):
+ """
+ Returns a django connection for a given DB Alias
+ """
+ if self._is_multidb():
+ from django.db import connections
+ return connections[self.db_alias]
+ else:
+ from django.db import connection
+ return connection
+
+ def _get_setting(self, setting_name):
+ """
+ Allows code to get a setting (like, for example, STORAGE_ENGINE)
+ """
+ setting_name = setting_name.upper()
+ connection = self._get_connection()
+ if self._is_multidb():
+ # Django 1.2 and above
+ return connection.settings_dict[setting_name]
+ else:
+ # Django 1.1 and below
+ return getattr(settings, "DATABASE_%s" % setting_name)
+
+ def _has_setting(self, setting_name):
+ """
+ Existence-checking version of _get_setting.
+ """
+ try:
+ self._get_setting(setting_name)
+ except (KeyError, AttributeError):
+ return False
+ else:
+ return True
+
+ def _get_schema_name(self):
+ try:
+ return self._get_setting('schema')
+ except (KeyError, AttributeError):
+ return self.default_schema_name
+
+ def _possibly_initialise(self):
+ if not self._initialised:
+ self.connection_init()
+ self._initialised = True
+
+ def connection_init(self):
+ """
+ Run before any SQL to let database-specific config be sent as a command,
+ e.g. which storage engine (MySQL) or transaction serialisability level.
+ """
+ pass
+
+ def quote_name(self, name):
+ """
+ Uses the database backend to quote the given table/column name.
+ """
+ return self._get_connection().ops.quote_name(name)
+
+ def _print_sql_error(self, e, sql, params=[]):
+ print('FATAL ERROR - The following SQL query failed: %s' % sql, file=sys.stderr)
+ print('The error was: %s' % e, file=sys.stderr)
+
+ def execute(self, sql, params=[], print_all_errors=True):
+ """
+ Executes the given SQL statement, with optional parameters.
+ If the instance's debug attribute is True, prints out what it executes.
+ """
+
+ self._possibly_initialise()
+
+ cursor = self._get_connection().cursor()
+ if self.debug:
+ print(" = %s" % sql, params)
+
+ if self.dry_run:
+ return []
+
+ get_logger().debug(text_type('execute "%s" with params "%s"' % (sql, params)))
+
+ try:
+ cursor.execute(sql, params)
+ except DatabaseError as e:
+ if print_all_errors:
+ self._print_sql_error(e, sql, params)
+ raise
+
+ try:
+ return cursor.fetchall()
+ except:
+ return []
+
+ def execute_many(self, sql, regex=r"(?mx) ([^';]* (?:'[^']*'[^';]*)*)", comment_regex=r"(?mx) (?:^\s*$)|(?:--.*$)"):
+ """
+ Takes a SQL file and executes it as many separate statements.
+ (Some backends, such as Postgres, don't work otherwise.)
+ """
+ # Be warned: This function is full of dark magic. Make sure you really
+ # know regexes before trying to edit it.
+ # First, strip comments
+ sql = "\n".join([x.strip().replace("%", "%%") for x in re.split(comment_regex, sql) if x.strip()])
+ # Now execute each statement
+ for st in re.split(regex, sql)[1:][::2]:
+ self.execute(st)
+
+ def add_deferred_sql(self, sql):
+ """
+ Add a SQL statement to the deferred list, that won't be executed until
+ this instance's execute_deferred_sql method is run.
+ """
+ self.deferred_sql.append(sql)
+
+ def execute_deferred_sql(self):
+ """
+ Executes all deferred SQL, resetting the deferred_sql list
+ """
+ for sql in self.deferred_sql:
+ self.execute(sql)
+
+ self.deferred_sql = []
+
+ def clear_deferred_sql(self):
+ """
+ Resets the deferred_sql list to empty.
+ """
+ self.deferred_sql = []
+
+ def clear_run_data(self, pending_creates = None):
+ """
+ Resets variables to how they should be before a run. Used for dry runs.
+ If you want, pass in an old panding_creates to reset to.
+ """
+ self.clear_deferred_sql()
+ self.pending_create_signals = pending_creates or []
+
+ def get_pending_creates(self):
+ return self.pending_create_signals
+
    @invalidate_table_constraints
    def create_table(self, table_name, fields):
        """
        Creates the table 'table_name'. 'fields' is a tuple of fields,
        each represented by a 2-part tuple of field name and a
        django.db.models.fields.Field object
        """

        if len(table_name) > 63:
            print("   ! WARNING: You have a table name longer than 63 characters; this will not fully work on PostgreSQL or MySQL.")

        # avoid default values in CREATE TABLE statements (#925)
        for field_name, field in fields:
            field._suppress_default = True

        # Render each field; column_sql may return None for fields with no
        # SQL type, which are filtered out below.
        columns = [
            self.column_sql(table_name, field_name, field)
            for field_name, field in fields
        ]

        self.execute(self.create_table_sql % {
            "table": self.quote_name(table_name),
            "columns": ', '.join([col for col in columns if col]),
        })

    add_table = alias('create_table')  # Alias for consistency's sake
+
+ @invalidate_table_constraints
+ def rename_table(self, old_table_name, table_name):
+ """
+ Renames the table 'old_table_name' to 'table_name'.
+ """
+ if old_table_name == table_name:
+ # Short-circuit out.
+ return
+ params = (self.quote_name(old_table_name), self.quote_name(table_name))
+ self.execute(self.rename_table_sql % params)
+ # Invalidate the not-yet-indexed table
+ self._set_cache(table_name, value=INVALID)
+
+ @invalidate_table_constraints
+ def delete_table(self, table_name, cascade=True):
+ """
+ Deletes the table 'table_name'.
+ """
+ params = (self.quote_name(table_name), )
+ if cascade:
+ self.execute('DROP TABLE %s CASCADE;' % params)
+ else:
+ self.execute('DROP TABLE %s;' % params)
+
+ drop_table = alias('delete_table')
+
+ @invalidate_table_constraints
+ def clear_table(self, table_name):
+ """
+ Deletes all rows from 'table_name'.
+ """
+ params = (self.quote_name(table_name), )
+ self.execute('DELETE FROM %s;' % params)
+
    @invalidate_table_constraints
    def add_column(self, table_name, name, field, keep_default=True):
        """
        Adds the column 'name' to the table 'table_name'.
        Uses the 'field' parameter, a django.db.models.fields.Field instance,
        to generate the necessary sql

        @param table_name: The name of the table to add the column to
        @param name: The name of the column to add
        @param field: The field to use
        """
        sql = self.column_sql(table_name, name, field)
        if sql:
            params = (
                self.quote_name(table_name),
                sql,
            )
            sql = self.add_column_string % params
            self.execute(sql)

            # Now, drop the default if we need to
            # (the database-level default was only scaffolding so existing
            # rows could be populated; NOTE(review): keep_default is not
            # consulted here -- the default is always dropped)
            if field.default is not None:
                field.default = NOT_PROVIDED
                self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)

    def _db_type_for_alter_column(self, field):
        """
        Returns a field's type suitable for ALTER COLUMN.
        By default it just returns field.db_type().
        To be overridden by backend specific subclasses
        @param field: The field to generate type for
        """
        try:
            # Django 1.2+ requires the connection argument
            return field.db_type(connection=self._get_connection())
        except TypeError:
            return field.db_type()

    def _alter_add_column_mods(self, field, name, params, sqls):
        """
        Subcommand of alter_column that modifies column definitions beyond
        the type string -- e.g. adding constraints where they cannot be specified
        as part of the type (overrideable)
        """
        pass

    def _alter_set_defaults(self, field, name, params, sqls):
        "Subcommand of alter_column that sets default values (overrideable)"
        # Historically, we used to set defaults here.
        # But since South 0.8, we don't ever set defaults on alter-column -- we only
        # use database-level defaults as scaffolding when adding columns.
        # However, we still sometimes need to remove defaults in alter-column.
        sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), []))

    def _update_nulls_to_default(self, params, field):
        "Subcommand of alter_column that updates nulls to default value (overrideable)"
        default = field.get_db_prep_save(field.get_default(), connection=self._get_connection())
        self.execute('UPDATE %(table_name)s SET %(column)s=%%s WHERE %(column)s IS NULL' % params, [default])
+
    @invalidate_table_constraints
    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
        """
        Alters the given column name so it will match the given field.
        Note that conversion between the two by the database must be possible.
        Will not automatically add _id by default; to have this behaviour, pass
        explicit_name=False.

        @param table_name: The name of the table to add the column to
        @param name: The name of the column to alter
        @param field: The new field definition to use
        """

        if self.dry_run:
            if self.debug:
                print('   - no dry run output for alter_column() due to dynamic DDL, sorry')
            return

        # hook for the field to do any resolution prior to it's attributes being queried
        if hasattr(field, 'south_init'):
            field.south_init()

        # Add _id or whatever if we need to
        field.set_attributes_from_name(name)
        if not explicit_name:
            name = field.column
        else:
            field.column = name

        if not ignore_constraints:
            # Drop all check constraints. Note that constraints will be added back
            # with self.alter_string_set_type and self.alter_string_drop_null.
            if self.has_check_constraints:
                check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
                for constraint in check_constraints:
                    self.execute(self.delete_check_sql % {
                        'table': self.quote_name(table_name),
                        'constraint': self.quote_name(constraint),
                    })

            # Drop all foreign key constraints
            try:
                self.delete_foreign_key(table_name, name)
            except ValueError:
                # There weren't any
                pass

        # First, change the type
        params = {
            "column": self.quote_name(name),
            "type": self._db_type_for_alter_column(field),
            "table_name": self.quote_name(table_name)
        }

        # SQLs is a list of (SQL, values) pairs.
        sqls = []

        # Only alter the column if it has a type (Geometry ones sometimes don't)
        if params["type"] is not None:
            sqls.append((self.alter_string_set_type % params, []))

        # Add any field- and backend- specific modifications
        self._alter_add_column_mods(field, name, params, sqls)
        # Next, nullity
        # (a NOT NULL column with a default is set nullable here first, so
        # existing NULLs can be backfilled before re-tightening below)
        if field.null or field.has_default():
            sqls.append((self.alter_string_set_null % params, []))
        else:
            sqls.append((self.alter_string_drop_null % params, []))

        # Do defaults
        self._alter_set_defaults(field, name, params, sqls)

        # Actually change the column (step 1 -- Nullity may need to be fixed)
        if self.allows_combined_alters:
            # One combined ALTER TABLE with comma-joined clauses.
            sqls, values = zip(*sqls)
            self.execute(
                "ALTER TABLE %s %s;" % (self.quote_name(table_name), ", ".join(sqls)),
                flatten(values),
            )
        else:
            # Databases like e.g. MySQL don't like more than one alter at once.
            for sql, values in sqls:
                self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), sql), values)

        if not field.null and field.has_default():
            # Final fixes: backfill NULLs with the default, then apply NOT NULL.
            self._update_nulls_to_default(params, field)
            self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), self.alter_string_drop_null % params), [])

        if not ignore_constraints:
            # Add back FK constraints if needed
            if field.rel and self.supports_foreign_keys:
                self.execute(
                    self.foreign_key_sql(
                        table_name,
                        field.column,
                        field.rel.to._meta.db_table,
                        field.rel.to._meta.get_field(field.rel.field_name).column
                    )
                )
+
    def _fill_constraint_cache(self, db_name, table_name):
        """
        Populates self._constraint_cache[db_name][table_name] with a mapping
        of column name -> set of (constraint kind, constraint name) pairs,
        read from the INFORMATION_SCHEMA views.
        """
        schema = self._get_schema_name()
        # Both views are scanned; presumably each lists column/constraint
        # pairs the other omits for some constraint kinds -- verify per backend.
        ifsc_tables = ["constraint_column_usage", "key_column_usage"]

        self._constraint_cache.setdefault(db_name, {})
        self._constraint_cache[db_name][table_name] = {}

        for ifsc_table in ifsc_tables:
            # %%s survives the view-name interpolation as the %s placeholders
            # for the bound parameters.
            rows = self.execute("""
                SELECT kc.constraint_name, kc.column_name, c.constraint_type
                FROM information_schema.%s AS kc
                JOIN information_schema.table_constraints AS c ON
                    kc.table_schema = c.table_schema AND
                    kc.table_name = c.table_name AND
                    kc.constraint_name = c.constraint_name
                WHERE
                    kc.table_schema = %%s AND
                    kc.table_name = %%s
            """ % ifsc_table, [schema, table_name])
            for constraint, column, kind in rows:
                self._constraint_cache[db_name][table_name].setdefault(column, set())
                self._constraint_cache[db_name][table_name][column].add((kind, constraint))
        return
+
+ def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"):
+ """
+ Gets the names of the constraints affecting the given columns.
+ If columns is None, returns all constraints of the type on the table.
+ """
+ if self.dry_run:
+ raise DryRunError("Cannot get constraints for columns.")
+
+ if columns is not None:
+ columns = set(map(lambda s: s.lower(), columns))
+
+ db_name = self._get_setting('NAME')
+
+ cnames = {}
+ for col, constraints in self.lookup_constraint(db_name, table_name):
+ for kind, cname in constraints:
+ if kind == type:
+ cnames.setdefault(cname, set())
+ cnames[cname].add(col.lower())
+
+ for cname, cols in cnames.items():
+ if cols == columns or columns is None:
+ yield cname
+
+ @invalidate_table_constraints
+ def create_unique(self, table_name, columns):
+ """
+ Creates a UNIQUE constraint on the columns on the given table.
+ """
+
+ if not isinstance(columns, (list, tuple)):
+ columns = [columns]
+
+ name = self.create_index_name(table_name, columns, suffix="_uniq")
+
+ cols = ", ".join(map(self.quote_name, columns))
+ self.execute("ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)" % (
+ self.quote_name(table_name),
+ self.quote_name(name),
+ cols,
+ ))
+ return name
+
+ @invalidate_table_constraints
+ def delete_unique(self, table_name, columns):
+ """
+ Deletes a UNIQUE constraint on precisely the columns on the given table.
+ """
+
+ if not isinstance(columns, (list, tuple)):
+ columns = [columns]
+
+ # Dry runs mean we can't do anything.
+ if self.dry_run:
+ if self.debug:
+ print(' - no dry run output for delete_unique_column() due to dynamic DDL, sorry')
+ return
+
+ constraints = list(self._constraints_affecting_columns(table_name, columns))
+ if not constraints:
+ raise ValueError("Cannot find a UNIQUE constraint on table %s, columns %r" % (table_name, columns))
+ for constraint in constraints:
+ self.execute(self.delete_unique_sql % (
+ self.quote_name(table_name),
+ self.quote_name(constraint),
+ ))
+
+ def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
+ """
+ Creates the SQL snippet for a column. Used by add_column and add_table.
+ """
+
+ # If the field hasn't already been told its attribute name, do so.
+ if not field_prepared:
+ field.set_attributes_from_name(field_name)
+
+ # hook for the field to do any resolution prior to it's attributes being queried
+ if hasattr(field, 'south_init'):
+ field.south_init()
+
+ # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
+ field = self._field_sanity(field)
+
+ try:
+ sql = field.db_type(connection=self._get_connection())
+ except TypeError:
+ sql = field.db_type()
+
+ if sql:
+
+ # Some callers, like the sqlite stuff, just want the extended type.
+ if with_name:
+ field_output = [self.quote_name(field.column), sql]
+ else:
+ field_output = [sql]
+
+ field_output.append('%sNULL' % (not field.null and 'NOT ' or ''))
+ if field.primary_key:
+ field_output.append('PRIMARY KEY')
+ elif field.unique:
+ # Just use UNIQUE (no indexes any more, we have delete_unique)
+ field_output.append('UNIQUE')
+
+ tablespace = field.db_tablespace or tablespace
+ if tablespace and getattr(self._get_connection().features, "supports_tablespaces", False) and field.unique:
+ # We must specify the index tablespace inline, because we
+ # won't be generating a CREATE INDEX statement for this field.
+ field_output.append(self._get_connection().ops.tablespace_sql(tablespace, inline=True))
+
+ sql = ' '.join(field_output)
+ sqlparams = ()
+ # if the field is "NOT NULL" and a default value is provided, create the column with it
+ # this allows the addition of a NOT NULL field to a table with existing rows
+ if not getattr(field, '_suppress_default', False):
+ if field.has_default():
+ default = field.get_default()
+ # If the default is actually None, don't add a default term
+ if default is not None:
+ # If the default is a callable, then call it!
+ if callable(default):
+ default = default()
+
+ default = field.get_db_prep_save(default, connection=self._get_connection())
+ default = self._default_value_workaround(default)
+ # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
+ if isinstance(default, string_types):
+ default = "'%s'" % default.replace("'", "''")
+ # Escape any % signs in the output (bug #317)
+ if isinstance(default, string_types):
+ default = default.replace("%", "%%")
+ # Add it in
+ sql += " DEFAULT %s"
+ sqlparams = (default)
+ elif (not field.null and field.blank) or (field.get_default() == ''):
+ if field.empty_strings_allowed and self._get_connection().features.interprets_empty_strings_as_nulls:
+ sql += " DEFAULT ''"
+ # Error here would be nice, but doesn't seem to play fair.
+ #else:
+ # raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")
+
+ if field.rel and self.supports_foreign_keys:
+ self.add_deferred_sql(
+ self.foreign_key_sql(
+ table_name,
+ field.column,
+ field.rel.to._meta.db_table,
+ field.rel.to._meta.get_field(field.rel.field_name).column
+ )
+ )
+
+ # Things like the contrib.gis module fields have this in 1.1 and below
+ if hasattr(field, 'post_create_sql'):
+ for stmt in field.post_create_sql(no_style(), table_name):
+ self.add_deferred_sql(stmt)
+
+ # In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
+ # This also creates normal indexes in 1.1.
+ if hasattr(self._get_connection().creation, "sql_indexes_for_field"):
+ # Make a fake model to pass in, with only db_table
+ model = self.mock_model("FakeModelForGISCreation", table_name)
+ for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
+ self.add_deferred_sql(stmt)
+
+ if sql:
+ return sql % sqlparams
+ else:
+ return None
+
    def _field_sanity(self, field):
        """
        Placeholder for DBMS-specific field alterations (some combos aren't valid,
        e.g. DEFAULT and TEXT on MySQL)

        Returns the field unchanged here; backends override this hook to
        adjust the field before its SQL is generated.
        """
        return field
+
+ def _default_value_workaround(self, value):
+ """
+ DBMS-specific value alterations (this really works around
+ missing functionality in Django backends)
+ """
+ if isinstance(value, bool) and not self.has_booleans:
+ return int(value)
+ else:
+ return value
+
+ def foreign_key_sql(self, from_table_name, from_column_name, to_table_name, to_column_name):
+ """
+ Generates a full SQL statement to add a foreign key constraint
+ """
+ constraint_name = '%s_refs_%s_%s' % (from_column_name, to_column_name, self._digest(from_table_name, to_table_name))
+ return 'ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % (
+ self.quote_name(from_table_name),
+ self.quote_name(self.shorten_name(constraint_name)),
+ self.quote_name(from_column_name),
+ self.quote_name(to_table_name),
+ self.quote_name(to_column_name),
+ self._get_connection().ops.deferrable_sql() # Django knows this
+ )
+
    @invalidate_table_constraints
    def delete_foreign_key(self, table_name, column):
        """
        Drops every FOREIGN KEY constraint involving the given column.

        @raise ValueError: if no foreign key constraint is found.
        """
        if self.dry_run:
            if self.debug:
                print(' - no dry run output for delete_foreign_key() due to dynamic DDL, sorry')
            return # We can't look at the DB to get the constraints
        constraints = self._find_foreign_constraints(table_name, column)
        if not constraints:
            raise ValueError("Cannot find a FOREIGN KEY constraint on table %s, column %s" % (table_name, column))
        for constraint_name in constraints:
            self.execute(self.delete_foreign_key_sql % {
                "table": self.quote_name(table_name),
                "constraint": self.quote_name(constraint_name),
            })

    drop_foreign_key = alias('delete_foreign_key')
+
    def _find_foreign_constraints(self, table_name, column_name=None):
        """
        Returns the names of FOREIGN KEY constraints on `table_name` that
        involve `column_name`.

        For tables with a single-column primary key, the lookup is repeated
        with the PK column added so constraints spanning the PK are found too;
        composite primary keys cannot be FK targets and get no second pass.
        """
        constraints = self._constraints_affecting_columns(
            table_name, [column_name], "FOREIGN KEY")

        primary_key_columns = self._find_primary_key_columns(table_name)

        if len(primary_key_columns) > 1:
            # Composite primary keys cannot be referenced by a foreign key
            return list(constraints)
        else:
            primary_key_columns.add(column_name)
            recursive_constraints = set(self._constraints_affecting_columns(
                table_name, primary_key_columns, "FOREIGN KEY"))
            return list(recursive_constraints.union(constraints))
+
+ def _digest(self, *args):
+ """
+ Use django.db.backends.creation.BaseDatabaseCreation._digest
+ to create index name in Django style. An evil hack :(
+ """
+ if not hasattr(self, '_django_db_creation'):
+ self._django_db_creation = BaseDatabaseCreation(self._get_connection())
+ return self._django_db_creation._digest(*args)
+
    def shorten_name(self, name):
        """Truncates `name` to the backend's maximum identifier length."""
        return truncate_name(name, self._get_connection().ops.max_name_length())
+
    def create_index_name(self, table_name, column_names, suffix=""):
        """
        Generate a unique name for the index.

        NOTE: the scheme is deliberately deterministic -- delete_index and
        delete_unique re-derive the same name later, so do not change the
        formulas below.
        """

        # If there is just one column in the index, use a default algorithm from Django
        if len(column_names) == 1 and not suffix:
            try:
                _hash = self._digest([column_names[0]])
            except TypeError:
                # Django < 1.5 backward compatibility.
                _hash = self._digest(column_names[0])
            return self.shorten_name(
                '%s_%s' % (table_name, _hash),
            )

        # Else generate the name for the index by South
        table_name = table_name.replace('"', '').replace('.', '_')
        # NOTE(review): built-in hash() of strings is randomized per process on
        # Python 3 unless PYTHONHASHSEED is fixed -- cross-process stability of
        # these names is questionable there; confirm before relying on it.
        index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names))))

        # If the index name is too long, truncate it
        index_name = ('%s_%s%s%s' % (table_name, column_names[0], index_unique_name, suffix)).replace('"', '').replace('.', '_')
        if len(index_name) > self.max_index_name_length:
            part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix))
            index_name = '%s%s' % (table_name[:(self.max_index_name_length - len(part))], part)

        return index_name
+
+ def create_index_sql(self, table_name, column_names, unique=False, db_tablespace=''):
+ """
+ Generates a create index statement on 'table_name' for a list of 'column_names'
+ """
+ if not column_names:
+ print("No column names supplied on which to create an index")
+ return ''
+
+ connection = self._get_connection()
+ if db_tablespace and connection.features.supports_tablespaces:
+ tablespace_sql = ' ' + connection.ops.tablespace_sql(db_tablespace)
+ else:
+ tablespace_sql = ''
+
+ index_name = self.create_index_name(table_name, column_names)
+ return 'CREATE %sINDEX %s ON %s (%s)%s;' % (
+ unique and 'UNIQUE ' or '',
+ self.quote_name(index_name),
+ self.quote_name(table_name),
+ ','.join([self.quote_name(field) for field in column_names]),
+ tablespace_sql
+ )
+
    @invalidate_table_constraints
    def create_index(self, table_name, column_names, unique=False, db_tablespace=''):
        """ Executes the create index statement built by create_index_sql. """
        sql = self.create_index_sql(table_name, column_names, unique, db_tablespace)
        self.execute(sql)
+
    @invalidate_table_constraints
    def delete_index(self, table_name, column_names, db_tablespace=''):
        """
        Deletes an index created with create_index.
        This is possible using only columns due to the deterministic
        index naming function which relies on column names.
        """
        if isinstance(column_names, string_types):
            column_names = [column_names]
        # Re-derive the exact name create_index would have produced.
        name = self.create_index_name(table_name, column_names)
        sql = self.drop_index_string % {
            "index_name": self.quote_name(name),
            "table_name": self.quote_name(table_name),
        }
        self.execute(sql)

    drop_index = alias('delete_index')
+
    @delete_column_constraints
    def delete_column(self, table_name, name):
        """
        Deletes the column `name` from the table `table_name`.
        """
        params = (self.quote_name(table_name), self.quote_name(name))
        self.execute(self.delete_column_string % params, [])

    drop_column = alias('delete_column')
+
    def rename_column(self, table_name, old, new):
        """
        Renames the column 'old' from the table 'table_name' to 'new'.

        There is no portable SQL for this, so every backend must override it.
        """
        raise NotImplementedError("rename_column has no generic SQL syntax")
+
    @invalidate_table_constraints
    def delete_primary_key(self, table_name):
        """
        Drops the old primary key.

        @raise ValueError: if the table has no PRIMARY KEY constraint.
        """
        # Dry runs mean we can't do anything.
        if self.dry_run:
            if self.debug:
                print(' - no dry run output for delete_primary_key() due to dynamic DDL, sorry')
            return

        constraints = list(self._constraints_affecting_columns(table_name, None, type="PRIMARY KEY"))
        if not constraints:
            raise ValueError("Cannot find a PRIMARY KEY constraint on table %s" % (table_name,))

        for constraint in constraints:
            self.execute(self.delete_primary_key_sql % {
                "table": self.quote_name(table_name),
                "constraint": self.quote_name(constraint),
            })

    drop_primary_key = alias('delete_primary_key')
+
+ @invalidate_table_constraints
+ def create_primary_key(self, table_name, columns):
+ """
+ Creates a new primary key on the specified columns.
+ """
+ if not isinstance(columns, (list, tuple)):
+ columns = [columns]
+ self.execute(self.create_primary_key_string % {
+ "table": self.quote_name(table_name),
+ "constraint": self.quote_name(table_name + "_pkey"),
+ "columns": ", ".join(map(self.quote_name, columns)),
+ })
+
+ def _find_primary_key_columns(self, table_name):
+ """
+ Find all columns of the primary key of the specified table
+ """
+ db_name = self._get_setting('NAME')
+
+ primary_key_columns = set()
+ for col, constraints in self.lookup_constraint(db_name, table_name):
+ for kind, cname in constraints:
+ if kind == 'PRIMARY KEY':
+ primary_key_columns.add(col.lower())
+
+ return primary_key_columns
+
    def start_transaction(self):
        """
        Makes sure the following commands are inside a transaction.
        Must be followed by a (commit|rollback)_transaction call.

        Uses the pre-Django-1.6 transaction-management API.
        """
        if self.dry_run:
            # Track nesting so rollback_transactions_dry_run can unwind it.
            self.pending_transactions += 1
        transaction.commit_unless_managed(using=self.db_alias)
        transaction.enter_transaction_management(using=self.db_alias)
        transaction.managed(True, using=self.db_alias)
+
    def commit_transaction(self):
        """
        Commits the current transaction.
        Must be preceded by a start_transaction call.

        No-op on dry runs, where no real transaction was opened.
        """
        if self.dry_run:
            return
        transaction.commit(using=self.db_alias)
        transaction.leave_transaction_management(using=self.db_alias)
+
    def rollback_transaction(self):
        """
        Rolls back the current transaction.
        Must be preceded by a start_transaction call.
        """
        if self.dry_run:
            # Mirror the increment done in start_transaction.
            self.pending_transactions -= 1
        transaction.rollback(using=self.db_alias)
        transaction.leave_transaction_management(using=self.db_alias)
+
    def rollback_transactions_dry_run(self):
        """
        Rolls back all pending_transactions during this dry run.
        """
        if not self.dry_run:
            return
        while self.pending_transactions > 0:
            self.rollback_transaction()
        if transaction.is_dirty(using=self.db_alias):
            # Force an exception, if we're still in a dirty transaction.
            # This means we are missing a COMMIT/ROLLBACK.
            transaction.leave_transaction_management(using=self.db_alias)
+
    def send_create_signal(self, app_label, model_names):
        """Queues a post_syncdb signal; delivered by send_pending_create_signals."""
        self.pending_create_signals.append((app_label, model_names))
+
+ def send_pending_create_signals(self, verbosity=0, interactive=False):
+ # Group app_labels together
+ signals = SortedDict()
+ for (app_label, model_names) in self.pending_create_signals:
+ try:
+ signals[app_label].extend(model_names)
+ except KeyError:
+ signals[app_label] = list(model_names)
+ # Send only one signal per app.
+ for (app_label, model_names) in signals.items():
+ self.really_send_create_signal(app_label, list(set(model_names)),
+ verbosity=verbosity,
+ interactive=interactive)
+ self.pending_create_signals = []
+
    def really_send_create_signal(self, app_label, model_names,
                                  verbosity=0, interactive=False):
        """
        Sends a post_syncdb signal for the model specified.

        If the model is not found (perhaps it's been deleted?),
        no signal is sent.

        TODO: The behavior of django.contrib.* apps seems flawed in that
        they don't respect created_models. Rather, they blindly execute
        over all models within the app sending the signal. This is a
        patch we should push Django to make. For now, this should work.
        """

        if self.debug:
            print(" - Sending post_syncdb signal for %s: %s" % (app_label, model_names))

        app = models.get_app(app_label)
        if not app:
            return

        # Resolve names into model classes, silently dropping missing ones.
        created_models = []
        for model_name in model_names:
            model = models.get_model(app_label, model_name)
            if model:
                created_models.append(model)

        if created_models:

            if hasattr(dispatcher, "send"):
                # Older djangos
                dispatcher.send(signal=models.signals.post_syncdb, sender=app,
                                app=app, created_models=created_models,
                                verbosity=verbosity, interactive=interactive)
            else:
                if self._is_multidb():
                    # Django 1.2+: pass the database alias along.
                    models.signals.post_syncdb.send(
                        sender=app,
                        app=app,
                        created_models=created_models,
                        verbosity=verbosity,
                        interactive=interactive,
                        db=self.db_alias,
                    )
                else:
                    # Django 1.1 - 1.0
                    models.signals.post_syncdb.send(
                        sender=app,
                        app=app,
                        created_models=created_models,
                        verbosity=verbosity,
                        interactive=interactive,
                    )
+
+ def mock_model(self, model_name, db_table, db_tablespace='',
+ pk_field_name='id', pk_field_type=models.AutoField,
+ pk_field_args=[], pk_field_kwargs={}):
+ """
+ Generates a MockModel class that provides enough information
+ to be used by a foreign key/many-to-many relationship.
+
+ Migrations should prefer to use these rather than actual models
+ as models could get deleted over time, but these can remain in
+ migration files forever.
+
+ Depreciated.
+ """
+ class MockOptions(object):
+ def __init__(self):
+ self.db_table = db_table
+ self.db_tablespace = db_tablespace or settings.DEFAULT_TABLESPACE
+ self.object_name = model_name
+ self.module_name = model_name.lower()
+
+ if pk_field_type == models.AutoField:
+ pk_field_kwargs['primary_key'] = True
+
+ self.pk = pk_field_type(*pk_field_args, **pk_field_kwargs)
+ self.pk.set_attributes_from_name(pk_field_name)
+ self.abstract = False
+
+ def get_field_by_name(self, field_name):
+ # we only care about the pk field
+ return (self.pk, self.model, True, False)
+
+ def get_field(self, name):
+ # we only care about the pk field
+ return self.pk
+
+ class MockModel(object):
+ _meta = None
+
+ # We need to return an actual class object here, not an instance
+ MockModel._meta = MockOptions()
+ MockModel._meta.model = MockModel
+ return MockModel
+
    def _db_positive_type_for_alter_column(self, klass, field):
        """
        A helper for subclasses overriding _db_type_for_alter_column:
        Remove the check constraint from the type string for PositiveInteger
        and PositiveSmallInteger fields.
        @param klass: The type of the child (required to allow this to be used when it is subclassed)
        @param field: The field to generate type for
        """
        super_result = super(klass, self)._db_type_for_alter_column(field)
        if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)):
            # The CHECK clause follows the first space; keep only the bare type.
            return super_result.split(" ", 1)[0]
        return super_result
+
    def _alter_add_positive_check(self, klass, field, name, params, sqls):
        """
        A helper for subclasses overriding _alter_add_column_mods:
        Add a check constraint verifying positivity to PositiveInteger and
        PositiveSmallInteger fields.
        """
        super(klass, self)._alter_add_column_mods(field, name, params, sqls)
        if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)):
            # Hash the params to make the constraint name unique per column/type.
            uniq_hash = abs(hash(tuple(params.values())))
            d = dict(
                constraint = "CK_%s_PSTV_%s" % (name, hex(uniq_hash)[2:]),
                check = "%s >= 0" % self.quote_name(name))
            sqls.append((self.add_check_constraint_fragment % d, []))
+
+
def flatten(ls):
    """Flatten exactly one level of nesting: [[a, b], [c]] -> [a, b, c]."""
    return [item for sub in ls for item in sub]
diff --git a/lib/python2.7/site-packages/south/db/mysql.py b/lib/python2.7/site-packages/south/db/mysql.py
new file mode 100644
index 0000000..3e87464
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/mysql.py
@@ -0,0 +1,290 @@
+# MySQL-specific implementations for south
+# Original author: Andrew Godwin
+# Patches by: F. Gabriel Gosselin <gabrielNOSPAM@evidens.ca>
+
+from south.db import generic
+from south.db.generic import DryRunError, INVALID
+from south.logger import get_logger
+
+
def delete_column_constraints(func):
    """
    Decorates column operation functions for MySQL.
    Deletes the constraints from the database and clears local cache.

    The wrapped function must accept (self, table_name, column_name, ...).
    """
    def _column_rm(self, table_name, column_name, *args, **opts):
        # Delete foreign key constraints
        try:
            self.delete_foreign_key(table_name, column_name)
        except ValueError:
            pass # If no foreign key on column, OK because it checks first
        # Delete constraints referring to this column
        try:
            reverse = self._lookup_reverse_constraint(table_name, column_name)
            for cname, rtable, rcolumn in reverse:
                self.delete_foreign_key(rtable, rcolumn)
        except DryRunError:
            # Dry runs cannot inspect the DB; skip silently.
            pass
        return func(self, table_name, column_name, *args, **opts)
    return _column_rm
+
+
def copy_column_constraints(func):
    """
    Decorates column operation functions for MySQL.
    Determines existing constraints and copies them to a new column

    The re-creation SQL is queued as deferred SQL, so it runs after the
    wrapped operation has taken effect.
    """
    def _column_cp(self, table_name, column_old, column_new, *args, **opts):
        # Copy foreign key constraint
        try:
            constraint = self._find_foreign_constraints(
                table_name, column_old)[0]
            refs = self._lookup_constraint_references(table_name, constraint)
            if refs is not None:
                (ftable, fcolumn) = refs
                if ftable and fcolumn:
                    fk_sql = self.foreign_key_sql(
                        table_name, column_new, ftable, fcolumn)
                    get_logger().debug("Foreign key SQL: " + fk_sql)
                    self.add_deferred_sql(fk_sql)
        except IndexError:
            pass # No constraint exists so ignore
        except DryRunError:
            pass
        # Copy constraints referring to this column
        try:
            reverse = self._lookup_reverse_constraint(table_name, column_old)
            for cname, rtable, rcolumn in reverse:
                fk_sql = self.foreign_key_sql(
                    rtable, rcolumn, table_name, column_new)
                self.add_deferred_sql(fk_sql)
        except DryRunError:
            pass
        return func(self, table_name, column_old, column_new, *args, **opts)
    return _column_cp
+
+
def invalidate_table_constraints(func):
    """
    For MySQL we grab all table constraints simultaneously, so this is
    effective.
    It further solves the issues of invalidating referred table constraints.
    """
    def _cache_clear(self, table, *args, **opts):
        db_name = self._get_setting('NAME')
        # Drop each per-database cache wholesale; they are refilled lazily.
        for cache in (self._constraint_cache,
                      self._reverse_cache,
                      self._constraint_references):
            cache.pop(db_name, None)
        return func(self, table, *args, **opts)
    return _cache_clear
+
+
class DatabaseOperations(generic.DatabaseOperations):
    """
    MySQL implementation of database operations.

    MySQL has no DDL transaction support. This can confuse people when they
    ask how to roll back - hence the dry runs, etc., found in the migration
    code.
    """

    backend_name = "mysql"
    # MODIFY restates the whole column definition, so the type change rides
    # along with the NULL/NOT NULL statements and this template stays empty.
    alter_string_set_type = ''
    alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
    alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
    drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
    delete_primary_key_sql = "ALTER TABLE %(table)s DROP PRIMARY KEY"
    delete_foreign_key_sql = "ALTER TABLE %(table)s DROP FOREIGN KEY %(constraint)s"
    delete_unique_sql = "ALTER TABLE %s DROP INDEX %s"
    rename_table_sql = "RENAME TABLE %s TO %s;"

    # MySQL can't run several ALTER clauses combined, and has no CHECK
    # constraint support.
    allows_combined_alters = False
    has_check_constraints = False
    raises_default_errors = False

    # Column type substrings that may not take a DEFAULT clause on MySQL.
    geom_types = ['geometry', 'point', 'linestring', 'polygon']
    text_types = ['text', 'blob']

    def __init__(self, db_alias):
        # Extra caches beyond the generic constraint cache:
        #   _constraint_references: (table, constraint) -> (ref_table, ref_column)
        #   _reverse_cache: ref_table -> ref_column -> {(constraint, table, column)}
        self._constraint_references = {}
        self._reverse_cache = {}
        super(DatabaseOperations, self).__init__(db_alias)
        if self._has_setting('STORAGE_ENGINE') and self._get_setting('STORAGE_ENGINE'):
            self.create_table_sql = self.create_table_sql + ' ENGINE=%s' % self._get_setting('STORAGE_ENGINE')

    def _is_valid_cache(self, db_name, table_name):
        cache = self._constraint_cache
        # we cache the whole db so if there are any tables table_name is valid
        return db_name in cache and cache[db_name].get(table_name, None) is not INVALID

    def _fill_constraint_cache(self, db_name, table_name):
        # for MySQL grab all constraints for this database. It's just as cheap as a single column.
        self._constraint_cache[db_name] = {}
        self._constraint_cache[db_name][table_name] = {}
        self._reverse_cache[db_name] = {}
        self._constraint_references[db_name] = {}

        # First pass: collect (column, referenced table/column) per constraint.
        name_query = """
            SELECT kc.`constraint_name`, kc.`column_name`, kc.`table_name`,
                kc.`referenced_table_name`, kc.`referenced_column_name`
            FROM information_schema.key_column_usage AS kc
            WHERE
                kc.table_schema = %s
        """
        rows = self.execute(name_query, [db_name])
        if not rows:
            return
        cnames = {}
        for constraint, column, table, ref_table, ref_column in rows:
            key = (table, constraint)
            cnames.setdefault(key, set())
            cnames[key].add((column, ref_table, ref_column))

        # Second pass: attach the constraint kind and fill all three caches.
        type_query = """
            SELECT c.constraint_name, c.table_name, c.constraint_type
            FROM information_schema.table_constraints AS c
            WHERE
                c.table_schema = %s
        """
        rows = self.execute(type_query, [db_name])
        for constraint, table, kind in rows:
            key = (table, constraint)
            self._constraint_cache[db_name].setdefault(table, {})
            try:
                cols = cnames[key]
            except KeyError:
                cols = set()
            for column_set in cols:
                (column, ref_table, ref_column) = column_set
                self._constraint_cache[db_name][table].setdefault(column, set())
                if kind == 'FOREIGN KEY':
                    self._constraint_cache[db_name][table][column].add((kind,
                        constraint))
                    # Create constraint lookup, see constraint_references
                    self._constraint_references[db_name][(table,
                        constraint)] = (ref_table, ref_column)
                    # Create reverse table lookup, reverse_lookup
                    self._reverse_cache[db_name].setdefault(ref_table, {})
                    self._reverse_cache[db_name][ref_table].setdefault(ref_column,
                        set())
                    self._reverse_cache[db_name][ref_table][ref_column].add(
                        (constraint, table, column))
                else:
                    self._constraint_cache[db_name][table][column].add((kind,
                        constraint))

    def connection_init(self):
        """
        Run before any SQL to let database-specific config be sent as a command,
        e.g. which storage engine (MySQL) or transaction serialisability level.
        """
        cursor = self._get_connection().cursor()
        # Probe which variable name this server understands; newer MySQL uses
        # default_storage_engine, older servers only know storage_engine.
        if cursor.execute("SHOW variables WHERE Variable_Name='default_storage_engine';"):
            engine_var = 'default_storage_engine'
        else:
            engine_var = 'storage_engine'
        if self._has_setting('STORAGE_ENGINE') and self._get_setting('STORAGE_ENGINE'):
            cursor.execute("SET %s=%s;" % (engine_var, self._get_setting('STORAGE_ENGINE')))

    def start_transaction(self):
        super(DatabaseOperations, self).start_transaction()
        # Disable FK enforcement for the duration, so schema changes need not
        # happen in dependency order.
        self.execute("SET FOREIGN_KEY_CHECKS=0;")

    @copy_column_constraints
    @delete_column_constraints
    @invalidate_table_constraints
    def rename_column(self, table_name, old, new):
        # Renaming a column to itself (or in a dry run) is a no-op.
        if old == new or self.dry_run:
            return []

        rows = [x for x in self.execute('DESCRIBE %s' % (self.quote_name(table_name),)) if x[0] == old]

        if not rows:
            raise ValueError("No column '%s' in '%s'." % (old, table_name))

        # DESCRIBE columns are (Field, Type, Null, Key, Default, Extra); the
        # full definition must be restated for CHANGE COLUMN.
        params = (
            self.quote_name(table_name),
            self.quote_name(old),
            self.quote_name(new),
            rows[0][1],
            rows[0][2] == "YES" and "NULL" or "NOT NULL",
            rows[0][4] and "DEFAULT " or "",
            rows[0][4] and "%s" or "",
            rows[0][5] or "",
        )

        sql = 'ALTER TABLE %s CHANGE COLUMN %s %s %s %s %s %s %s;' % params

        if rows[0][4]:
            # Pass the existing default as a bound parameter.
            self.execute(sql, (rows[0][4],))
        else:
            self.execute(sql)

    @delete_column_constraints
    def delete_column(self, table_name, name):
        super(DatabaseOperations, self).delete_column(table_name, name)

    @invalidate_table_constraints
    def rename_table(self, old_table_name, table_name):
        super(DatabaseOperations, self).rename_table(old_table_name,
                                                     table_name)

    @invalidate_table_constraints
    def delete_table(self, table_name):
        super(DatabaseOperations, self).delete_table(table_name)

    def _lookup_constraint_references(self, table_name, cname):
        """
        Provided an existing table and constraint, returns tuple of (foreign
        table, column)
        """
        db_name = self._get_setting('NAME')
        try:
            return self._constraint_references[db_name][(table_name, cname)]
        except KeyError:
            return None

    def _lookup_reverse_constraint(self, table_name, column_name=None):
        """Look for the column referenced by a foreign constraint"""
        db_name = self._get_setting('NAME')
        if self.dry_run:
            raise DryRunError("Cannot get constraints for columns.")

        if not self._is_valid_cache(db_name, table_name):
            # Piggy-back on lookup_constraint, ensures cache exists
            self.lookup_constraint(db_name, table_name)

        try:
            table = self._reverse_cache[db_name][table_name]
            if column_name == None:
                # NOTE(review): this branch yields (set, tuple-of-set) pairs
                # rather than the (cname, rtable, rcolumn) triples callers
                # unpack in the column_name branch -- looks suspicious;
                # confirm against callers before changing.
                return [(y, tuple(y)) for x, y in table.items()]
            else:
                return tuple(table[column_name])
        except KeyError:
            return []

    def _field_sanity(self, field):
        """
        This particular override stops us sending DEFAULTs for BLOB/TEXT columns.
        """
        # MySQL does not support defaults for geometry columns also
        type = self._db_type_for_alter_column(field).lower()
        is_geom = True in [type.find(t) > -1 for t in self.geom_types]
        is_text = True in [type.find(t) > -1 for t in self.text_types]

        if is_geom or is_text:
            field._suppress_default = True
        return field

    def _alter_set_defaults(self, field, name, params, sqls):
        """
        MySQL does not support defaults on text or blob columns.
        """
        type = params['type']
        # MySQL does not support defaults for geometry columns also
        is_geom = True in [type.find(t) > -1 for t in self.geom_types]
        is_text = True in [type.find(t) > -1 for t in self.text_types]
        if not is_geom and not is_text:
            super(DatabaseOperations, self)._alter_set_defaults(field, name, params, sqls)
diff --git a/lib/python2.7/site-packages/south/db/oracle.py b/lib/python2.7/site-packages/south/db/oracle.py
new file mode 100644
index 0000000..79496d0
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/oracle.py
@@ -0,0 +1,345 @@
+from __future__ import print_function
+
+import os.path
+import sys
+import re
+import warnings
+import cx_Oracle
+
+
+from django.db import connection, models
+from django.db.backends.util import truncate_name
+from django.core.management.color import no_style
+from django.db.models.fields import NOT_PROVIDED
+from django.db.utils import DatabaseError
+
+# In revision r16016 function get_sequence_name has been transformed into
+# method of DatabaseOperations class. To make code backward-compatible we
+# need to handle both situations.
+try:
+ from django.db.backends.oracle.base import get_sequence_name\
+ as original_get_sequence_name
+except ImportError:
+ original_get_sequence_name = None
+
+from south.db import generic
+
class DatabaseOperations(generic.DatabaseOperations):
    """
    Oracle implementation of database operations.
    """
    backend_name = 'oracle'

    alter_string_set_type = 'ALTER TABLE %(table_name)s MODIFY %(column)s %(type)s %(nullity)s;'
    alter_string_set_default = 'ALTER TABLE %(table_name)s MODIFY %(column)s DEFAULT %(default)s;'
    alter_string_update_nulls_to_default = \
        'UPDATE %(table_name)s SET %(column)s = %(default)s WHERE %(column)s IS NULL;'
    add_column_string = 'ALTER TABLE %s ADD %s;'
    delete_column_string = 'ALTER TABLE %s DROP COLUMN %s;'
    add_constraint_string = 'ALTER TABLE %(table_name)s ADD CONSTRAINT %(constraint)s %(clause)s'

    allows_combined_alters = False
    has_booleans = False

    # Maps the one-letter constraint_type codes of Oracle's
    # USER_CONSTRAINTS view to the names South uses internally.
    constraints_dict = {
        'P': 'PRIMARY KEY',
        'U': 'UNIQUE',
        'C': 'CHECK',
        'R': 'FOREIGN KEY'
    }

    def get_sequence_name(self, table_name):
        """Return the name of the sequence backing *table_name*'s AutoField."""
        # Django r16016 turned the module-level get_sequence_name into a
        # method of DatabaseOperations; support both layouts.
        if original_get_sequence_name is None:
            return self._get_connection().ops._get_sequence_name(table_name)
        else:
            return original_get_sequence_name(table_name)

    #TODO: This will cause very obscure bugs if anyone uses a column name or string value
    # that looks like a column definition (with 'CHECK', 'DEFAULT' and/or 'NULL' in it)
    # e.g. "CHECK MATE" varchar(10) DEFAULT 'NULL'
    def adj_column_sql(self, col):
        """Reorder clauses of a rendered column definition for Oracle."""
        # Syntax fixes -- Oracle is picky about clause order
        col = re.sub('(?P<constr>CHECK \(.*\))(?P<any>.*)(?P<default>DEFAULT \d+)',
                     lambda mo: '%s %s%s'%(mo.group('default'), mo.group('constr'), mo.group('any')), col) #syntax fix for boolean/integer field only
        col = re.sub('(?P<not_null>(NOT )?NULL) (?P<misc>(.* )?)(?P<default>DEFAULT.+)',
                     lambda mo: '%s %s %s'%(mo.group('default'),mo.group('not_null'),mo.group('misc') or ''), col) #fix order of NULL/NOT NULL and DEFAULT
        return col

    def check_meta(self, table_name):
        """True when *table_name* belongs to an installed Django model."""
        return table_name in [ m._meta.db_table for m in models.get_models() ] #caching provided by Django

    def normalize_name(self, name):
        """
        Get the properly shortened and uppercased identifier as returned by quote_name(), but without the actual quotes.
        """
        nn = self.quote_name(name)
        if nn[0] == '"' and nn[-1] == '"':
            nn = nn[1:-1]
        return nn

    @generic.invalidate_table_constraints
    def create_table(self, table_name, fields):
        """Create *table_name* plus the sequence/trigger for any AutoField."""
        qn = self.quote_name(table_name)
        columns = []
        autoinc_sql = ''

        for field_name, field in fields:

            field = self._field_sanity(field)

            # avoid default values in CREATE TABLE statements (#925)
            field._suppress_default = True

            col = self.column_sql(table_name, field_name, field)
            if not col:
                continue
            col = self.adj_column_sql(col)

            columns.append(col)
            if isinstance(field, models.AutoField):
                autoinc_sql = connection.ops.autoinc_sql(table_name, field_name)

        sql = 'CREATE TABLE %s (%s);' % (qn, ', '.join([col for col in columns]))
        self.execute(sql)
        if autoinc_sql:
            # autoinc_sql is (sequence DDL, trigger DDL)
            self.execute(autoinc_sql[0])
            self.execute(autoinc_sql[1])

    @generic.invalidate_table_constraints
    def delete_table(self, table_name, cascade=True):
        """Drop the table and, if present, its AutoField sequence."""
        qn = self.quote_name(table_name)

        # Note: PURGE is not valid syntax for Oracle 9i (it was added in 10)
        if cascade:
            self.execute('DROP TABLE %s CASCADE CONSTRAINTS;' % qn)
        else:
            self.execute('DROP TABLE %s;' % qn)

        # If the table has an AutoField a sequence was created.
        sequence_sql = """
DECLARE
    i INTEGER;
BEGIN
    SELECT COUNT(*) INTO i FROM USER_CATALOG
        WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
    IF i = 1 THEN
        EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"';
    END IF;
END;
/""" % {'sq_name': self.get_sequence_name(table_name)}
        self.execute(sequence_sql)

    @generic.invalidate_table_constraints
    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
        """Change column *name* of *table_name* to match *field*."""
        if self.dry_run:
            if self.debug:
                print(' - no dry run output for alter_column() due to dynamic DDL, sorry')
            return

        qn = self.quote_name(table_name)

        # hook for the field to do any resolution prior to it's attributes being queried
        if hasattr(field, 'south_init'):
            field.south_init()
        field = self._field_sanity(field)

        # Add _id or whatever if we need to
        field.set_attributes_from_name(name)
        if not explicit_name:
            name = field.column
        qn_col = self.quote_name(name)

        # First, change the type
        # This will actually also add any CHECK constraints needed,
        # since e.g. 'type' for a BooleanField is 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))'
        params = {
            'table_name': qn,
            'column': qn_col,
            'type': self._db_type_for_alter_column(field),
            'nullity': 'NOT NULL',
            'default': 'NULL'
        }
        if field.null:
            params['nullity'] = 'NULL'

        sql_templates = [
            (self.alter_string_set_type, params, []),
            (self.alter_string_set_default, params, []),
        ]
        if not field.null and field.has_default():
            # Use default for rows that had nulls. To support the case where
            # the new default does not fit the old type, we need to first change
            # the column type to the new type, but null=True; then set the default;
            # then complete the type change.
            def change_params(**kw):
                "A little helper for non-destructively changing the params"
                p = params.copy()
                p.update(kw)
                return p
            sql_templates[:0] = [
                (self.alter_string_set_type, change_params(nullity='NULL'), []),
                (self.alter_string_update_nulls_to_default, change_params(default="%s"), [field.get_default()]),
            ]

        if not ignore_constraints:
            # drop CHECK constraints. Make sure this is executed before the ALTER TABLE statements
            # generated above, since those statements recreate the constraints we delete here.
            check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
            for constraint in check_constraints:
                self.execute(self.delete_check_sql % {
                    'table': self.quote_name(table_name),
                    'constraint': self.quote_name(constraint),
                })
            # Drop foreign constraints
            try:
                self.delete_foreign_key(qn, qn_col)
            except ValueError:
                # There weren't any
                pass

        for sql_template, params, args in sql_templates:
            try:
                self.execute(sql_template % params, args, print_all_errors=False)
            except DatabaseError as exc:
                description = str(exc)
                # Oracle complains if a column is already NULL/NOT NULL
                if 'ORA-01442' in description or 'ORA-01451' in description:
                    # so we just drop NULL/NOT NULL part from target sql and retry
                    params['nullity'] = ''
                    sql = sql_template % params
                    self.execute(sql)
                # Oracle also has issues if we try to change a regular column
                # to a LOB or vice versa (also REF, object, VARRAY or nested
                # table, but these don't come up much in Django apps)
                elif 'ORA-22858' in description or 'ORA-22859' in description:
                    self._alter_column_lob_workaround(table_name, name, field)
                else:
                    self._print_sql_error(exc, sql_template % params)
                    raise

        if not ignore_constraints:
            # Add back FK constraints if needed
            if field.rel: #and self.supports_foreign_keys:
                self.add_deferred_sql(
                    self.foreign_key_sql(
                        qn[1:-1], # foreign_key_sql uses this as part of constraint name
                        qn_col[1:-1], # foreign_key_sql uses this as part of constraint name
                        field.rel.to._meta.db_table,
                        field.rel.to._meta.get_field(field.rel.field_name).column
                    )
                )

    def _alter_column_lob_workaround(self, table_name, name, field):
        """
        Oracle refuses to change a column type from/to LOB to/from a regular
        column. In Django, this shows up when the field is changed from/to
        a TextField.
        What we need to do instead is:
        - Rename the original column
        - Add the desired field as new
        - Update the table to transfer values from old to new
        - Drop old column
        """
        renamed = self._generate_temp_name(name)
        self.rename_column(table_name, name, renamed)
        self.add_column(table_name, name, field, keep_default=False)
        self.execute("UPDATE %s set %s=%s" % (
            self.quote_name(table_name),
            self.quote_name(name),
            self.quote_name(renamed),
        ))
        self.delete_column(table_name, renamed)

    def _generate_temp_name(self, for_name):
        """Derive a deterministic temporary column name from *for_name*."""
        suffix = hex(hash(for_name)).upper()[1:]
        return self.normalize_name(for_name + "_" + suffix)

    @generic.copy_column_constraints #TODO: Appears to be nulled by the delete decorator below...
    @generic.delete_column_constraints
    def rename_column(self, table_name, old, new):
        """Rename column *old* of *table_name* to *new* (no-op when equal)."""
        if old == new:
            # Short-circuit out
            return []
        self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % (
            self.quote_name(table_name),
            self.quote_name(old),
            self.quote_name(new),
        ))

    @generic.invalidate_table_constraints
    def add_column(self, table_name, name, field, keep_default=False):
        """Add column *name* to *table_name*, then drop its scaffolding default."""
        field = self._field_sanity(field)
        sql = self.column_sql(table_name, name, field)
        sql = self.adj_column_sql(sql)

        if sql:
            params = (
                self.quote_name(table_name),
                sql
            )
            sql = self.add_column_string % params
            self.execute(sql)

            # Now, drop the default if we need to.
            # Fixed: honour keep_default, matching the generic backend --
            # previously it was accepted but silently ignored here.
            if not keep_default and field.default is not None:
                field.default = NOT_PROVIDED
                self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)

    def delete_column(self, table_name, name):
        # Pre-quote the table name; quote_name() leaves already-quoted
        # identifiers alone, so the generic implementation is safe.
        return super(DatabaseOperations, self).delete_column(self.quote_name(table_name), name)

    def lookup_constraint(self, db_name, table_name, column_name=None):
        if column_name:
            # Column names in the constraint cache come from the database,
            # make sure we use the properly shortened/uppercased version
            # for lookup.
            column_name = self.normalize_name(column_name)
        return super(DatabaseOperations, self).lookup_constraint(db_name, table_name, column_name)

    def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"):
        # Normalize to the database's identifier form before matching.
        if columns:
            columns = [self.normalize_name(c) for c in columns]
        return super(DatabaseOperations, self)._constraints_affecting_columns(table_name, columns, type)

    def _field_sanity(self, field):
        """
        This particular override stops us sending DEFAULTs for BooleanField.
        """
        if isinstance(field, models.BooleanField) and field.has_default():
            field.default = int(field.to_python(field.get_default()))
        # On Oracle, empty strings are null
        if isinstance(field, (models.CharField, models.TextField)):
            field.null = field.empty_strings_allowed
        return field

    def _default_value_workaround(self, value):
        # Oracle wants temporal defaults as quoted literals.
        from datetime import date,time,datetime
        if isinstance(value, (date,time,datetime)):
            return "'%s'" % value
        else:
            return super(DatabaseOperations, self)._default_value_workaround(value)

    def _fill_constraint_cache(self, db_name, table_name):
        """Populate the constraint cache from USER_CONSTRAINTS for one table."""
        self._constraint_cache.setdefault(db_name, {})
        self._constraint_cache[db_name][table_name] = {}

        rows = self.execute("""
            SELECT user_cons_columns.constraint_name,
                   user_cons_columns.column_name,
                   user_constraints.constraint_type
            FROM user_constraints
            JOIN user_cons_columns ON
                 user_constraints.table_name = user_cons_columns.table_name AND
                 user_constraints.constraint_name = user_cons_columns.constraint_name
            WHERE user_constraints.table_name = '%s'
        """ % self.normalize_name(table_name))

        for constraint, column, kind in rows:
            self._constraint_cache[db_name][table_name].setdefault(column, set())
            self._constraint_cache[db_name][table_name][column].add((self.constraints_dict[kind], constraint))
        return
diff --git a/lib/python2.7/site-packages/south/db/postgresql_psycopg2.py b/lib/python2.7/site-packages/south/db/postgresql_psycopg2.py
new file mode 100644
index 0000000..d6c63c4
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/postgresql_psycopg2.py
@@ -0,0 +1,96 @@
+from __future__ import print_function
+
+import uuid
+from django.db.backends.util import truncate_name
+from south.db import generic
+
+
class DatabaseOperations(generic.DatabaseOperations):

    """
    PsycoPG2 implementation of database operations.
    """

    backend_name = "postgres"

    def create_index_name(self, table_name, column_names, suffix=""):
        """
        Generate a unique name for the index

        Django's logic for naming field indexes is different in the
        postgresql_psycopg2 backend, so we follow that for single-column
        indexes.
        """

        if len(column_names) == 1:
            return truncate_name(
                '%s_%s%s' % (table_name, column_names[0], suffix),
                self._get_connection().ops.max_name_length()
            )
        return super(DatabaseOperations, self).create_index_name(table_name, column_names, suffix)

    @generic.copy_column_constraints
    @generic.delete_column_constraints
    def rename_column(self, table_name, old, new):
        """Rename column *old* of *table_name* to *new* (no-op when equal)."""
        if old == new:
            # Short-circuit out
            return []
        self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % (
            self.quote_name(table_name),
            self.quote_name(old),
            self.quote_name(new),
        ))

    @generic.invalidate_table_constraints
    def rename_table(self, old_table_name, table_name):
        "will rename the table and an associated ID sequence and primary key index"
        # First, rename the table
        generic.DatabaseOperations.rename_table(self, old_table_name, table_name)
        # Then, try renaming the ID sequence
        # (if you're using other AutoFields... your problem, unfortunately)

        if self.execute(
            """
            SELECT 1
            FROM information_schema.sequences
            WHERE sequence_name = %s
            """,
            [old_table_name + '_id_seq']
        ):
            generic.DatabaseOperations.rename_table(self, old_table_name + "_id_seq", table_name + "_id_seq")

        # Rename primary key index, will not rename other indices on
        # the table that are used by django (e.g. foreign keys). Until
        # figure out how, you need to do this yourself.

        # Fixed: execute() returns raw rows (tuples), so membership-testing
        # the bare index name against them could never match and the pkey
        # index was never renamed; extract the first column of each row.
        # NOTE(review): indexrelid::regclass may render schema-qualified
        # names outside the default search_path -- confirm if that matters.
        pkey_index_names = [
            row[0] for row in self.execute(
                """
                SELECT pg_index.indexrelid::regclass
                FROM pg_index, pg_attribute
                WHERE
                  indrelid = %s::regclass AND
                  pg_attribute.attrelid = indrelid AND
                  pg_attribute.attnum = any(pg_index.indkey)
                  AND indisprimary
                """,
                [table_name]
            )
        ]
        if old_table_name + "_pkey" in pkey_index_names:
            generic.DatabaseOperations.rename_table(self, old_table_name + "_pkey", table_name + "_pkey")

    def rename_index(self, old_index_name, index_name):
        "Rename an index individually"
        generic.DatabaseOperations.rename_table(self, old_index_name, index_name)

    def _default_value_workaround(self, value):
        "Support for UUIDs on psql"
        if isinstance(value, uuid.UUID):
            return str(value)
        else:
            return super(DatabaseOperations, self)._default_value_workaround(value)

    def _db_type_for_alter_column(self, field):
        return self._db_positive_type_for_alter_column(DatabaseOperations, field)

    def _alter_add_column_mods(self, field, name, params, sqls):
        return self._alter_add_positive_check(DatabaseOperations, field, name, params, sqls)
diff --git a/lib/python2.7/site-packages/south/db/sql_server/__init__.py b/lib/python2.7/site-packages/south/db/sql_server/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/sql_server/__init__.py
diff --git a/lib/python2.7/site-packages/south/db/sql_server/pyodbc.py b/lib/python2.7/site-packages/south/db/sql_server/pyodbc.py
new file mode 100644
index 0000000..b725ec0
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/sql_server/pyodbc.py
@@ -0,0 +1,444 @@
+from datetime import date, datetime, time
+from warnings import warn
+from django.db import models
+from django.db.models import fields
+from south.db import generic
+from south.db.generic import delete_column_constraints, invalidate_table_constraints, copy_column_constraints
+from south.exceptions import ConstraintDropped
+from south.utils.py3 import string_types
+try:
+ from django.utils.encoding import smart_text # Django >= 1.5
+except ImportError:
+ from django.utils.encoding import smart_unicode as smart_text # Django < 1.5
+from django.core.management.color import no_style
+
class DatabaseOperations(generic.DatabaseOperations):
    """
    django-pyodbc (sql_server.pyodbc) implementation of database operations.
    """

    backend_name = "pyodbc"

    add_column_string = 'ALTER TABLE %s ADD %s;'
    alter_string_set_type = 'ALTER COLUMN %(column)s %(type)s'
    alter_string_set_null = 'ALTER COLUMN %(column)s %(type)s NULL'
    alter_string_drop_null = 'ALTER COLUMN %(column)s %(type)s NOT NULL'

    allows_combined_alters = False

    drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
    drop_constraint_string = 'ALTER TABLE %(table_name)s DROP CONSTRAINT %(constraint_name)s'
    delete_column_string = 'ALTER TABLE %s DROP COLUMN %s'

    #create_check_constraint_sql = "ALTER TABLE %(table)s " + \
    #                              generic.DatabaseOperations.add_check_constraint_fragment
    create_foreign_key_sql = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s " + \
                             "FOREIGN KEY (%(column)s) REFERENCES %(target)s"
    create_unique_sql = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s UNIQUE (%(columns)s)"

    default_schema_name = "dbo"

    has_booleans = False

    @delete_column_constraints
    def delete_column(self, table_name, name):
        """Drop a column, first removing its constraints, indexes and default."""
        q_table_name, q_name = (self.quote_name(table_name), self.quote_name(name))

        # Zap the constraints
        for const in self._find_constraints_for_column(table_name,name):
            params = {'table_name':q_table_name, 'constraint_name': const}
            sql = self.drop_constraint_string % params
            self.execute(sql, [])

        # Zap the indexes
        for ind in self._find_indexes_for_column(table_name,name):
            params = {'table_name':q_table_name, 'index_name': ind}
            sql = self.drop_index_string % params
            self.execute(sql, [])

        # Zap default if exists
        drop_default = self.drop_column_default_sql(table_name, name)
        if drop_default:
            sql = "ALTER TABLE [%s] %s" % (table_name, drop_default)
            self.execute(sql, [])

        # Finally zap the column itself
        self.execute(self.delete_column_string % (q_table_name, q_name), [])

    def _find_indexes_for_column(self, table_name, name):
        "Find the indexes that apply to a column, needed when deleting"

        sql = """
        SELECT si.name, si.id, sik.colid, sc.name
        FROM dbo.sysindexes si WITH (NOLOCK)
        INNER JOIN dbo.sysindexkeys sik WITH (NOLOCK)
            ON  sik.id = si.id
            AND sik.indid = si.indid
        INNER JOIN dbo.syscolumns sc WITH (NOLOCK)
            ON  si.id = sc.id
            AND sik.colid = sc.colid
        WHERE si.indid !=0
            AND si.id = OBJECT_ID('%s')
            AND sc.name = '%s'
        """
        idx = self.execute(sql % (table_name, name), [])
        return [i[0] for i in idx]

    def _find_constraints_for_column(self, table_name, name, just_names=True):
        """
        Find the constraints that apply to a column, needed when deleting. Defaults not included.
        This is more general than the parent _constraints_affecting_columns, as on MSSQL this
        includes PK and FK constraints.
        """

        sql = """
        SELECT CC.[CONSTRAINT_NAME]
              ,TC.[CONSTRAINT_TYPE]
              ,CHK.[CHECK_CLAUSE]
              ,RFD.TABLE_SCHEMA
              ,RFD.TABLE_NAME
              ,RFD.COLUMN_NAME
              -- used for normalized names
              ,CC.TABLE_NAME
              ,CC.COLUMN_NAME
        FROM [INFORMATION_SCHEMA].[TABLE_CONSTRAINTS] TC
        JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE CC
            ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG
           AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA
           AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
        LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS CHK
            ON CHK.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG
           AND CHK.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA
           AND CHK.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
           AND 'CHECK' = TC.CONSTRAINT_TYPE
        LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS REF
            ON REF.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG
           AND REF.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA
           AND REF.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
           AND 'FOREIGN KEY' = TC.CONSTRAINT_TYPE
        LEFT JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE RFD
            ON RFD.CONSTRAINT_CATALOG = REF.UNIQUE_CONSTRAINT_CATALOG
           AND RFD.CONSTRAINT_SCHEMA = REF.UNIQUE_CONSTRAINT_SCHEMA
           AND RFD.CONSTRAINT_NAME = REF.UNIQUE_CONSTRAINT_NAME
        WHERE CC.CONSTRAINT_CATALOG = CC.TABLE_CATALOG
          AND CC.CONSTRAINT_SCHEMA = CC.TABLE_SCHEMA
          AND CC.TABLE_CATALOG = %s
          AND CC.TABLE_SCHEMA = %s
          AND CC.TABLE_NAME = %s
          AND CC.COLUMN_NAME = %s
        """
        db_name = self._get_setting('name')
        schema_name = self._get_schema_name()
        table = self.execute(sql, [db_name, schema_name, table_name, name])

        if just_names:
            return [r[0] for r in table]

        # Build the full {name: (type, details)} mapping.
        # (Locals renamed from 'all'/'type' so builtins aren't shadowed.)
        constraints = {}
        for r in table:
            cons_name, cons_type = r[:2]
            if cons_type == 'PRIMARY KEY' or cons_type == 'UNIQUE':
                cons = constraints.setdefault(cons_name, (cons_type, []))
                sql = '''
                SELECT COLUMN_NAME
                FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE RFD
                WHERE RFD.CONSTRAINT_CATALOG = %s
                  AND RFD.CONSTRAINT_SCHEMA = %s
                  AND RFD.TABLE_NAME = %s
                  AND RFD.CONSTRAINT_NAME = %s
                '''
                columns = self.execute(sql, [db_name, schema_name, table_name, cons_name])
                cons[1].extend(col for col, in columns)
            elif cons_type == 'CHECK':
                cons = (cons_type, r[2])
            elif cons_type == 'FOREIGN KEY':
                if cons_name in constraints:
                    raise NotImplementedError("Multiple-column foreign keys are not supported")
                else:
                    cons = (cons_type, r[3:6])
            else:
                raise NotImplementedError("Don't know how to handle constraints of type "+ cons_type)
            constraints[cons_name] = cons
        return constraints

    @invalidate_table_constraints
    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
        """
        Alters the given column name so it will match the given field.
        Note that conversion between the two by the database must be possible.
        Will not automatically add _id by default; to have this behavour, pass
        explicit_name=False.

        @param table_name: The name of the table to add the column to
        @param name: The name of the column to alter
        @param field: The new field definition to use
        """
        self._fix_field_definition(field)

        if not ignore_constraints:
            qn = self.quote_name
            sch = qn(self._get_schema_name())
            tab = qn(table_name)
            table = ".".join([sch, tab])
            try:
                self.delete_foreign_key(table_name, name)
            except ValueError:
                # no FK constraint on this field. That's OK.
                pass
            constraints = self._find_constraints_for_column(table_name, name, False)
            for constraint in constraints.keys():
                params = dict(table_name = table,
                              constraint_name = qn(constraint))
                sql = self.drop_constraint_string % params
                self.execute(sql, [])

        ret_val = super(DatabaseOperations, self).alter_column(table_name, name, field, explicit_name, ignore_constraints=True)

        if not ignore_constraints:
            for cname, (ctype,args) in constraints.items():
                params = dict(table = table,
                              constraint = qn(cname))
                if ctype=='UNIQUE':
                    params['columns'] = ", ".join(map(qn,args))
                    sql = self.create_unique_sql % params
                elif ctype=='PRIMARY KEY':
                    params['columns'] = ", ".join(map(qn,args))
                    sql = self.create_primary_key_string % params
                elif ctype=='FOREIGN KEY':
                    continue
                    # Foreign keys taken care of below
                    #target = "%s.%s(%s)" % tuple(map(qn,args))
                    #params.update(column = qn(name), target = target)
                    #sql = self.create_foreign_key_sql % params
                elif ctype=='CHECK':
                    warn(ConstraintDropped("CHECK "+ args, table_name, name))
                    continue
                    #TODO: Some check constraints should be restored; but not before the generic
                    #      backend restores them.
                    #params['check'] = args
                    #sql = self.create_check_constraint_sql % params
                else:
                    # Fixed: this previously concatenated the *builtin* 'type'
                    # (loop unpacks cname/ctype), so the raise itself blew up
                    # with a TypeError instead of reporting the constraint kind.
                    raise NotImplementedError("Don't know how to handle constraints of type "+ ctype)
                self.execute(sql, [])
            # Create foreign key if necessary
            if field.rel and self.supports_foreign_keys:
                self.execute(
                    self.foreign_key_sql(
                        table_name,
                        field.column,
                        field.rel.to._meta.db_table,
                        field.rel.to._meta.get_field(field.rel.field_name).column
                    )
                )
            model = self.mock_model("FakeModelForIndexCreation", table_name)
            for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
                self.execute(stmt)

        return ret_val

    def _alter_set_defaults(self, field, name, params, sqls):
        "Subcommand of alter_column that sets default values (overrideable)"
        # Historically, we used to set defaults here.
        # But since South 0.8, we don't ever set defaults on alter-column -- we only
        # use database-level defaults as scaffolding when adding columns.
        # However, we still sometimes need to remove defaults in alter-column.
        table_name = self.quote_name(params['table_name'])
        drop_default = self.drop_column_default_sql(table_name, name)
        if drop_default:
            sqls.append((drop_default, []))

    def _value_to_unquoted_literal(self, field, value):
        """Render *value* as an unquoted SQL literal for *field*."""
        # Start with the field's own translation
        conn = self._get_connection()
        value = field.get_db_prep_save(value, connection=conn)
        # This is still a Python object -- nobody expects to need a literal.
        if isinstance(value, string_types):
            return smart_text(value)
        elif isinstance(value, (date,time,datetime)):
            return value.isoformat()
        else:
            #TODO: Anybody else needs special translations?
            return str(value)

    def _default_value_workaround(self, value):
        # MSSQL wants temporal defaults in ISO format.
        if isinstance(value, (date,time,datetime)):
            return value.isoformat()
        else:
            return super(DatabaseOperations, self)._default_value_workaround(value)

    def _quote_string(self, s):
        return "'" + s.replace("'","''") + "'"

    def drop_column_default_sql(self, table_name, name, q_name=None):
        "MSSQL specific drop default, which is a pain"

        sql = """
        SELECT object_name(cdefault)
        FROM syscolumns
        WHERE id = object_id('%s')
        AND name = '%s'
        """
        cons = self.execute(sql % (table_name, name), [])
        if cons and cons[0] and cons[0][0]:
            return "DROP CONSTRAINT %s" % cons[0][0]
        return None

    def _fix_field_definition(self, field):
        # MSSQL stores booleans as BIT 0/1.
        if isinstance(field, (fields.BooleanField, fields.NullBooleanField)):
            if field.default == True:
                field.default = 1
            if field.default == False:
                field.default = 0

    # This is copied from South's generic add_column, with two modifications:
    # 1) The sql-server-specific call to _fix_field_definition
    # 2) Removing a default, when needed, by calling drop_default and not the more general alter_column
    @invalidate_table_constraints
    def add_column(self, table_name, name, field, keep_default=False):
        """
        Adds the column 'name' to the table 'table_name'.
        Uses the 'field' paramater, a django.db.models.fields.Field instance,
        to generate the necessary sql

        @param table_name: The name of the table to add the column to
        @param name: The name of the column to add
        @param field: The field to use
        """
        self._fix_field_definition(field)
        sql = self.column_sql(table_name, name, field)
        if sql:
            params = (
                self.quote_name(table_name),
                sql,
            )
            sql = self.add_column_string % params
            self.execute(sql)

            # Now, drop the default if we need to
            if not keep_default and field.default is not None:
                field.default = fields.NOT_PROVIDED
                #self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)
                self.drop_default(table_name, name, field)

    @invalidate_table_constraints
    def drop_default(self, table_name, name, field):
        """Drop the DEFAULT constraint on a column, if one exists."""
        fragment = self.drop_column_default_sql(table_name, name)
        if fragment:
            table_name = self.quote_name(table_name)
            sql = " ".join(["ALTER TABLE", table_name, fragment])
            self.execute(sql)

    @invalidate_table_constraints
    def create_table(self, table_name, field_defs):
        """Create a table, normalising boolean defaults first."""
        # Tweak stuff as needed
        for _, f in field_defs:
            self._fix_field_definition(f)

        # Run
        super(DatabaseOperations, self).create_table(table_name, field_defs)

    def _find_referencing_fks(self, table_name):
        "MSSQL does not support cascading FKs when dropping tables, we need to implement."

        # FK -- Foreign Keys
        # UCTU -- Unique Constraints Table Usage
        # FKTU -- Foreign Key Table Usage
        # (last two are both really CONSTRAINT_TABLE_USAGE, different join conditions)
        sql = """
        SELECT FKTU.TABLE_SCHEMA as REFING_TABLE_SCHEMA,
               FKTU.TABLE_NAME as REFING_TABLE_NAME,
               FK.[CONSTRAINT_NAME] as FK_NAME
        FROM [INFORMATION_SCHEMA].[REFERENTIAL_CONSTRAINTS] FK
        JOIN [INFORMATION_SCHEMA].[CONSTRAINT_TABLE_USAGE] UCTU
          ON FK.UNIQUE_CONSTRAINT_CATALOG = UCTU.CONSTRAINT_CATALOG and
             FK.UNIQUE_CONSTRAINT_NAME = UCTU.CONSTRAINT_NAME and
             FK.UNIQUE_CONSTRAINT_SCHEMA = UCTU.CONSTRAINT_SCHEMA
        JOIN [INFORMATION_SCHEMA].[CONSTRAINT_TABLE_USAGE] FKTU
          ON FK.CONSTRAINT_CATALOG = FKTU.CONSTRAINT_CATALOG and
             FK.CONSTRAINT_NAME = FKTU.CONSTRAINT_NAME and
             FK.CONSTRAINT_SCHEMA = FKTU.CONSTRAINT_SCHEMA
        WHERE FK.CONSTRAINT_CATALOG = %s
          AND UCTU.TABLE_SCHEMA = %s -- REFD_TABLE_SCHEMA
          AND UCTU.TABLE_NAME = %s -- REFD_TABLE_NAME
        """
        db_name = self._get_setting('name')
        schema_name = self._get_schema_name()
        return self.execute(sql, [db_name, schema_name, table_name])

    @invalidate_table_constraints
    def delete_table(self, table_name, cascade=True):
        """
        Deletes the table 'table_name'.
        """
        if cascade:
            # Manually drop every FK that references this table first.
            refing = self._find_referencing_fks(table_name)
            for schema, table, constraint in refing:
                table = ".".join(map (self.quote_name, [schema, table]))
                params = dict(table_name = table,
                              constraint_name = self.quote_name(constraint))
                sql = self.drop_constraint_string % params
                self.execute(sql, [])
            cascade = False
        super(DatabaseOperations, self).delete_table(table_name, cascade)

    @copy_column_constraints
    @delete_column_constraints
    def rename_column(self, table_name, old, new):
        """
        Renames the column of 'table_name' from 'old' to 'new'.
        WARNING - This isn't transactional on MSSQL!
        """
        if old == new:
            # No Operation
            return
        # Examples on the MS site show the table name not being quoted...
        params = (table_name, self.quote_name(old), self.quote_name(new))
        self.execute("EXEC sp_rename '%s.%s', %s, 'COLUMN'" % params)

    @invalidate_table_constraints
    def rename_table(self, old_table_name, table_name):
        """
        Renames the table 'old_table_name' to 'table_name'.
        WARNING - This isn't transactional on MSSQL!
        """
        if old_table_name == table_name:
            # No Operation
            return
        params = (self.quote_name(old_table_name), self.quote_name(table_name))
        self.execute('EXEC sp_rename %s, %s' % params)

    def _db_type_for_alter_column(self, field):
        return self._db_positive_type_for_alter_column(DatabaseOperations, field)

    def _alter_add_column_mods(self, field, name, params, sqls):
        return self._alter_add_positive_check(DatabaseOperations, field, name, params, sqls)

    @invalidate_table_constraints
    def delete_foreign_key(self, table_name, column):
        """Drop an FK constraint and the non-unique index that backs it."""
        super(DatabaseOperations, self).delete_foreign_key(table_name, column)
        # A FK also implies a non-unique index
        find_index_sql = """
            SELECT i.name -- s.name, t.name, c.name
            FROM sys.tables t
            INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
            INNER JOIN sys.indexes i ON i.object_id = t.object_id
            INNER JOIN sys.index_columns ic ON ic.object_id = t.object_id
                                           AND ic.index_id = i.index_id
            INNER JOIN sys.columns c ON c.object_id = t.object_id
                                    AND ic.column_id = c.column_id
            WHERE i.is_unique=0 AND i.is_primary_key=0 AND i.is_unique_constraint=0
            AND s.name = %s
            AND t.name = %s
            AND c.name = %s
            """
        schema = self._get_schema_name()
        indexes = self.execute(find_index_sql, [schema, table_name, column])
        qn = self.quote_name
        for index in (i[0] for i in indexes if i[0]): # "if i[0]" added because an empty name may return
            self.execute("DROP INDEX %s on %s.%s" % (qn(index), qn(schema), qn(table_name) ))
diff --git a/lib/python2.7/site-packages/south/db/sqlite3.py b/lib/python2.7/site-packages/south/db/sqlite3.py
new file mode 100644
index 0000000..c4014d3
--- /dev/null
+++ b/lib/python2.7/site-packages/south/db/sqlite3.py
@@ -0,0 +1,272 @@
+from south.db import generic
+
+
class DatabaseOperations(generic.DatabaseOperations):

    """
    SQLite3 implementation of database operations.

    SQLite cannot ALTER most aspects of an existing table, so nearly every
    schema change here works by rebuilding the entire table with the new
    schema and copying the rows across (see _remake_table).
    """

    backend_name = "sqlite3"

    # SQLite ignores several constraints. I wish I could.
    supports_foreign_keys = False
    has_check_constraints = False
    has_booleans = False

    def add_column(self, table_name, name, field, *args, **kwds):
        """
        Adds a column.

        Raises ValueError for a null=False column with no usable default,
        since existing rows could not be filled in.
        """
        # If it's not nullable, and has no default, raise an error (SQLite is picky)
        if (not field.null and
                (not field.has_default() or field.get_default() is None) and
                not field.empty_strings_allowed):
            raise ValueError("You cannot add a null=False column without a default value.")
        # Initialise the field.
        field.set_attributes_from_name(name)
        # We add columns by remaking the table; even though SQLite supports
        # adding columns, it doesn't support adding PRIMARY KEY or UNIQUE cols.
        # We define fields with no default; a default will be used, though, to fill up the remade table
        field_default = None
        if not getattr(field, '_suppress_default', False):
            default = field.get_default()
            if default is not None:
                field_default = "'%s'" % field.get_db_prep_save(default, connection=self._get_connection())
        field._suppress_default = True
        self._remake_table(table_name, added={
            field.column: (self._column_sql_for_create(table_name, name, field, False), field_default)
        })

    def _get_full_table_description(self, connection, cursor, table_name):
        """
        Returns one dict per column of table_name, read from
        PRAGMA table_info: name, type, null_ok, dflt_value and pk.
        """
        cursor.execute('PRAGMA table_info(%s)' % connection.ops.quote_name(table_name))
        # cid, name, type, notnull, dflt_value, pk
        return [{'name': field[1],
                 'type': field[2],
                 'null_ok': not field[3],
                 'dflt_value': field[4],
                 'pk': field[5]  # undocumented
                 } for field in cursor.fetchall()]

    @generic.invalidate_table_constraints
    def _remake_table(self, table_name, added={}, renames={}, deleted=[], altered={}, primary_key_override=None, uniques_deleted=[]):
        """
        Given a table and three sets of changes (renames, deletes, alters),
        recreates it with the modified schema.

        added maps column name -> (column SQL, default-expression-or-None);
        renames maps old name -> new name; deleted/uniques_deleted are lists
        of column names; altered maps column name -> new column SQL;
        primary_key_override replaces the detected PK column (True wipes it,
        since True never equals a column name).
        NOTE: the default arguments are mutable but are only read, never
        mutated, so the shared-default pitfall does not bite here.
        """
        # Dry runs get skipped completely
        if self.dry_run:
            return
        # Temporary table's name
        temp_name = "_south_new_" + table_name
        # Work out the (possibly new) definitions of each column
        definitions = {}
        cursor = self._get_connection().cursor()
        # Get the index descriptions
        indexes = self._get_connection().introspection.get_indexes(cursor, table_name)
        standalone_indexes = self._get_standalone_indexes(table_name)
        # Work out new column defs.
        for column_info in self._get_full_table_description(self._get_connection(), cursor, table_name):
            name = column_info['name']
            if name in deleted:
                continue
            # Get the type, ignoring PRIMARY KEY (we need to be consistent)
            type = column_info['type'].replace("PRIMARY KEY", "")
            # Add on primary key, not null or unique if needed.
            if (primary_key_override and primary_key_override == name) or \
               (not primary_key_override and name in indexes and
                indexes[name]['primary_key']):
                type += " PRIMARY KEY"
            elif not column_info['null_ok']:
                type += " NOT NULL"
            if (name in indexes and indexes[name]['unique'] and
                name not in uniques_deleted):
                type += " UNIQUE"
            if column_info['dflt_value'] is not None:
                type += " DEFAULT " + column_info['dflt_value']
            # Deal with a rename
            if name in renames:
                name = renames[name]
            # Add to the defs
            definitions[name] = type
        # Add on altered columns
        for name, type in altered.items():
            if (primary_key_override and primary_key_override == name) or \
               (not primary_key_override and name in indexes and
                indexes[name]['primary_key']):
                type += " PRIMARY KEY"
            if (name in indexes and indexes[name]['unique'] and
                name not in uniques_deleted):
                type += " UNIQUE"
            definitions[name] = type
        # Add on the new columns
        for name, (type, _) in added.items():
            if (primary_key_override and primary_key_override == name):
                type += " PRIMARY KEY"
            definitions[name] = type
        # Alright, Make the table
        self.execute("CREATE TABLE %s (%s)" % (
            self.quote_name(temp_name),
            ", ".join(["%s %s" % (self.quote_name(cname), ctype) for cname, ctype in definitions.items()]),
        ))
        # Copy over the data
        self._copy_data(table_name, temp_name, renames, added)
        # Delete the old table, move our new one over it
        self.delete_table(table_name)
        self.rename_table(temp_name, table_name)
        # Recreate multi-valued indexes
        # We can't do that before since it's impossible to rename indexes
        # and index name scope is global
        self._make_standalone_indexes(table_name, standalone_indexes, renames=renames, deleted=deleted, uniques_deleted=uniques_deleted)
        self.deferred_sql = []  # prevent double indexing

    def _copy_data(self, src, dst, field_renames={}, added={}):
        """
        Used to copy data into a new table. Columns are matched by name
        (applying field_renames); added columns with a default expression are
        selected as that constant so every destination row gets the default.
        """
        # Make a list of all the fields to select
        cursor = self._get_connection().cursor()
        src_fields = [column_info[0] for column_info in self._get_connection().introspection.get_table_description(cursor, src)]
        dst_fields = [column_info[0] for column_info in self._get_connection().introspection.get_table_description(cursor, dst)]
        src_fields_new = []
        dst_fields_new = []
        for field in src_fields:
            if field in field_renames:
                dst_fields_new.append(self.quote_name(field_renames[field]))
            elif field in dst_fields:
                dst_fields_new.append(self.quote_name(field))
            else:
                # Source column has no counterpart in the new table (deleted).
                continue
            src_fields_new.append(self.quote_name(field))
        for field, (_, default) in added.items():
            if default is not None:
                field = self.quote_name(field)
                # SELECT the default expression as the new column's value.
                src_fields_new.append("%s as %s" % (default, field))
                dst_fields_new.append(field)
        # Copy over the data
        self.execute("INSERT INTO %s (%s) SELECT %s FROM %s;" % (
            self.quote_name(dst),
            ', '.join(dst_fields_new),
            ', '.join(src_fields_new),
            self.quote_name(src),
        ))

    def _create_unique(self, table_name, columns):
        # A unique constraint is just a unique index in SQLite.
        self._create_index(table_name, columns, True)

    def _create_index(self, table_name, columns, unique=False, index_name=None):
        """
        Creates a (possibly UNIQUE) index on columns, generating a name from
        the table and column names when index_name is not given.
        """
        if index_name is None:
            index_name = '%s_%s' % (table_name, '__'.join(columns))
        self.execute("CREATE %sINDEX %s ON %s(%s);" % (
            unique and "UNIQUE " or "",
            self.quote_name(index_name),
            self.quote_name(table_name),
            ', '.join(self.quote_name(c) for c in columns),
        ))

    def _get_standalone_indexes(self, table_name):
        """
        Returns (name, columns, unique) for each index on table_name that is
        NOT expressible inline in the CREATE TABLE column list — i.e. every
        index except single-column unique ones.
        """
        indexes = []
        cursor = self._get_connection().cursor()
        cursor.execute('PRAGMA index_list(%s)' % self.quote_name(table_name))
        # seq, name, unique
        for index, unique in [(field[1], field[2]) for field in cursor.fetchall()]:
            cursor.execute('PRAGMA index_info(%s)' % self.quote_name(index))
            info = cursor.fetchall()
            if len(info) == 1 and unique:
                # This index is already specified in the CREATE TABLE columns
                # specification
                continue
            columns = []
            for field in info:
                columns.append(field[2])
            indexes.append((index, columns, unique))
        return indexes

    def _make_standalone_indexes(self, table_name, indexes, deleted=[], renames={}, uniques_deleted=[]):
        """
        Recreates the standalone indexes captured by _get_standalone_indexes
        after a table remake, applying renames and skipping indexes that
        reference deleted columns or match a deleted unique constraint.
        """
        for index_name, index, unique in indexes:
            columns = []

            for name in index:
                # Handle deletion
                if name in deleted:
                    # A deleted column invalidates the whole index.
                    columns = []
                    break

                # Handle renames
                if name in renames:
                    name = renames[name]
                columns.append(name)

            if columns and (set(columns) != set(uniques_deleted) or not unique):
                self._create_index(table_name, columns, unique, index_name)

    def _column_sql_for_create(self, table_name, name, field, explicit_name=True):
        "Given a field and its name, returns the full type for the CREATE TABLE (without unique/pk)"
        field.set_attributes_from_name(name)
        if not explicit_name:
            name = field.db_column
        else:
            field.column = name
        sql = self.column_sql(table_name, name, field, with_name=False, field_prepared=True)
        # Remove keywords we don't want (this should be type only, not constraint)
        if sql:
            sql = sql.replace("PRIMARY KEY", "")
        return sql

    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
        """
        Changes a column's SQL definition.

        Note that this sqlite3 implementation ignores the ignore_constraints argument.
        The argument is accepted for API compatibility with the generic
        DatabaseOperations.alter_column() method.
        """
        # Change nulls to default if needed
        if not field.null and field.has_default():
            params = {
                "column": self.quote_name(name),
                "table_name": self.quote_name(table_name)
            }
            self._update_nulls_to_default(params, field)
        # Remake the table correctly
        field._suppress_default = True
        self._remake_table(table_name, altered={
            name: self._column_sql_for_create(table_name, name, field, explicit_name),
        })

    def delete_column(self, table_name, column_name):
        """
        Deletes a column.
        """
        self._remake_table(table_name, deleted=[column_name])

    def rename_column(self, table_name, old, new):
        """
        Renames a column from one name to another.
        """
        self._remake_table(table_name, renames={old: new})

    def create_unique(self, table_name, columns):
        """
        Creates a unique index on columns.
        """
        self._create_unique(table_name, columns)

    def delete_unique(self, table_name, columns):
        """
        Deletes a unique index.
        """
        self._remake_table(table_name, uniques_deleted=columns)

    def create_primary_key(self, table_name, columns):
        """
        Makes the given column the primary key (single-column only in SQLite).
        """
        if not isinstance(columns, (list, tuple)):
            columns = [columns]
        assert len(columns) == 1, "SQLite backend does not support multi-column primary keys"
        self._remake_table(table_name, primary_key_override=columns[0])

    def delete_primary_key(self, table_name):
        """
        Removes the primary key from table_name.
        """
        # Passing True wipes all existing PKs: in _remake_table, True never
        # compares equal to any column name, so no column gets PRIMARY KEY.
        self._remake_table(table_name, primary_key_override=True)

    def delete_table(self, table_name, cascade=True):
        # No cascades on deletes: SQLite has no DROP TABLE ... CASCADE, so
        # always force cascade=False on the generic implementation.
        generic.DatabaseOperations.delete_table(self, table_name, False)