[snowy] Replace django_evolution with South for db migrations
- From: Brad Taylor <btaylor src gnome org>
- To: svn-commits-list gnome org
- Subject: [snowy] Replace django_evolution with South for db migrations
- Date: Mon, 20 Jul 2009 14:36:07 +0000 (UTC)
commit f4e95066b59748eee0e6b4f600d8b9b27f56cb38
Author: Brad Taylor <brad getcoded net>
Date: Mon Jul 20 10:35:37 2009 -0400
Replace django_evolution with South for db migrations
TODO | 1 -
lib/django_evolution/__init__.py | 15 -
lib/django_evolution/admin.py | 5 -
lib/django_evolution/db/__init__.py | 9 -
lib/django_evolution/db/common.py | 166 ----
lib/django_evolution/db/mysql.py | 89 --
lib/django_evolution/db/mysql_old.py | 2 -
lib/django_evolution/db/postgresql.py | 14 -
lib/django_evolution/db/postgresql_psycopg2.py | 2 -
lib/django_evolution/db/sqlite3.py | 200 -----
lib/django_evolution/diff.py | 205 -----
lib/django_evolution/evolve.py | 60 --
lib/django_evolution/management/__init__.py | 100 ---
lib/django_evolution/management/commands/evolve.py | 225 -----
lib/django_evolution/models.py | 27 -
lib/django_evolution/mutations.py | 475 ----------
lib/django_evolution/signature.py | 94 --
lib/django_evolution/tests/__init__.py | 27 -
lib/django_evolution/tests/add_field.py | 551 ------------
lib/django_evolution/tests/change_field.py | 687 ---------------
lib/django_evolution/tests/db/mysql.py | 254 ------
lib/django_evolution/tests/db/mysql_old.py | 2 -
lib/django_evolution/tests/db/postgresql.py | 236 -----
.../tests/db/postgresql_psycopg2.py | 2 -
lib/django_evolution/tests/db/sqlite3.py | 540 ------------
lib/django_evolution/tests/delete_app.py | 76 --
lib/django_evolution/tests/delete_field.py | 268 ------
lib/django_evolution/tests/delete_model.py | 131 ---
lib/django_evolution/tests/generics.py | 71 --
lib/django_evolution/tests/inheritance.py | 82 --
lib/django_evolution/tests/models.py | 3 -
lib/django_evolution/tests/ordering.py | 49 -
lib/django_evolution/tests/rename_field.py | 399 ---------
lib/django_evolution/tests/signature.py | 248 ------
lib/django_evolution/tests/sql_mutation.py | 93 --
lib/django_evolution/tests/utils.py | 185 ----
lib/django_evolution/utils.py | 22 -
lib/south/__init__.py | 6 +
lib/south/db/__init__.py | 12 +
lib/south/db/generic.py | 705 +++++++++++++++
lib/south/db/mysql.py | 137 +++
lib/south/db/postgresql_psycopg2.py | 64 ++
.../commands => south/db/sql_server}/__init__.py | 0
lib/south/db/sql_server/pyodbc.py | 25 +
lib/south/db/sqlite3.py | 50 ++
.../tests/db => south/management}/__init__.py | 0
.../management/commands/__init__.py | 0
lib/south/management/commands/migrate.py | 110 +++
lib/south/management/commands/startmigration.py | 928 ++++++++++++++++++++
lib/south/management/commands/syncdb.py | 70 ++
lib/south/management/commands/test.py | 14 +
lib/south/migration.py | 553 ++++++++++++
lib/south/models.py | 19 +
lib/south/modelsparser.py | 398 +++++++++
lib/south/orm.py | 278 ++++++
lib/south/tests/__init__.py | 77 ++
lib/south/tests/db.py | 318 +++++++
.../commands => south/tests/fakeapp}/__init__.py | 0
lib/south/tests/fakeapp/migrations/0001_spam.py | 19 +
lib/south/tests/fakeapp/migrations/0002_eggs.py | 20 +
.../tests/fakeapp/migrations/0003_alter_spam.py | 12 +
.../tests/fakeapp/migrations}/__init__.py | 0
lib/south/tests/fakeapp/models.py | 46 +
lib/south/tests/logic.py | 243 +++++
lib/south/tests/modelsparser.py | 61 ++
settings.py | 2 +-
66 files changed, 4166 insertions(+), 5616 deletions(-)
---
diff --git a/TODO b/TODO
index b50ba14..25f0fb9 100644
--- a/TODO
+++ b/TODO
@@ -1,7 +1,6 @@
TODO
========================
* General
- - Replace django-evolution with South (http://south.aeracode.org)
- Create a NoteManager to handle permissions in one place and unit test
- More complicated permissions to allow for sharing
- Configurable project name to allow Snowy and Tomboy Online to use roughly
diff --git a/lib/south/__init__.py b/lib/south/__init__.py
new file mode 100644
index 0000000..918b0c2
--- /dev/null
+++ b/lib/south/__init__.py
@@ -0,0 +1,6 @@
+"""
+South - Useable migrations for Django apps
+"""
+
+__version__ = "0.5"
+__authors__ = ["Andrew Godwin <andrew aeracode org>", "Andy McCurdy <andy andymccurdy com>"]
diff --git a/lib/south/db/__init__.py b/lib/south/db/__init__.py
new file mode 100644
index 0000000..8e4d773
--- /dev/null
+++ b/lib/south/db/__init__.py
@@ -0,0 +1,12 @@
+
+# Establish the common DatabaseOperations instance, which we call 'db'.
+# This code is somewhat lifted from django_evolution.
+from django.conf import settings
+import sys
+module_name = ['south.db', settings.DATABASE_ENGINE]
+try:
+ module = __import__('.'.join(module_name),{},{},[''])
+except ImportError:
+ sys.stderr.write("There is no South database module for the engine '%s'. Please either choose a supported one, or remove South from INSTALLED_APPS.\n" % settings.DATABASE_ENGINE)
+ sys.exit(1)
+db = module.DatabaseOperations()
\ No newline at end of file
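For context: this module resolves settings.DATABASE_ENGINE to the matching backend module under south.db and exposes a single shared DatabaseOperations instance as 'db'. A minimal usage sketch follows (the 'snowy_example' table and its columns are illustrative only, not part of this commit):

    # Hypothetical use from inside a migration's forwards():
    from django.db import models
    from south.db import db

    db.create_table('snowy_example', (
        ('id', models.AutoField(primary_key=True)),
        ('title', models.CharField(max_length=100)),
    ))
    db.execute_deferred_sql()  # runs any queued index/FK statements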
diff --git a/lib/south/db/generic.py b/lib/south/db/generic.py
new file mode 100644
index 0000000..c06b339
--- /dev/null
+++ b/lib/south/db/generic.py
@@ -0,0 +1,705 @@
+
+import datetime
+import string
+import random
+import re
+
+from django.core.management.color import no_style
+from django.db import connection, transaction, models
+from django.db.backends.util import truncate_name
+from django.db.models.fields import NOT_PROVIDED
+from django.dispatch import dispatcher
+from django.conf import settings
+
+
+def alias(attrname):
+ """
+ Returns a function which calls 'attrname' - for function aliasing.
+ We can't just use foo = bar, as this breaks subclassing.
+ """
+ def func(self, *args, **kwds):
+ return getattr(self, attrname)(*args, **kwds)
+ return func
+
+
+class DatabaseOperations(object):
+
+ """
+ Generic SQL implementation of the DatabaseOperations.
+ Some of this code comes from Django Evolution.
+ """
+
+ # We assume the generic DB can handle DDL transactions. MySQL will change this.
+ has_ddl_transactions = True
+
+ alter_string_set_type = 'ALTER COLUMN %(column)s TYPE %(type)s'
+ alter_string_set_null = 'ALTER COLUMN %(column)s DROP NOT NULL'
+ alter_string_drop_null = 'ALTER COLUMN %(column)s SET NOT NULL'
+ has_check_constraints = True
+ delete_check_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
+ allows_combined_alters = True
+ add_column_string = 'ALTER TABLE %s ADD COLUMN %s;'
+ delete_unique_sql = "ALTER TABLE %s DROP CONSTRAINT %s"
+ delete_foreign_key_sql = 'ALTER TABLE %s DROP CONSTRAINT %s'
+ supports_foreign_keys = True
+ max_index_name_length = 63
+ drop_index_string = 'DROP INDEX %(index_name)s'
+ delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;'
+ create_primary_key_string = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)"
+ drop_primary_key_string = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s"
+ backend_name = None
+
+ def __init__(self):
+ self.debug = False
+ self.deferred_sql = []
+ self.dry_run = False
+ self.pending_create_signals = []
+
+ def execute(self, sql, params=[]):
+ """
+ Executes the given SQL statement, with optional parameters.
+ If the instance's debug attribute is True, prints out what it executes.
+ """
+ cursor = connection.cursor()
+ if self.debug:
+ print " = %s" % sql, params
+
+ if self.dry_run:
+ return []
+
+ cursor.execute(sql, params)
+ try:
+ return cursor.fetchall()
+ except:
+ return []
+
+
+ def execute_many(self, sql, regex=r"(?mx) ([^';]* (?:'[^']*'[^';]*)*)", comment_regex=r"(?mx) (?:^\s*$)|(?:--.*$)"):
+ """
+ Takes a SQL file and executes it as many separate statements.
+ (Some backends, such as Postgres, don't work otherwise.)
+ """
+ # Be warned: This function is full of dark magic. Make sure you really
+ # know regexes before trying to edit it.
+ # First, strip comments
+ sql = "\n".join([x.strip().replace("%", "%%") for x in re.split(comment_regex, sql) if x.strip()])
+ # Now execute each statement
+ for st in re.split(regex, sql)[1:][::2]:
+ self.execute(st)
+
+
+ def add_deferred_sql(self, sql):
+ """
+ Add a SQL statement to the deferred list; it won't be executed until
+ this instance's execute_deferred_sql method is run.
+ """
+ self.deferred_sql.append(sql)
+
+
+ def execute_deferred_sql(self):
+ """
+ Executes all deferred SQL, resetting the deferred_sql list
+ """
+ for sql in self.deferred_sql:
+ self.execute(sql)
+
+ self.deferred_sql = []
+
+
+ def clear_deferred_sql(self):
+ """
+ Resets the deferred_sql list to empty.
+ """
+ self.deferred_sql = []
+
+
+ def clear_run_data(self, pending_creates = None):
+ """
+ Resets variables to how they should be before a run. Used for dry runs.
+ If you want, pass in an old pending_creates to reset to.
+ """
+ self.clear_deferred_sql()
+ self.pending_create_signals = pending_creates or []
+
+
+ def get_pending_creates(self):
+ return self.pending_create_signals
+
+
+ def create_table(self, table_name, fields):
+ """
+ Creates the table 'table_name'. 'fields' is a tuple of fields,
+ each represented by a 2-part tuple of field name and a
+ django.db.models.fields.Field object
+ """
+ qn = connection.ops.quote_name
+
+ # allow fields to be a dictionary
+ # removed for now - philosophical reasons (this is almost certainly not what you want)
+ #try:
+ # fields = fields.items()
+ #except AttributeError:
+ # pass
+
+ columns = [
+ self.column_sql(table_name, field_name, field)
+ for field_name, field in fields
+ ]
+
+ self.execute('CREATE TABLE %s (%s);' % (qn(table_name), ', '.join([col for col in columns if col])))
+
+ add_table = alias('create_table') # Alias for consistency's sake
+
+
+ def rename_table(self, old_table_name, table_name):
+ """
+ Renames the table 'old_table_name' to 'table_name'.
+ """
+ if old_table_name == table_name:
+ # No Operation
+ return
+ qn = connection.ops.quote_name
+ params = (qn(old_table_name), qn(table_name))
+ self.execute('ALTER TABLE %s RENAME TO %s;' % params)
+
+
+ def delete_table(self, table_name, cascade=True):
+ """
+ Deletes the table 'table_name'.
+ """
+ qn = connection.ops.quote_name
+ params = (qn(table_name), )
+ if cascade:
+ self.execute('DROP TABLE %s CASCADE;' % params)
+ else:
+ self.execute('DROP TABLE %s;' % params)
+
+ drop_table = alias('delete_table')
+
+
+ def clear_table(self, table_name):
+ """
+ Deletes all rows from 'table_name'.
+ """
+ qn = connection.ops.quote_name
+ params = (qn(table_name), )
+ self.execute('DELETE FROM %s;' % params)
+
+
+
+ def add_column(self, table_name, name, field, keep_default=True):
+ """
+ Adds the column 'name' to the table 'table_name'.
+ Uses the 'field' parameter, a django.db.models.fields.Field instance,
+ to generate the necessary SQL.
+
+ @param table_name: The name of the table to add the column to
+ @param name: The name of the column to add
+ @param field: The field to use
+ """
+ qn = connection.ops.quote_name
+ sql = self.column_sql(table_name, name, field)
+ if sql:
+ params = (
+ qn(table_name),
+ sql,
+ )
+ sql = self.add_column_string % params
+ self.execute(sql)
+
+ # Now, drop the default if we need to
+ if not keep_default and field.default:
+ field.default = NOT_PROVIDED
+ self.alter_column(table_name, name, field, explicit_name=False)
+
+
+ def _db_type_for_alter_column(self, field):
+ """
+ Returns a field's type suitable for ALTER COLUMN.
+ By default it just returns field.db_type().
+ To be overridden by backend-specific subclasses.
+ @param field: The field to generate type for
+ """
+ return field.db_type()
+
+ def alter_column(self, table_name, name, field, explicit_name=True):
+ """
+ Alters the given column name so it will match the given field.
+ Note that conversion between the two by the database must be possible.
+ Will not automatically add _id by default; to have this behaviour, pass
+ explicit_name=False.
+
+ @param table_name: The name of the table to add the column to
+ @param name: The name of the column to alter
+ @param field: The new field definition to use
+ """
+
+ # hook for the field to do any resolution prior to its attributes being queried
+ if hasattr(field, 'south_init'):
+ field.south_init()
+
+ qn = connection.ops.quote_name
+
+ # Add _id or whatever if we need to
+ field.set_attributes_from_name(name)
+ if not explicit_name:
+ name = field.column
+
+ # Drop all check constraints. TODO: Add the right ones back.
+ if self.has_check_constraints:
+ check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
+ for constraint in check_constraints:
+ self.execute(self.delete_check_sql % {'table':table_name, 'constraint': constraint})
+
+ # First, change the type
+ params = {
+ "column": qn(name),
+ "type": self._db_type_for_alter_column(field)
+ }
+
+ # SQLs is a list of (SQL, values) pairs.
+ sqls = [(self.alter_string_set_type % params, [])]
+
+ # Next, set any default
+ if not field.null and field.has_default():
+ default = field.get_default()
+ sqls.append(('ALTER COLUMN %s SET DEFAULT %%s ' % (qn(name),), [default]))
+ else:
+ sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (qn(name),), []))
+
+
+ # Next, nullity
+ params = {
+ "column": qn(name),
+ "type": field.db_type(),
+ }
+ if field.null:
+ sqls.append((self.alter_string_set_null % params, []))
+ else:
+ sqls.append((self.alter_string_drop_null % params, []))
+
+ # TODO: Unique
+
+ if self.allows_combined_alters:
+ sqls, values = zip(*sqls)
+ self.execute(
+ "ALTER TABLE %s %s;" % (qn(table_name), ", ".join(sqls)),
+ flatten(values),
+ )
+ else:
+ # Some databases (e.g. MySQL) don't like more than one ALTER at once.
+ for sql, values in sqls:
+ self.execute("ALTER TABLE %s %s;" % (qn(table_name), sql), values)
+
+
+ def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"):
+ """
+ Gets the names of the constraints affecting the given columns.
+ """
+
+ if self.dry_run:
+ raise ValueError("Cannot get constraints for columns during a dry run.")
+
+ columns = set(columns)
+
+ if type == "CHECK":
+ ifsc_table = "constraint_column_usage"
+ else:
+ ifsc_table = "key_column_usage"
+
+ # First, load all constraint->col mappings for this table.
+ rows = self.execute("""
+ SELECT kc.constraint_name, kc.column_name
+ FROM information_schema.%s AS kc
+ JOIN information_schema.table_constraints AS c ON
+ kc.table_schema = c.table_schema AND
+ kc.table_name = c.table_name AND
+ kc.constraint_name = c.constraint_name
+ WHERE
+ kc.table_schema = %%s AND
+ kc.table_name = %%s AND
+ c.constraint_type = %%s
+ """ % ifsc_table, ['public', table_name, type])
+ # Load into a dict
+ mapping = {}
+ for constraint, column in rows:
+ mapping.setdefault(constraint, set())
+ mapping[constraint].add(column)
+ # Find ones affecting these columns
+ for constraint, itscols in mapping.items():
+ if itscols == columns:
+ yield constraint
+
+
+ def create_unique(self, table_name, columns):
+ """
+ Creates a UNIQUE constraint on the columns on the given table.
+ """
+ qn = connection.ops.quote_name
+
+ if not isinstance(columns, (list, tuple)):
+ columns = [columns]
+
+ name = self.create_index_name(table_name, columns)
+
+ cols = ", ".join(map(qn, columns))
+ self.execute("ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)" % (qn(table_name), qn(name), cols))
+ return name
+
+
+
+ def delete_unique(self, table_name, columns):
+ """
+ Deletes a UNIQUE constraint on precisely the columns on the given table.
+ """
+ qn = connection.ops.quote_name
+
+ if not isinstance(columns, (list, tuple)):
+ columns = [columns]
+
+ # Dry runs mean we can't do anything.
+ if self.dry_run:
+ return
+
+ constraints = list(self._constraints_affecting_columns(table_name, columns))
+ if not constraints:
+ raise ValueError("Cannot find a UNIQUE constraint on table %s, columns %r" % (table_name, columns))
+ for constraint in constraints:
+ self.execute(self.delete_unique_sql % (qn(table_name), qn(constraint)))
+
+
+ def column_sql(self, table_name, field_name, field, tablespace=''):
+ """
+ Creates the SQL snippet for a column. Used by add_column and add_table.
+ """
+ qn = connection.ops.quote_name
+
+ field.set_attributes_from_name(field_name)
+
+ # hook for the field to do any resolution prior to its attributes being queried
+ if hasattr(field, 'south_init'):
+ field.south_init()
+
+ sql = field.db_type()
+ if sql:
+ field_output = [qn(field.column), sql]
+ field_output.append('%sNULL' % (not field.null and 'NOT ' or ''))
+ if field.primary_key:
+ field_output.append('PRIMARY KEY')
+ elif field.unique:
+ # Just use UNIQUE (no indexes any more, we have delete_unique)
+ field_output.append('UNIQUE')
+
+ tablespace = field.db_tablespace or tablespace
+ if tablespace and connection.features.supports_tablespaces and field.unique:
+ # We must specify the index tablespace inline, because we
+ # won't be generating a CREATE INDEX statement for this field.
+ field_output.append(connection.ops.tablespace_sql(tablespace, inline=True))
+
+ sql = ' '.join(field_output)
+ sqlparams = ()
+ # if the field is "NOT NULL" and a default value is provided, create the column with it
+ # this allows the addition of a NOT NULL field to a table with existing rows
+ if not field.null and field.has_default():
+ default = field.get_default()
+ # If the default is actually None, don't add a default term
+ if default is not None:
+ # If the default is a callable, then call it!
+ if callable(default):
+ default = default()
+ # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
+ if isinstance(default, basestring):
+ default = "'%s'" % default.replace("'", "''")
+ elif isinstance(default, datetime.date):
+ default = "'%s'" % default
+ sql += " DEFAULT %s"
+ sqlparams = (default,)
+
+ if field.rel and self.supports_foreign_keys:
+ self.add_deferred_sql(
+ self.foreign_key_sql(
+ table_name,
+ field.column,
+ field.rel.to._meta.db_table,
+ field.rel.to._meta.get_field(field.rel.field_name).column
+ )
+ )
+
+ if field.db_index and not field.unique:
+ self.add_deferred_sql(self.create_index_sql(table_name, [field.column]))
+
+ if hasattr(field, 'post_create_sql'):
+ style = no_style()
+ for stmt in field.post_create_sql(style, table_name):
+ self.add_deferred_sql(stmt)
+
+ if sql:
+ return sql % sqlparams
+ else:
+ return None
+
+
+ def foreign_key_sql(self, from_table_name, from_column_name, to_table_name, to_column_name):
+ """
+ Generates a full SQL statement to add a foreign key constraint
+ """
+ qn = connection.ops.quote_name
+ constraint_name = '%s_refs_%s_%x' % (from_column_name, to_column_name, abs(hash((from_table_name, to_table_name))))
+ return 'ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % (
+ qn(from_table_name),
+ qn(truncate_name(constraint_name, connection.ops.max_name_length())),
+ qn(from_column_name),
+ qn(to_table_name),
+ qn(to_column_name),
+ connection.ops.deferrable_sql() # Django knows this
+ )
+
+
+ def delete_foreign_key(self, table_name, column):
+ "Drop a foreign key constraint"
+ if self.dry_run:
+ return # We can't look at the DB to get the constraints
+ constraints = list(self._constraints_affecting_columns(table_name, [column], "FOREIGN KEY"))
+ if not constraints:
+ raise ValueError("Cannot find a FOREIGN KEY constraint on table %s, column %s" % (table_name, column))
+ for constraint_name in constraints:
+ self.execute(self.delete_foreign_key_sql % (table_name, constraint_name))
+
+ drop_foreign_key = alias('delete_foreign_key')
+
+
+ def create_index_name(self, table_name, column_names):
+ """
+ Generate a unique name for the index
+ """
+ index_unique_name = ''
+ if len(column_names) > 1:
+ index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names))))
+
+ return ('%s_%s%s' % (table_name, column_names[0], index_unique_name))[:self.max_index_name_length]
+
+
+ def create_index_sql(self, table_name, column_names, unique=False, db_tablespace=''):
+ """
+ Generates a create index statement on 'table_name' for a list of 'column_names'
+ """
+ qn = connection.ops.quote_name
+ if not column_names:
+ print "No column names supplied on which to create an index"
+ return ''
+
+ if db_tablespace and connection.features.supports_tablespaces:
+ tablespace_sql = ' ' + connection.ops.tablespace_sql(db_tablespace)
+ else:
+ tablespace_sql = ''
+
+ index_name = self.create_index_name(table_name, column_names)
+ qn = connection.ops.quote_name
+ return 'CREATE %sINDEX %s ON %s (%s)%s;' % (
+ unique and 'UNIQUE ' or '',
+ qn(index_name),
+ qn(table_name),
+ ','.join([qn(field) for field in column_names]),
+ tablespace_sql
+ )
+
+ def create_index(self, table_name, column_names, unique=False, db_tablespace=''):
+ """ Executes a create index statement """
+ sql = self.create_index_sql(table_name, column_names, unique, db_tablespace)
+ self.execute(sql)
+
+
+ def delete_index(self, table_name, column_names, db_tablespace=''):
+ """
+ Deletes an index created with create_index.
+ This is possible using only columns due to the deterministic
+ index naming function which relies on column names.
+ """
+ if isinstance(column_names, (str, unicode)):
+ column_names = [column_names]
+ name = self.create_index_name(table_name, column_names)
+ qn = connection.ops.quote_name
+ sql = self.drop_index_string % {"index_name": qn(name), "table_name": qn(table_name)}
+ self.execute(sql)
+
+ drop_index = alias('delete_index')
+
+
+ def delete_column(self, table_name, name):
+ """
+ Deletes the column 'column_name' from the table 'table_name'.
+ """
+ qn = connection.ops.quote_name
+ params = (qn(table_name), qn(name))
+ self.execute(self.delete_column_string % params, [])
+
+ drop_column = alias('delete_column')
+
+
+ def rename_column(self, table_name, old, new):
+ """
+ Renames the column 'old' from the table 'table_name' to 'new'.
+ """
+ raise NotImplementedError("rename_column has no generic SQL syntax")
+
+
+ def drop_primary_key(self, table_name):
+ """
+ Drops the old primary key.
+ """
+ qn = connection.ops.quote_name
+ self.execute(self.drop_primary_key_string % {
+ "table": qn(table_name),
+ "constraint": qn(table_name+"_pkey"),
+ })
+
+ delete_primary_key = alias('drop_primary_key')
+
+
+ def create_primary_key(self, table_name, columns):
+ """
+ Creates a new primary key on the specified columns.
+ """
+ if not isinstance(columns, (list, tuple)):
+ columns = [columns]
+ qn = connection.ops.quote_name
+ self.execute(self.create_primary_key_string % {
+ "table": qn(table_name),
+ "constraint": qn(table_name+"_pkey"),
+ "columns": ", ".join(map(qn, columns)),
+ })
+
+
+ def start_transaction(self):
+ """
+ Makes sure the following commands are inside a transaction.
+ Must be followed by a (commit|rollback)_transaction call.
+ """
+ if self.dry_run:
+ return
+ transaction.commit_unless_managed()
+ transaction.enter_transaction_management()
+ transaction.managed(True)
+
+
+ def commit_transaction(self):
+ """
+ Commits the current transaction.
+ Must be preceded by a start_transaction call.
+ """
+ if self.dry_run:
+ return
+ transaction.commit()
+ transaction.leave_transaction_management()
+
+
+ def rollback_transaction(self):
+ """
+ Rolls back the current transaction.
+ Must be preceded by a start_transaction call.
+ """
+ if self.dry_run:
+ return
+ transaction.rollback()
+ transaction.leave_transaction_management()
+
+
+ def send_create_signal(self, app_label, model_names):
+ self.pending_create_signals.append((app_label, model_names))
+
+
+ def send_pending_create_signals(self):
+ for (app_label, model_names) in self.pending_create_signals:
+ self.really_send_create_signal(app_label, model_names)
+ self.pending_create_signals = []
+
+
+ def really_send_create_signal(self, app_label, model_names):
+ """
+ Sends a post_syncdb signal for the model specified.
+
+ If the model is not found (perhaps it's been deleted?),
+ no signal is sent.
+
+ TODO: The behavior of django.contrib.* apps seems flawed in that
+ they don't respect created_models. Rather, they blindly execute
+ over all models within the app sending the signal. This is a
+ patch we should push Django to make. For now, this should work.
+ """
+ if self.debug:
+ print " - Sending post_syncdb signal for %s: %s" % (app_label, model_names)
+ app = models.get_app(app_label)
+ if not app:
+ return
+
+ created_models = []
+ for model_name in model_names:
+ model = models.get_model(app_label, model_name)
+ if model:
+ created_models.append(model)
+
+ if created_models:
+ # syncdb defaults -- perhaps take these as options?
+ verbosity = 1
+ interactive = True
+
+ if hasattr(dispatcher, "send"):
+ dispatcher.send(signal=models.signals.post_syncdb, sender=app,
+ app=app, created_models=created_models,
+ verbosity=verbosity, interactive=interactive)
+ else:
+ models.signals.post_syncdb.send(sender=app,
+ app=app, created_models=created_models,
+ verbosity=verbosity, interactive=interactive)
+
+
+ def mock_model(self, model_name, db_table, db_tablespace='',
+ pk_field_name='id', pk_field_type=models.AutoField,
+ pk_field_args=[], pk_field_kwargs={}):
+ """
+ Generates a MockModel class that provides enough information
+ to be used by a foreign key/many-to-many relationship.
+
+ Migrations should prefer to use these rather than actual models
+ as models could get deleted over time, but these can remain in
+ migration files forever.
+
+ Deprecated.
+ """
+ class MockOptions(object):
+ def __init__(self):
+ self.db_table = db_table
+ self.db_tablespace = db_tablespace or settings.DEFAULT_TABLESPACE
+ self.object_name = model_name
+ self.module_name = model_name.lower()
+
+ if pk_field_type == models.AutoField:
+ pk_field_kwargs['primary_key'] = True
+
+ self.pk = pk_field_type(*pk_field_args, **pk_field_kwargs)
+ self.pk.set_attributes_from_name(pk_field_name)
+ self.abstract = False
+
+ def get_field_by_name(self, field_name):
+ # we only care about the pk field
+ return (self.pk, self.model, True, False)
+
+ def get_field(self, name):
+ # we only care about the pk field
+ return self.pk
+
+ class MockModel(object):
+ _meta = None
+
+ # We need to return an actual class object here, not an instance
+ MockModel._meta = MockOptions()
+ MockModel._meta.model = MockModel
+ return MockModel
+
+
+# Single-level flattening of lists
+def flatten(ls):
+ nl = []
+ for l in ls:
+ nl += l
+ return nl
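The generic backend above is the whole surface that migrations talk to. A hedged sketch of the (deprecated) mock_model helper together with add_column, assuming an existing 'snowy_note' table and Django's auth_user table; both names are illustrative, not taken from this commit:

    from django.db import models
    from south.db import db

    # Stand-in for auth.User so the migration file never has to
    # import the real model.
    user = db.mock_model(model_name='User', db_table='auth_user',
                         db_tablespace='', pk_field_name='id',
                         pk_field_type=models.AutoField)
    db.add_column('snowy_note', 'author',
                  models.ForeignKey(user, null=True))
    db.execute_deferred_sql()  # emits the deferred FOREIGN KEY DDL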
diff --git a/lib/south/db/mysql.py b/lib/south/db/mysql.py
new file mode 100644
index 0000000..44bfa1a
--- /dev/null
+++ b/lib/south/db/mysql.py
@@ -0,0 +1,137 @@
+
+from django.db import connection
+from django.conf import settings
+from south.db import generic
+
+class DatabaseOperations(generic.DatabaseOperations):
+
+ """
+ MySQL implementation of database operations.
+ """
+
+ backend_name = "mysql"
+ alter_string_set_type = ''
+ alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
+ alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
+ drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
+ drop_primary_key_string = "ALTER TABLE %(table)s DROP PRIMARY KEY"
+ allows_combined_alters = False
+ has_ddl_transactions = False
+ has_check_constraints = False
+ delete_unique_sql = "ALTER TABLE %s DROP INDEX %s"
+
+
+ def execute(self, sql, params=[]):
+ if hasattr(settings, "DATABASE_STORAGE_ENGINE") and \
+ settings.DATABASE_STORAGE_ENGINE:
+ generic.DatabaseOperations.execute(self, "SET storage_engine=%s;" %
+ settings.DATABASE_STORAGE_ENGINE)
+ return generic.DatabaseOperations.execute(self, sql, params)
+ execute.__doc__ = generic.DatabaseOperations.execute.__doc__
+
+
+ def rename_column(self, table_name, old, new):
+ if old == new or self.dry_run:
+ return []
+
+ qn = connection.ops.quote_name
+
+ rows = [x for x in self.execute('DESCRIBE %s' % (qn(table_name),)) if x[0] == old]
+
+ if not rows:
+ raise ValueError("No column '%s' in '%s'." % (old, table_name))
+
+ params = (
+ qn(table_name),
+ qn(old),
+ qn(new),
+ rows[0][1],
+ rows[0][2] == "YES" and "NULL" or "NOT NULL",
+ rows[0][3] == "PRI" and "PRIMARY KEY" or "",
+ rows[0][4] and "DEFAULT " or "",
+ rows[0][4] and "%s" or "",
+ rows[0][5] or "",
+ )
+
+ sql = 'ALTER TABLE %s CHANGE COLUMN %s %s %s %s %s %s %s %s;' % params
+
+ if rows[0][4]:
+ self.execute(sql, (rows[0][4],))
+ else:
+ self.execute(sql)
+
+
+ def delete_column(self, table_name, name):
+ qn = connection.ops.quote_name
+ db_name = settings.DATABASE_NAME
+
+ # See if there is a foreign key on this column
+ cursor = connection.cursor()
+ get_fkeyname_query = "SELECT tc.constraint_name FROM \
+ information_schema.table_constraints tc, \
+ information_schema.key_column_usage kcu \
+ WHERE tc.table_name=kcu.table_name \
+ AND tc.table_schema=kcu.table_schema \
+ AND tc.constraint_name=kcu.constraint_name \
+ AND tc.constraint_type='FOREIGN KEY' \
+ AND tc.table_schema='%s' \
+ AND tc.table_name='%s' \
+ AND kcu.column_name='%s'"
+
+ result = cursor.execute(get_fkeyname_query % (db_name, table_name, name))
+
+ # if a foreign key exists, we need to delete it first
+ if result > 0:
+ assert result == 1 #we should only have one result
+ fkey_name = cursor.fetchone()[0]
+ drop_query = "ALTER TABLE %s DROP FOREIGN KEY %s"
+ cursor.execute(drop_query % (qn(table_name), qn(fkey_name)))
+
+ super(DatabaseOperations, self).delete_column(table_name, name)
+
+
+ def rename_table(self, old_table_name, table_name):
+ """
+ Renames the table 'old_table_name' to 'table_name'.
+ """
+ if old_table_name == table_name:
+ # No Operation
+ return
+ qn = connection.ops.quote_name
+ params = (qn(old_table_name), qn(table_name))
+ self.execute('RENAME TABLE %s TO %s;' % params)
+
+
+ def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"):
+ """
+ Gets the names of the constraints affecting the given columns.
+ """
+
+ if self.dry_run:
+ raise ValueError("Cannot get constraints for columns during a dry run.")
+
+ columns = set(columns)
+ db_name = settings.DATABASE_NAME
+ # First, load all constraint->col mappings for this table.
+ rows = self.execute("""
+ SELECT kc.constraint_name, kc.column_name
+ FROM information_schema.key_column_usage AS kc
+ JOIN information_schema.table_constraints AS c ON
+ kc.table_schema = c.table_schema AND
+ kc.table_name = c.table_name AND
+ kc.constraint_name = c.constraint_name
+ WHERE
+ kc.table_schema = %s AND
+ kc.table_catalog IS NULL AND
+ kc.table_name = %s AND
+ c.constraint_type = %s
+ """, [db_name, table_name, type])
+ # Load into a dict
+ mapping = {}
+ for constraint, column in rows:
+ mapping.setdefault(constraint, set())
+ mapping[constraint].add(column)
+ # Find ones affecting these columns
+ for constraint, itscols in mapping.items():
+ if itscols == columns:
+ yield constraint
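Because MySQL has no generic RENAME COLUMN, rename_column above reads the existing definition back out of DESCRIBE and re-issues it via CHANGE COLUMN. A sketch of the round trip, with made-up table and column names:

    from south.db import db

    db.rename_column('snowy_note', 'body', 'content')
    # roughly: ALTER TABLE `snowy_note` CHANGE COLUMN `body` `content`
    #          longtext NOT NULL;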
diff --git a/lib/south/db/postgresql_psycopg2.py b/lib/south/db/postgresql_psycopg2.py
new file mode 100644
index 0000000..5c1d763
--- /dev/null
+++ b/lib/south/db/postgresql_psycopg2.py
@@ -0,0 +1,64 @@
+
+from django.db import connection, models
+from south.db import generic
+
+class DatabaseOperations(generic.DatabaseOperations):
+
+ """
+ PsycoPG2 implementation of database operations.
+ """
+
+ backend_name = "postgres"
+
+ def rename_column(self, table_name, old, new):
+ if old == new:
+ return []
+ qn = connection.ops.quote_name
+ params = (qn(table_name), qn(old), qn(new))
+ self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % params)
+
+ def rename_table(self, old_table_name, table_name):
+ "will rename the table and an associated ID sequence and primary key index"
+ # First, rename the table
+ generic.DatabaseOperations.rename_table(self, old_table_name, table_name)
+ # Then, try renaming the ID sequence
+ # (if you're using other AutoFields... your problem, unfortunately)
+ self.commit_transaction()
+ self.start_transaction()
+ try:
+ generic.DatabaseOperations.rename_table(self, old_table_name+"_id_seq", table_name+"_id_seq")
+ except:
+ if self.debug:
+ print " ~ No such sequence (ignoring error)"
+ self.rollback_transaction()
+ else:
+ self.commit_transaction()
+ self.start_transaction()
+
+ # Rename the primary key index; this will not rename other indices on
+ # the table that are used by django (e.g. foreign keys). Until we
+ # figure out how, you need to do this yourself.
+ try:
+ generic.DatabaseOperations.rename_table(self, old_table_name+"_pkey", table_name+ "_pkey")
+ except:
+ if self.debug:
+ print " ~ No such primary key (ignoring error)"
+ self.rollback_transaction()
+ else:
+ self.commit_transaction()
+ self.start_transaction()
+
+
+ def rename_index(self, old_index_name, index_name):
+ "Rename an index individually"
+ generic.DatabaseOperations.rename_table(self, old_index_name, index_name)
+
+ def _db_type_for_alter_column(self, field):
+ """
+ Returns a field's type suitable for ALTER COLUMN.
+ Strips CHECKs from PositiveSmallIntegerField and PositiveIntegerField.
+ @param field: The field to generate type for
+ """
+ if isinstance(field, models.PositiveSmallIntegerField) or isinstance(field, models.PositiveIntegerField):
+ return field.db_type().split(" ")[0]
+ return super(DatabaseOperations, self)._db_type_for_alter_column(field)
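The _db_type_for_alter_column override exists because, on PostgreSQL, Django's column type for the positive integer fields carries an inline CHECK clause that is not valid inside ALTER COLUMN ... TYPE. A sketch of what the split does (the exact db_type string is an assumption about Django's Postgres type map):

    # db_type() for a PositiveIntegerField on Postgres looks like:
    db_type = 'integer CHECK ("points" >= 0)'
    alter_type = db_type.split(" ")[0]  # -> 'integer'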
diff --git a/lib/django_evolution/management/commands/__init__.py b/lib/south/db/sql_server/__init__.py
similarity index 100%
copy from lib/django_evolution/management/commands/__init__.py
copy to lib/south/db/sql_server/__init__.py
diff --git a/lib/south/db/sql_server/pyodbc.py b/lib/south/db/sql_server/pyodbc.py
new file mode 100644
index 0000000..58c5166
--- /dev/null
+++ b/lib/south/db/sql_server/pyodbc.py
@@ -0,0 +1,25 @@
+from django.db import connection
+from django.db.models.fields import *
+from south.db import generic
+
+class DatabaseOperations(generic.DatabaseOperations):
+ """
+ django-pyodbc (sql_server.pyodbc) implementation of database operations.
+ """
+
+ add_column_string = 'ALTER TABLE %s ADD %s;'
+ alter_string_set_type = 'ALTER COLUMN %(column)s %(type)s'
+ allows_combined_alters = False
+ delete_column_string = 'ALTER TABLE %s DROP COLUMN %s;'
+
+ def create_table(self, table_name, fields):
+ # Tweak stuff as needed
+ for name,f in fields:
+ if isinstance(f, BooleanField):
+ if f.default == True:
+ f.default = 1
+ if f.default == False:
+ f.default = 0
+
+ # Run
+ generic.DatabaseOperations.create_table(self, table_name, fields)
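The BooleanField tweak is needed because MSSQL bit columns want 1/0 literals rather than True/False defaults. A tiny illustration (assumption: the default value flows straight into the generated DDL):

    from django.db import models

    f = models.BooleanField(default=True)
    # create_table() above rewrites f.default to 1 before handing the
    # field to the generic CREATE TABLE path.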
diff --git a/lib/south/db/sqlite3.py b/lib/south/db/sqlite3.py
new file mode 100644
index 0000000..45e71c7
--- /dev/null
+++ b/lib/south/db/sqlite3.py
@@ -0,0 +1,50 @@
+
+from django.db import connection
+from south.db import generic
+
+class DatabaseOperations(generic.DatabaseOperations):
+
+ """
+ SQLite3 implementation of database operations.
+ """
+
+ # SQLite ignores foreign key constraints. I wish I could.
+ supports_foreign_keys = False
+
+ # You can't add UNIQUE columns with an ALTER TABLE.
+ def add_column(self, table_name, name, field, *args, **kwds):
+ # Run ALTER TABLE with no unique column
+ unique, field._unique, field.db_index = field.unique, False, False
+ # If it's not nullable, and has no default, raise an error (SQLite is picky)
+ if not field.null and (not field.has_default() or field.get_default() is None):
+ raise ValueError("You cannot add a null=False column without a default value.")
+ generic.DatabaseOperations.add_column(self, table_name, name, field, *args, **kwds)
+ # If it _was_ unique, make an index on it.
+ if unique:
+ self.create_index(table_name, [name], unique=True)
+
+ # SQLite doesn't have ALTER COLUMN
+ def alter_column(self, table_name, name, field, explicit_name=True):
+ """
+ Not supported under SQLite.
+ """
+ raise NotImplementedError("SQLite does not support altering columns.")
+
+ # Nor DROP COLUMN
+ def delete_column(self, table_name, name):
+ """
+ Not supported under SQLite.
+ """
+ raise NotImplementedError("SQLite does not support deleting columns.")
+
+ # Nor RENAME COLUMN
+ def rename_column(self, table_name, old, new):
+ """
+ Not supported under SQLite.
+ """
+ raise NotImplementedError("SQLite does not support renaming columns.")
+
+ # No cascades on deletes
+ def delete_table(self, table_name, cascade=True):
+ generic.DatabaseOperations.delete_table(self, table_name, False)
+
\ No newline at end of file
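Since SQLite's ALTER TABLE can only append columns, add_column above insists on a usable default for NOT NULL additions. A hedged example (table and field names invented for illustration):

    from django.db import models
    from south.db import db

    # Fine: NOT NULL plus a concrete default.
    db.add_column('snowy_note', 'source',
                  models.CharField(max_length=20, default='web'))

    # Raises ValueError: null=False with no default.
    db.add_column('snowy_note', 'priority', models.IntegerField())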
diff --git a/lib/django_evolution/tests/db/__init__.py b/lib/south/management/__init__.py
similarity index 100%
rename from lib/django_evolution/tests/db/__init__.py
rename to lib/south/management/__init__.py
diff --git a/lib/django_evolution/management/commands/__init__.py b/lib/south/management/commands/__init__.py
similarity index 100%
copy from lib/django_evolution/management/commands/__init__.py
copy to lib/south/management/commands/__init__.py
diff --git a/lib/south/management/commands/migrate.py b/lib/south/management/commands/migrate.py
new file mode 100644
index 0000000..72f9773
--- /dev/null
+++ b/lib/south/management/commands/migrate.py
@@ -0,0 +1,110 @@
+from django.core.management.base import BaseCommand
+from django.core.management.color import no_style
+from django.conf import settings
+from django.db import models
+from optparse import make_option
+from south import migration
+import sys
+
+class Command(BaseCommand):
+ option_list = BaseCommand.option_list + (
+ make_option('--all', action='store_true', dest='all_apps', default=False,
+ help='Run the specified migration for all apps.'),
+ make_option('--list', action='store_true', dest='list', default=False,
+ help='List migrations noting those that have been applied'),
+ make_option('--skip', action='store_true', dest='skip', default=False,
+ help='Will skip over out-of-order missing migrations'),
+ make_option('--merge', action='store_true', dest='merge', default=False,
+ help='Will run out-of-order missing migrations as they are - no rollbacks.'),
+ make_option('--no-initial-data', action='store_true', dest='no_initial_data', default=False,
+ help='Skips loading initial data if specified.'),
+ make_option('--fake', action='store_true', dest='fake', default=False,
+ help="Pretends to do the migrations, but doesn't actually execute them."),
+ make_option('--db-dry-run', action='store_true', dest='db_dry_run', default=False,
+ help="Doesn't execute the SQL generated by the db methods, and doesn't store a record that the migration(s) occurred. Useful to test migrations before applying them."),
+ )
+ if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]:
+ option_list += (
+ make_option('--verbosity', action='store', dest='verbosity', default='1',
+ type='choice', choices=['0', '1', '2'],
+ help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
+ )
+ help = "Runs migrations for all apps."
+
+ def handle(self, app=None, target=None, skip=False, merge=False, backwards=False, fake=False, db_dry_run=False, list=False, **options):
+
+ # Work out what the resolve mode is
+ resolve_mode = merge and "merge" or (skip and "skip" or None)
+ # Turn on db debugging
+ from south.db import db
+ db.debug = True
+
+ # NOTE: THIS IS DUPLICATED FROM django.core.management.commands.syncdb
+ # This code imports any module named 'management' in INSTALLED_APPS.
+ # The 'management' module is the preferred way of listening to post_syncdb
+ # signals, and since we're sending those out with create_table migrations,
+ # we need apps to behave correctly.
+ for app_name in settings.INSTALLED_APPS:
+ try:
+ __import__(app_name + '.management', {}, {}, [''])
+ except ImportError, exc:
+ msg = exc.args[0]
+ if not msg.startswith('No module named') or 'management' not in msg:
+ raise
+ # END DJANGO DUPE CODE
+
+ # if all_apps flag is set, shift app over to target
+ if options['all_apps']:
+ target = app
+ app = None
+
+ # Migrate each app
+ if app:
+ apps = [migration.get_app(app.split(".")[-1])]
+ else:
+ apps = migration.get_migrated_apps()
+ silent = options.get('verbosity', 0) == 0
+
+ if list and apps:
+ list_migrations(apps)
+
+ if not list:
+ for app in apps:
+ result = migration.migrate_app(
+ app,
+ resolve_mode = resolve_mode,
+ target_name = target,
+ fake = fake,
+ db_dry_run = db_dry_run,
+ silent = silent,
+ load_inital_data = not options['no_initial_data'],
+ skip = skip,
+ )
+ if result is False:
+ return
+
+
+def list_migrations(apps):
+ from south.models import MigrationHistory
+ apps = list(apps)
+ names = [migration.get_app_name(app) for app in apps]
+ applied_migrations = MigrationHistory.objects.filter(app_name__in=names)
+ applied_migrations = ['%s.%s' % (mi.app_name,mi.migration) for mi in applied_migrations]
+
+ print
+ for app in apps:
+ print migration.get_app_name(app)
+ all_migrations = migration.get_migration_names(app)
+ for migration_name in all_migrations:
+ long_form = '%s.%s' % (migration.get_app_name(app),migration_name)
+ if long_form in applied_migrations:
+ print format_migration_list_item(migration_name)
+ else:
+ print format_migration_list_item(migration_name, applied=False)
+ print
+
+
+def format_migration_list_item(name, applied=True):
+ if applied:
+ return ' * %s' % name
+ return ' %s' % name
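For reference, some example invocations of the command defined above (the app and migration names are illustrative only):

    ./manage.py migrate                     # run all pending migrations
    ./manage.py migrate notes               # just the 'notes' app
    ./manage.py migrate notes 0002_eggs     # migrate to a named target
    ./manage.py migrate --list              # applied ('*') vs pending
    ./manage.py migrate notes --db-dry-run  # print SQL, record nothing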
diff --git a/lib/south/management/commands/startmigration.py b/lib/south/management/commands/startmigration.py
new file mode 100644
index 0000000..b1ffbfd
--- /dev/null
+++ b/lib/south/management/commands/startmigration.py
@@ -0,0 +1,928 @@
+"""
+Startmigration command, version 2.
+"""
+
+import sys
+import os
+import re
+import string
+import random
+import inspect
+import parser
+from optparse import make_option
+
+from django.core.management.base import BaseCommand
+from django.core.management.color import no_style
+from django.db import models
+from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT
+from django.contrib.contenttypes.generic import GenericRelation
+from django.db.models.fields import FieldDoesNotExist
+from django.conf import settings
+
+try:
+ set
+except NameError:
+ from sets import Set as set
+
+from south import migration, modelsparser
+
+
+class Command(BaseCommand):
+ option_list = BaseCommand.option_list + (
+ make_option('--model', action='append', dest='added_model_list', type='string',
+ help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --model parameters.'),
+ make_option('--add-field', action='append', dest='added_field_list', type='string',
+ help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
+ make_option('--initial', action='store_true', dest='initial', default=False,
+ help='Generate the initial schema for the app.'),
+ make_option('--auto', action='store_true', dest='auto', default=False,
+ help='Attempt to automatically detect differences from the last migration.'),
+ make_option('--freeze', action='append', dest='freeze_list', type='string',
+ help='Freeze the specified model(s). Pass in either an app name (to freeze the whole app) or a single model, as appname.modelname.'),
+ )
+ help = "Creates a new template migration for the given app"
+
+ def handle(self, app=None, name="", added_model_list=None, added_field_list=None, initial=False, freeze_list=None, auto=False, **options):
+
+ # Any supposed lists that are None become empty lists
+ added_model_list = added_model_list or []
+ added_field_list = added_field_list or []
+
+ # Make sure options are compatible
+ if initial and (added_model_list or added_field_list or auto):
+ print "You cannot use --initial and other options together"
+ return
+ if auto and (added_model_list or added_field_list or initial):
+ print "You cannot use --auto and other options together"
+ return
+
+ # specify the default name 'initial' if a name wasn't specified and we're
+ # doing a migration for an entire app
+ if not name and initial:
+ name = 'initial'
+
+ # if not name, there's an error
+ if not name:
+ print "You must name this migration"
+ return
+
+ if not app:
+ print "Please provide an app in which to create the migration."
+ return
+
+ # Make sure the app is short form
+ app = app.split(".")[-1]
+
+ # See if the app exists
+ app_models_module = models.get_app(app)
+ if not app_models_module:
+ print "App '%s' doesn't seem to exist, isn't in INSTALLED_APPS, or has no models." % app
+ return
+
+ # If they've set SOUTH_AUTO_FREEZE_APP = True (or not set it - defaults to True)
+ if not hasattr(settings, 'SOUTH_AUTO_FREEZE_APP') or settings.SOUTH_AUTO_FREEZE_APP:
+ if freeze_list and app not in freeze_list:
+ freeze_list += [app]
+ else:
+ freeze_list = [app]
+
+ # Make the migrations directory if it's not there
+ app_module_path = app_models_module.__name__.split('.')[0:-1]
+ try:
+ app_module = __import__('.'.join(app_module_path), {}, {}, [''])
+ except ImportError:
+ print "Couldn't find path to App '%s'." % app
+ return
+
+ migrations_dir = os.path.join(
+ os.path.dirname(app_module.__file__),
+ "migrations",
+ )
+
+ # Make sure there's a migrations directory and __init__.py
+ if not os.path.isdir(migrations_dir):
+ print "Creating migrations directory at '%s'..." % migrations_dir
+ os.mkdir(migrations_dir)
+ init_path = os.path.join(migrations_dir, "__init__.py")
+ if not os.path.isfile(init_path):
+ # Touch the init py file
+ print "Creating __init__.py in '%s'..." % migrations_dir
+ open(init_path, "w").close()
+
+ # See what filename is next in line. We assume they use numbers.
+ migrations = migration.get_migration_names(migration.get_app(app))
+ highest_number = 0
+ for migration_name in migrations:
+ try:
+ number = int(migration_name.split("_")[0])
+ highest_number = max(highest_number, number)
+ except ValueError:
+ pass
+
+ # Make the new filename
+ new_filename = "%04i%s_%s.py" % (
+ highest_number + 1,
+ "".join([random.choice(string.letters.lower()) for i in range(0)]), # Possible random stuff insertion
+ name,
+ )
+
+ # Find the source file encoding, using PEP 0263's method
+ encoding = None
+ first_two_lines = inspect.getsourcelines(app_models_module)[0][:2]
+ for line in first_two_lines:
+ if re.search("coding[:=]\s*([-\w.]+)", line):
+ encoding = line
+
+ # Initialise forwards, backwards and models to blank things
+ forwards = ""
+ backwards = ""
+ frozen_models = {} # Frozen models, used by the Fake ORM
+ stub_models = {} # Frozen models, but only enough for relation ends (old mock models)
+ complete_apps = set() # Apps that are completely frozen - useable for diffing.
+
+ # Sets of actions
+ added_models = set()
+ deleted_models = [] # Special: contains instances _not_ string keys
+ added_fields = set()
+ deleted_fields = [] # Similar to deleted_models
+ changed_fields = [] # (mkey, fname, old_def, new_def)
+ added_uniques = set() # (mkey, field_names)
+ deleted_uniques = set() # (mkey, field_names)
+
+ # --initial means 'add all models in this app'.
+ if initial:
+ for model in models.get_models(app_models_module):
+ added_models.add("%s.%s" % (app, model._meta.object_name))
+
+ # Added models might be 'model' or 'app.model'.
+ for modelname in added_model_list:
+ if "." in modelname:
+ added_models.add(modelname)
+ else:
+ added_models.add("%s.%s" % (app, modelname))
+
+ # Fields need translating from "model.field" to (app.model, field)
+ for fielddef in added_field_list:
+ try:
+ modelname, fieldname = fielddef.split(".", 1)
+ except ValueError:
+ print "The field specification '%s' is not in modelname.fieldname format." % fielddef
+ else:
+ added_fields.add(("%s.%s" % (app, modelname), fieldname))
+
+ # Add anything frozen (I almost called the dict Iceland...)
+ if freeze_list:
+ for item in freeze_list:
+ if "." in item:
+ # It's a specific model
+ app_name, model_name = item.split(".", 1)
+ model = models.get_model(app_name, model_name)
+ if model is None:
+ print "Cannot find the model '%s' to freeze it." % item
+ return
+ frozen_models[model] = None
+ else:
+ # Get everything in an app!
+ frozen_models.update(dict([(x, None) for x in models.get_models(models.get_app(item))]))
+ complete_apps.add(item.split(".")[-1])
+ # For every model in the freeze list, add in dependency stubs
+ for model in frozen_models:
+ stub_models.update(model_dependencies(model))
+
+
+ ### Automatic Detection ###
+ if auto:
+ # Get the last migration for this app
+ last_models = None
+ app_module = migration.get_app(app)
+ if app_module is None:
+ print "You cannot use automatic detection on the first migration of an app. Try --initial instead."
+ else:
+ migrations = list(migration.get_migration_classes(app_module))
+ if not migrations:
+ print "You cannot use automatic detection on the first migration of an app. Try --initial instead."
+ else:
+ if hasattr(migrations[-1], "complete_apps") and \
+ app in migrations[-1].complete_apps:
+ last_models = migrations[-1].models
+ last_orm = migrations[-1].orm
+ else:
+ print "You cannot use automatic detection, since the previous migration does not have this whole app frozen.\nEither make migrations using '--freeze %s' or set 'SOUTH_AUTO_FREEZE_APP = True' in your settings.py." % app
+
+ # Right, did we manage to get the last set of models?
+ if last_models is None:
+ return
+
+ # Good! Get new things.
+ new = dict([
+ (model_key(model), prep_for_freeze(model))
+ for model in models.get_models(app_models_module)
+ ])
+ # And filter other apps out of the old
+ old = dict([
+ (key, fields)
+ for key, fields in last_models.items()
+ if key.split(".", 1)[0] == app
+ ])
+ am, dm, cm, af, df, cf = models_diff(old, new)
+
+ # For models that were there before and after, do a meta diff
+ was_meta_change = False
+ for mkey in cm:
+ au, du = meta_diff(old[mkey].get("Meta", {}), new[mkey].get("Meta", {}))
+ for entry in au:
+ added_uniques.add((mkey, entry))
+ was_meta_change = True
+ for entry in du:
+ deleted_uniques.add((mkey, entry))
+ was_meta_change = True
+
+ if not (am or dm or af or df or cf or was_meta_change):
+ print "Nothing seems to have changed."
+ return
+
+ # Add items to the todo lists
+ added_models.update(am)
+ added_fields.update(af)
+ changed_fields.extend(cf)
+
+ # Deleted models are from the past, and so we use instances instead.
+ for mkey in dm:
+ model = last_orm[mkey]
+ fields = last_models[mkey]
+ if "Meta" in fields:
+ del fields['Meta']
+ deleted_models.append((model, fields, last_models))
+
+ # For deleted fields, we tag the instance on the end too
+ for mkey, fname in df:
+ deleted_fields.append((
+ mkey,
+ fname,
+ last_orm[mkey]._meta.get_field_by_name(fname)[0],
+ last_models[mkey][fname],
+ last_models,
+ ))
+
+
+ ### Added model ###
+ for mkey in added_models:
+
+ print " + Added model '%s'" % (mkey,)
+
+ model = model_unkey(mkey)
+
+ # Add the model's dependencies to the stubs
+ stub_models.update(model_dependencies(model))
+ # Get the field definitions
+ fields = modelsparser.get_model_fields(model)
+ # Turn the (class, args, kwargs) format into a string
+ fields = triples_to_defs(app, model, fields)
+ # Make the code
+ forwards += CREATE_TABLE_SNIPPET % (
+ model._meta.object_name,
+ model._meta.db_table,
+ "\n ".join(["('%s', %s)," % (fname, fdef) for fname, fdef in fields.items()]),
+ model._meta.app_label,
+ model._meta.object_name,
+ )
+ # And the backwards code
+ backwards += DELETE_TABLE_SNIPPET % (
+ model._meta.object_name,
+ model._meta.db_table
+ )
+ # Now add M2M fields to be done
+ for field in model._meta.local_many_to_many:
+ added_fields.add((mkey, field.attname))
+ # And unique_togethers to be added
+ for ut in model._meta.unique_together:
+ added_uniques.add((mkey, tuple(ut)))
+
+
+ ### Added fields ###
+ for mkey, field_name in added_fields:
+
+ print " + Added field '%s.%s'" % (mkey, field_name)
+
+ # Get the model
+ model = model_unkey(mkey)
+ # Get the field
+ try:
+ field = model._meta.get_field(field_name)
+ except FieldDoesNotExist:
+ print "Model '%s' doesn't have a field '%s'" % (mkey, field_name)
+ return
+
+ # ManyToMany fields need special attention.
+ if isinstance(field, models.ManyToManyField):
+ if not field.rel.through: # Bug #120
+ # Add a stub model for each side
+ stub_models[model] = None
+ stub_models[field.rel.to] = None
+ # And a field defn, that's actually a table creation
+ forwards += CREATE_M2MFIELD_SNIPPET % (
+ model._meta.object_name,
+ field.name,
+ field.m2m_db_table(),
+ field.m2m_column_name()[:-3], # strip off the '_id' at the end
+ model._meta.object_name,
+ field.m2m_reverse_name()[:-3], # strip off the '_id' at the end
+ field.rel.to._meta.object_name
+ )
+ backwards += DELETE_M2MFIELD_SNIPPET % (
+ model._meta.object_name,
+ field.name,
+ field.m2m_db_table()
+ )
+ continue
+
+ # GenericRelations need ignoring
+ if isinstance(field, GenericRelation):
+ continue
+
+ # Add any dependencies
+ stub_models.update(field_dependencies(field))
+
+ # Work out the definition
+ triple = remove_useless_attributes(
+ modelsparser.get_model_fields(model)[field_name])
+
+ field_definition = make_field_constructor(app, field, triple)
+
+ forwards += CREATE_FIELD_SNIPPET % (
+ model._meta.object_name,
+ field.name,
+ model._meta.db_table,
+ field.name,
+ field_definition,
+ )
+ backwards += DELETE_FIELD_SNIPPET % (
+ model._meta.object_name,
+ field.name,
+ model._meta.db_table,
+ field.column,
+ )
+
+
+ ### Deleted fields ###
+ for mkey, field_name, field, triple, last_models in deleted_fields:
+
+ print " - Deleted field '%s.%s'" % (mkey, field_name)
+
+ # Get the model
+ model = model_unkey(mkey)
+
+ # ManyToMany fields need special attention.
+ if isinstance(field, models.ManyToManyField):
+ # Add a stub model for each side, if they're not already there
+ # (if we just added old versions, we might override new ones)
+ if model not in stub_models:
+ stub_models[model] = last_models
+ if field.rel.to not in last_models:
+ stub_models[field.rel.to] = last_models
+ # And a field defn, that's actually a table deletion
+ forwards += DELETE_M2MFIELD_SNIPPET % (
+ model._meta.object_name,
+ field.name,
+ field.m2m_db_table()
+ )
+ backwards += CREATE_M2MFIELD_SNIPPET % (
+ model._meta.object_name,
+ field.name,
+ field.m2m_db_table(),
+ field.m2m_column_name()[:-3], # strip off the '_id' at the end
+ model._meta.object_name,
+ field.m2m_reverse_name()[:-3], # strip off the '_id' at the end
+ field.rel.to._meta.object_name
+ )
+ continue
+
+ # Add any dependencies
+ deps = field_dependencies(field, last_models)
+ deps.update(stub_models)
+ stub_models = deps
+
+ # Work out the definition
+ triple = remove_useless_attributes(triple)
+ field_definition = make_field_constructor(app, field, triple)
+
+ forwards += DELETE_FIELD_SNIPPET % (
+ model._meta.object_name,
+ field.name,
+ model._meta.db_table,
+ field.column,
+ )
+ backwards += CREATE_FIELD_SNIPPET % (
+ model._meta.object_name,
+ field.name,
+ model._meta.db_table,
+ field.name,
+ field_definition,
+ )
+
+
+ ### Deleted model ###
+ for model, fields, last_models in deleted_models:
+
+ print " - Deleted model '%s.%s'" % (model._meta.app_label,model._meta.object_name)
+
+ # Add the model's dependencies to the stubs
+ deps = model_dependencies(model, last_models)
+ deps.update(stub_models)
+ stub_models = deps
+
+ # Turn the (class, args, kwargs) format into a string
+ fields = triples_to_defs(app, model, fields)
+
+ # Make the code
+ forwards += DELETE_TABLE_SNIPPET % (
+ model._meta.object_name,
+ model._meta.db_table
+ )
+ # And the backwards code
+ backwards += CREATE_TABLE_SNIPPET % (
+ model._meta.object_name,
+ model._meta.db_table,
+ "\n ".join(["('%s', %s)," % (fname, fdef) for fname, fdef in fields.items()]),
+ model._meta.app_label,
+ model._meta.object_name,
+ )
+
+
+ ### Changed fields ###
+ for mkey, field_name, old_triple, new_triple in changed_fields:
+
+ model = model_unkey(mkey)
+ old_def = triples_to_defs(app, model, {
+ field_name: old_triple,
+ })[field_name]
+ new_def = triples_to_defs(app, model, {
+ field_name: new_triple,
+ })[field_name]
+
+ # We need to create the field, to see if it needs _id, or if it's an M2M
+ field = model._meta.get_field_by_name(field_name)[0]
+
+ if hasattr(field, "m2m_db_table"):
+ # See if anything has ACTUALLY changed
+ if old_triple[1] != new_triple[1]:
+ print " ! Detected change to the target model of M2M field '%s.%s'. South can't handle this; leaving this change out." % (mkey, field_name)
+ continue
+
+ print " ~ Changed field '%s.%s'." % (mkey, field_name)
+
+ forwards += CHANGE_FIELD_SNIPPET % (
+ model._meta.object_name,
+ field_name,
+ model._meta.db_table,
+ field.get_attname(),
+ new_def,
+ )
+
+ backwards += CHANGE_FIELD_SNIPPET % (
+ model._meta.object_name,
+ field_name,
+ model._meta.db_table,
+ field.get_attname(),
+ old_def,
+ )
+
+
+ ### Added unique_togethers ###
+ for mkey, ut in added_uniques:
+
+ model = model_unkey(mkey)
+ print " + Added unique_together for [%s] on %s." % (", ".join(ut), model._meta.object_name)
+
+ cols = [get_field_column(model, f) for f in ut]
+
+ forwards += CREATE_UNIQUE_SNIPPET % (
+ ", ".join(ut),
+ model._meta.object_name,
+ model._meta.db_table,
+ cols,
+ )
+
+ backwards += DELETE_UNIQUE_SNIPPET % (
+ ", ".join(ut),
+ model._meta.object_name,
+ model._meta.db_table,
+ cols,
+ )
+
+
+ ### Deleted unique_togethers ###
+ for mkey, ut in deleted_uniques:
+
+ model = model_unkey(mkey)
+ print " - Deleted unique_together for [%s] on %s." % (", ".join(ut), model._meta.object_name)
+
+ cols = [get_field_column(model, f) for f in ut]
+
+ forwards += DELETE_UNIQUE_SNIPPET % (
+ ", ".join(ut),
+ model._meta.object_name,
+ model._meta.db_table,
+ cols,
+ )
+
+ backwards += CREATE_UNIQUE_SNIPPET % (
+ ", ".join(ut),
+ model._meta.object_name,
+ model._meta.db_table,
+ cols,
+ )
+
+
+ # Default values for forwards/backwards
+ if (not forwards) and (not backwards):
+ forwards = '"Write your forwards migration here"'
+ backwards = '"Write your backwards migration here"'
+
+ all_models = {}
+
+ # Fill out frozen model definitions
+ for model, last_models in frozen_models.items():
+ all_models[model_key(model)] = prep_for_freeze(model, last_models)
+
+ # Fill out stub model definitions
+ for model, last_models in stub_models.items():
+ key = model_key(model)
+ if key in all_models:
+ continue # We'd rather use full models than stubs.
+ all_models[key] = prep_for_stub(model, last_models)
+
+ # Do some model cleanup, and warnings
+ for modelname, model in all_models.items():
+ for fieldname, fielddef in model.items():
+ # Remove empty-after-cleaning Metas.
+ if fieldname == "Meta" and not fielddef:
+ del model['Meta']
+ # Warn about undefined fields
+ elif fielddef is None:
+ print "WARNING: Cannot get definition for '%s' on '%s'. Please edit the migration manually." % (
+ fieldname,
+ modelname,
+ )
+ model[fieldname] = FIELD_NEEDS_DEF_SNIPPET
+
+ # Write the migration file
+ fp = open(os.path.join(migrations_dir, new_filename), "w")
+ fp.write(MIGRATION_SNIPPET % (
+ encoding or "", '.'.join(app_module_path),
+ forwards,
+ backwards,
+ pprint_frozen_models(all_models),
+ complete_apps and "complete_apps = [%s]" % (", ".join(map(repr, complete_apps))) or ""
+ ))
+ fp.close()
+ print "Created %s." % new_filename
+
+
+### Cleaning functions for freezing
+
+def prep_for_freeze(model, last_models=None):
+ if last_models:
+ fields = last_models[model_key(model)]
+ else:
+ fields = modelsparser.get_model_fields(model, m2m=True)
+ # Remove useless attributes (like 'choices')
+ for name, field in fields.items():
+ fields[name] = remove_useless_attributes(field)
+ # See if there's a Meta
+ if last_models:
+ meta = last_models[model_key(model)].get("Meta", {})
+ else:
+ meta = modelsparser.get_model_meta(model)
+ if meta:
+ fields['Meta'] = remove_useless_meta(meta)
+ return fields
+
+
+def prep_for_stub(model, last_models=None):
+ if last_models:
+ fields = last_models[model_key(model)]
+ else:
+ fields = modelsparser.get_model_fields(model)
+ # Now, take only the PK (and a 'we're a stub' field) and freeze 'em
+ pk = model._meta.pk.name
+ fields = {
+ pk: remove_useless_attributes(fields[pk]),
+ "_stub": True,
+ }
+ # Meta is important too.
+ if last_models:
+ meta = last_models[model_key(model)].get("Meta", {})
+ else:
+ meta = modelsparser.get_model_meta(model)
+ if meta:
+ fields['Meta'] = remove_useless_meta(meta)
+ return fields
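+
+# Illustrative stub (assumed model): stubbing a model whose pk is 'id' yields
+# roughly {'id': ('models.AutoField', [], {'primary_key': 'True'}), '_stub': True},
+# plus a 'Meta' entry when the model defines one.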
+
+
+### Module handling functions
+
+def model_key(model):
+ "For a given model, return 'appname.modelname'."
+ return ("%s.%s" % (model._meta.app_label, model._meta.object_name)).lower()
+
+def model_unkey(key):
+ "For 'appname.modelname', return the model."
+ app, modelname = key.split(".", 1)
+ model = models.get_model(app, modelname)
+ if not model:
+ print "Couldn't find model '%s' in app '%s'" % (modelname, app)
+ sys.exit(1)
+ return model
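+
+# Illustrative (assumed app/model): for a model Note in an app 'notes',
+# model_key(Note) returns 'notes.note', and model_unkey('notes.note')
+# resolves the model class back via models.get_model.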
+
+### Dependency resolvers
+
+def model_dependencies(model, last_models=None):
+ """
+ Returns a set of models this one depends on to be defined; things like
+ OneToOneFields as ID, ForeignKeys everywhere, etc.
+ """
+ depends = {}
+ for field in model._meta.fields + model._meta.many_to_many:
+ depends.update(field_dependencies(field, last_models))
+ return depends
+
+def stub_model_dependencies(model, last_models=None):
+ """
+ Returns a set of models this one depends on to be defined as a stub model
+ (i.e. deps of the PK).
+ """
+ return field_dependencies(model._meta.pk, last_models)
+
+def field_dependencies(field, last_models=None):
+ depends = {}
+ if isinstance(field, (models.OneToOneField, models.ForeignKey, models.ManyToManyField)):
+ depends[field.rel.to] = last_models
+ depends.update(stub_model_dependencies(field.rel.to, last_models))
+ return depends
+
+
+
+### Prettyprinters
+
+def pprint_frozen_models(models):
+ return "{\n %s\n }" % ",\n ".join([
+ "%r: %s" % (name, pprint_fields(fields))
+ for name, fields in models.items()
+ ])
+
+def pprint_fields(fields):
+ return "{\n %s\n }" % ",\n ".join([
+ "%r: %r" % (name, defn)
+ for name, defn in sorted(fields.items())
+ ])
+
+
+### Output sanitisers
+
+
+USELESS_KEYWORDS = ["choices", "help_text"]
+USELESS_DB_KEYWORDS = ["related_name", "upload_to"] # Important for ORM, not for DB.
+
+def remove_useless_attributes(field, db=False):
+ "Removes useless (for database) attributes from the field's defn."
+ keywords = db and USELESS_DB_KEYWORDS or USELESS_KEYWORDS
+ if field:
+ for name in keywords:
+ if name in field[2]:
+ del field[2][name]
+ return field
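+
+# Illustrative (assumed triple): ('models.CharField', [], {'max_length': '100',
+# 'help_text': "'x'"}) comes back with 'help_text' stripped, since it has no
+# effect on the database schema.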
+
+USELESS_META = ["verbose_name", "verbose_name_plural"]
+def remove_useless_meta(meta):
+ "Removes useless (for database) attributes from the table's meta."
+ if meta:
+ for name in USELESS_META:
+ if name in meta:
+ del meta[name]
+ return meta
+
+
+### Turns (class, args, kwargs) triples into function defs.
+
+def make_field_constructor(default_app, field, triple):
+ """
+ Given the default app, the field class,
+ and the defn triple (or string), make the definition string.
+ """
+ # It might be a defn string already...
+ if isinstance(triple, (str, unicode)):
+ return triple
+ # OK, do it the hard way
+ if hasattr(field, "rel") and hasattr(field.rel, "to") and field.rel.to:
+ rel_to = field.rel.to
+ else:
+ rel_to = None
+ args = [poss_ormise(default_app, rel_to, arg) for arg in triple[1]]
+ kwds = ["%s=%s" % (k, poss_ormise(default_app, rel_to, v)) for k,v in triple[2].items()]
+ return "%s(%s)" % (triple[0], ", ".join(args+kwds))
+
+QUOTES = ['"""', "'''", '"', "'"]
+
+def poss_ormise(default_app, rel_to, arg):
+ """
+ Given the default app, the relation target (if any), and a Python
+ eval-able argument string, prefix it with 'orm.' where it names a model.
+ """
+ orig_arg = arg
+ # If it's not a relative field, short-circuit out
+ if not rel_to:
+ return arg
+ # Get the name of the other model
+ rel_name = rel_to._meta.object_name
+ # Is it in a different app? If so, use proper addressing.
+ if rel_to._meta.app_label != default_app:
+ real_name = "orm['%s.%s']" % (rel_to._meta.app_label, rel_name)
+ else:
+ real_name = "orm.%s" % rel_name
+ # If it's surrounded by quotes, get rid of those
+ for quote_type in QUOTES:
+ l = len(quote_type)
+ if arg[:l] == quote_type and arg[-l:] == quote_type:
+ arg = arg[l:-l]
+ break
+ # Now see if we can replace it.
+ if arg.lower() == rel_name.lower():
+ return real_name
+ # Or perhaps it's app.model?
+ if arg.lower() == rel_to._meta.app_label.lower() + "." + rel_name.lower():
+ return real_name
+ # Or perhaps it's 'self'?
+ if arg == RECURSIVE_RELATIONSHIP_CONSTANT:
+ return real_name
+ return orig_arg
+
+
+### Diffing functions between sets of models
+
+def models_diff(old, new):
+ """
+ Returns the difference between the old and new sets of models as a 6-tuple:
+ added_models, deleted_models, continued_models, added_fields,
+ deleted_fields, changed_fields
+ """
+
+ added_models = set()
+ deleted_models = set()
+ ignored_models = set() # Stubs for backwards
+ continued_models = set() # Models that existed before and after
+ added_fields = set()
+ deleted_fields = set()
+ changed_fields = []
+
+ # See if anything's vanished
+ for key in old:
+ if key not in new:
+ if "_stub" not in old[key]:
+ deleted_models.add(key)
+ else:
+ ignored_models.add(key)
+
+ # Or appeared
+ for key in new:
+ if key not in old:
+ added_models.add(key)
+
+ # Now, for every model that's stayed the same, check its fields.
+ for key in old:
+ if key not in deleted_models and key not in ignored_models:
+ continued_models.add(key)
+ still_there = set()
+ # Find fields that have vanished.
+ for fieldname in old[key]:
+ if fieldname != "Meta" and fieldname not in new[key]:
+ deleted_fields.add((key, fieldname))
+ else:
+ still_there.add(fieldname)
+ # And ones that have appeared
+ for fieldname in new[key]:
+ if fieldname != "Meta" and fieldname not in old[key]:
+ added_fields.add((key, fieldname))
+ # For the ones that exist in both models, see if they were changed
+ for fieldname in still_there:
+ if fieldname != "Meta" and \
+ remove_useless_attributes(new[key][fieldname], True) != \
+ remove_useless_attributes(old[key][fieldname], True):
+ changed_fields.append((key, fieldname, old[key][fieldname], new[key][fieldname]))
+
+ return added_models, deleted_models, continued_models, added_fields, deleted_fields, changed_fields
+
+
+def meta_diff(old, new):
+ """
+ Diffs the two provided Meta definitions (dicts).
+ """
+
+ # First, diff unique_together
+ old_unique_together = eval(old.get('unique_together', "[]"))
+ new_unique_together = eval(new.get('unique_together', "[]"))
+
+ added_uniques = set()
+ removed_uniques = set()
+
+ for entry in old_unique_together:
+ if entry not in new_unique_together:
+ removed_uniques.add(tuple(entry))
+
+ for entry in new_unique_together:
+ if entry not in old_unique_together:
+ added_uniques.add(tuple(entry))
+
+ return added_uniques, removed_uniques
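+
+# Illustrative: meta_diff({'unique_together': "[('a', 'b')]"}, {}) returns
+# (set(), set([('a', 'b')])) -- nothing added, one constraint removed.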
+
+
+### Used to work out what columns any fields affect ###
+
+def get_field_column(model, field_name):
+ return model._meta.get_field_by_name(field_name)[0].column
+
+
+### Turns dicts of (class, args, kwargs) triples into definition strings
+
+
+def triples_to_defs(app, model, fields):
+ # Turn the (class, args, kwargs) format into a string
+ for field, triple in fields.items():
+ triple = remove_useless_attributes(triple)
+ if triple is None:
+ print "WARNING: Cannot get definition for '%s' on '%s'. Please edit the migration manually." % (
+ field,
+ model_key(model),
+ )
+ fields[field] = FIELD_NEEDS_DEF_SNIPPET
+ else:
+ fields[field] = make_field_constructor(
+ app,
+ model._meta.get_field_by_name(field)[0],
+ triple,
+ )
+ return fields
+
+
+### Various code snippets we need to use
+
+MIGRATION_SNIPPET = """%s
+from south.db import db
+from django.db import models
+from %s.models import *
+
+class Migration:
+
+ def forwards(self, orm):
+ %s
+
+
+ def backwards(self, orm):
+ %s
+
+
+ models = %s
+
+ %s
+"""
+CREATE_TABLE_SNIPPET = '''
+ # Adding model '%s'
+ db.create_table(%r, (
+ %s
+ ))
+ db.send_create_signal(%r, [%r])
+ '''
+DELETE_TABLE_SNIPPET = '''
+ # Deleting model '%s'
+ db.delete_table(%r)
+ '''
+CREATE_FIELD_SNIPPET = '''
+ # Adding field '%s.%s'
+ db.add_column(%r, %r, %s)
+ '''
+DELETE_FIELD_SNIPPET = '''
+ # Deleting field '%s.%s'
+ db.delete_column(%r, %r)
+ '''
+CHANGE_FIELD_SNIPPET = '''
+ # Changing field '%s.%s'
+ db.alter_column(%r, %r, %s)
+ '''
+CREATE_M2MFIELD_SNIPPET = '''
+ # Adding ManyToManyField '%s.%s'
+ db.create_table('%s', (
+ ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
+ ('%s', models.ForeignKey(%s, null=False)),
+ ('%s', models.ForeignKey(%s, null=False))
+ ))
+ '''
+DELETE_M2MFIELD_SNIPPET = '''
+ # Dropping ManyToManyField '%s.%s'
+ db.delete_table('%s')
+ '''
+CREATE_UNIQUE_SNIPPET = '''
+ # Creating unique_together for [%s] on %s.
+ db.create_unique(%r, %r)
+ '''
+DELETE_UNIQUE_SNIPPET = '''
+ # Deleting unique_together for [%s] on %s.
+ db.delete_unique(%r, %r)
+ '''
+FIELD_NEEDS_DEF_SNIPPET = "<< PUT FIELD DEFINITION HERE >>"
\ No newline at end of file
diff --git a/lib/south/management/commands/syncdb.py b/lib/south/management/commands/syncdb.py
new file mode 100644
index 0000000..7b160c2
--- /dev/null
+++ b/lib/south/management/commands/syncdb.py
@@ -0,0 +1,70 @@
+from django.core.management.base import NoArgsCommand, BaseCommand
+from django.core.management.color import no_style
+from django.utils.datastructures import SortedDict
+from optparse import make_option
+from south import migration
+from django.core.management.commands import syncdb
+from django.conf import settings
+from django.db import models
+from django.db.models.loading import cache
+from django.core import management
+import sys
+
+def get_app_name(app):
+ return '.'.join( app.__name__.split('.')[0:-1] )
+
+class Command(NoArgsCommand):
+ option_list = NoArgsCommand.option_list + (
+ make_option('--noinput', action='store_false', dest='interactive', default=True,
+ help='Tells Django to NOT prompt the user for input of any kind.'),
+ make_option('--migrate', action='store_true', dest='migrate', default=False,
+ help='Tells South to also perform migrations after the sync. This is the default during testing and other internal calls.'),
+ )
+ if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]:
+ option_list += (
+ make_option('--verbosity', action='store', dest='verbosity', default='1',
+ type='choice', choices=['0', '1', '2'],
+ help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
+ )
+ help = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created, except those which use migrations."
+
+ def handle_noargs(self, **options):
+ # Work out what uses migrations and so doesn't need syncing
+ apps_needing_sync = []
+ apps_migrated = []
+ for app in models.get_apps():
+ app_name = get_app_name(app)
+ migrations = migration.get_app(app)
+ if migrations is None:
+ apps_needing_sync.append(app_name)
+ else:
+ # This is a migrated app, leave it
+ apps_migrated.append(app_name)
+ verbosity = int(options.get('verbosity', 0))
+ # Run syncdb on only the ones needed
+ if verbosity > 0:
+ print "Syncing..."
+ old_installed, settings.INSTALLED_APPS = settings.INSTALLED_APPS, apps_needing_sync
+ old_app_store, cache.app_store = cache.app_store, SortedDict([
+ (k, v) for (k, v) in cache.app_store.items()
+ if get_app_name(k) in apps_needing_sync
+ ])
+ syncdb.Command().execute(**options)
+ settings.INSTALLED_APPS = old_installed
+ cache.app_store = old_app_store
+ # Migrate if needed
+ if options.get('migrate', True):
+ if verbosity > 0:
+ print "Migrating..."
+ management.call_command('migrate', **options)
+ # Be obvious about what we did
+ if verbosity > 0:
+ print "\nSynced:\n > %s" % "\n > ".join(apps_needing_sync)
+
+ if options.get('migrate', True):
+ if verbosity > 0:
+ print "\nMigrated:\n - %s" % "\n - ".join(apps_migrated)
+ else:
+ if verbosity > 0:
+ print "\nNot synced (use migrations):\n - %s" % "\n - ".join(apps_migrated)
+ print "(use ./manage.py migrate to migrate these)"
diff --git a/lib/south/management/commands/test.py b/lib/south/management/commands/test.py
new file mode 100644
index 0000000..808107d
--- /dev/null
+++ b/lib/south/management/commands/test.py
@@ -0,0 +1,14 @@
+from django.core import management
+from django.core.management.commands import test
+from django.core.management.commands import syncdb
+from django.conf import settings
+
+class Command(test.Command):
+
+ def handle(self, *args, **kwargs):
+ if not hasattr(settings, "SOUTH_TESTS_MIGRATE") or not settings.SOUTH_TESTS_MIGRATE:
+ # point at the core syncdb command when creating tests
+ # tests should always be up to date with the most recent model structure
+ management.get_commands()
+ management._commands['syncdb'] = 'django.core'
+ super(Command, self).handle(*args, **kwargs)
\ No newline at end of file
diff --git a/lib/south/migration.py b/lib/south/migration.py
new file mode 100644
index 0000000..b856a77
--- /dev/null
+++ b/lib/south/migration.py
@@ -0,0 +1,553 @@
+
+import datetime
+import os
+import sys
+import traceback
+import inspect
+from django.conf import settings
+from django.db import models
+from django.core.exceptions import ImproperlyConfigured
+from django.core.management import call_command
+from models import MigrationHistory
+from south.db import db
+from south.orm import FakeORM
+
+
+def get_app(app):
+ """
+ Returns the migrations module for the given app model name/module, or None
+ if it does not use migrations.
+ """
+ if isinstance(app, (str, unicode)):
+ # If it's a string, use the models module
+ app = models.get_app(app)
+ mod = __import__(app.__name__[:-7], {}, {}, ['migrations'])
+ if hasattr(mod, 'migrations'):
+ return getattr(mod, 'migrations')
+
+
+def get_migrated_apps():
+ """
+ Returns all apps with migrations.
+ """
+ for mapp in models.get_apps():
+ app = get_app(mapp)
+ if app:
+ yield app
+
+
+def get_app_name(app):
+ """
+ Returns the _internal_ app name for the given app module.
+ e.g. for <module django.contrib.auth.models> it returns 'auth'.
+ """
+ return app.__name__.split('.')[-2]
+
+
+def get_app_fullname(app):
+ """
+ Returns the full python name of an app - e.g. django.contrib.auth
+ """
+ return app.__name__[:-11]
+
+
+def short_from_long(app_name):
+ return app_name.split(".")[-1]
+
+
+def get_migration_names(app):
+ """
+ Returns a list of migration file names for the given app.
+ """
+ return sorted([
+ filename[:-3]
+ for filename in os.listdir(os.path.dirname(app.__file__))
+ if filename.endswith(".py") and filename != "__init__.py" and not filename.startswith(".")
+ ])
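+
+# Illustrative: a migrations directory holding 0001_initial.py and
+# 0002_add_field.py yields ['0001_initial', '0002_add_field'].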
+
+
+def get_migration_classes(app):
+ """
+ Returns a list of migration classes (one for each migration) for the app.
+ """
+ for name in get_migration_names(app):
+ yield get_migration(app, name)
+
+
+def get_migration(app, name):
+ """
+ Returns the migration class implied by 'name'.
+ """
+ try:
+ module = __import__(app.__name__ + "." + name, '', '', ['Migration'])
+ migclass = module.Migration
+ migclass.orm = FakeORM(migclass, get_app_name(app))
+ module._ = lambda x: x # Fake i18n
+ return migclass
+ except ImportError:
+ print " ! Migration %s:%s probably doesn't exist." % (get_app_name(app), name)
+ print " - Traceback:"
+ raise
+ except Exception, e:
+ print "While loading migration '%s.%s':" % (get_app_name(app), name)
+ raise
+
+
+def all_migrations():
+ return dict([
+ (app, dict([(name, get_migration(app, name)) for name in get_migration_names(app)]))
+ for app in get_migrated_apps()
+ ])
+
+
+def dependency_tree():
+ tree = all_migrations()
+
+ # Annotate tree with 'backwards edges'
+ for app, classes in tree.items():
+ for name, cls in classes.items():
+ cls.needs = []
+ if not hasattr(cls, "needed_by"):
+ cls.needed_by = []
+ if hasattr(cls, "depends_on"):
+ for dapp, dname in cls.depends_on:
+ dapp = get_app(dapp)
+ if dapp not in tree:
+ print "Migration %s in app %s depends on unmigrated app %s." % (
+ name,
+ get_app_name(app),
+ dapp,
+ )
+ sys.exit(1)
+ if dname not in tree[dapp]:
+ print "Migration %s in app %s depends on nonexistent migration %s in app %s." % (
+ name,
+ get_app_name(app),
+ dname,
+ get_app_name(dapp),
+ )
+ sys.exit(1)
+ cls.needs.append((dapp, dname))
+ if not hasattr(tree[dapp][dname], "needed_by"):
+ tree[dapp][dname].needed_by = []
+ tree[dapp][dname].needed_by.append((app, name))
+
+ # Sanity check whole tree
+ for app, classes in tree.items():
+ for name, cls in classes.items():
+ cls.dependencies = dependencies(tree, app, name)
+
+ return tree
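+
+# Illustrative wiring: a migration declaring depends_on = [('otherapp',
+# '0001_initial')] ends up with that pair in its 'needs' list, and the
+# target migration gains the reverse edge in its 'needed_by' list.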
+
+
+def nice_trace(trace):
+ return " -> ".join([str((get_app_name(a), n)) for a, n in trace])
+
+
+def dependencies(tree, app, name, trace=[]):
+ # Copy trace to stop pass-by-ref problems
+ trace = trace[:]
+ # Sanity check
+ for papp, pname in trace:
+ if app == papp:
+ if pname == name:
+ print "Found circular dependency: %s" % nice_trace(trace + [(app,name)])
+ sys.exit(1)
+ else:
+ # See if they depend in the same app the wrong way
+ migrations = get_migration_names(app)
+ if migrations.index(name) > migrations.index(pname):
+ print "Found a lower migration (%s) depending on a higher migration (%s) in the same app (%s)." % (pname, name, get_app_name(app))
+ print "Path: %s" % nice_trace(trace + [(app,name)])
+ sys.exit(1)
+ # Get the dependencies of a migration
+ deps = []
+ migration = tree[app][name]
+ for dapp, dname in migration.needs:
+ deps.extend(
+ dependencies(tree, dapp, dname, trace+[(app,name)])
+ )
+ return deps
+
+
+def remove_duplicates(l):
+ m = []
+ for x in l:
+ if x not in m:
+ m.append(x)
+ return m
+
+
+def needed_before_forwards(tree, app, name, sameapp=True):
+ """
+ Returns a list of migrations that must be applied before (app, name),
+ in the order they should be applied.
+ Used to make sure a migration can be applied (and to help apply up to it).
+ """
+ app_migrations = get_migration_names(app)
+ needed = []
+ if sameapp:
+ for aname in app_migrations[:app_migrations.index(name)]:
+ needed += needed_before_forwards(tree, app, aname, False)
+ needed += [(app, aname)]
+ for dapp, dname in tree[app][name].needs:
+ needed += needed_before_forwards(tree, dapp, dname)
+ needed += [(dapp, dname)]
+ return remove_duplicates(needed)
+
+
+def needed_before_backwards(tree, app, name, sameapp=True):
+ """
+ Returns a list of migrations that must be unapplied before (app, name) is,
+ in the order they should be unapplied.
+ Used to make sure a migration can be unapplied (and to help unapply up to it).
+ """
+ app_migrations = get_migration_names(app)
+ needed = []
+ if sameapp:
+ for aname in reversed(app_migrations[app_migrations.index(name)+1:]):
+ needed += needed_before_backwards(tree, app, aname, False)
+ needed += [(app, aname)]
+ for dapp, dname in tree[app][name].needed_by:
+ needed += needed_before_backwards(tree, dapp, dname)
+ needed += [(dapp, dname)]
+ return remove_duplicates(needed)
+
+
+def run_migrations(toprint, torun, recorder, app, migrations, fake=False, db_dry_run=False, silent=False):
+ """
+ Runs the specified migrations forwards/backwards, in order.
+ """
+ for migration in migrations:
+ app_name = get_app_name(app)
+ if not silent:
+ print toprint % (app_name, migration)
+ klass = get_migration(app, migration)
+
+ if fake:
+ if not silent:
+ print " (faked)"
+ else:
+
+ runfunc = getattr(klass(), torun)
+ args = inspect.getargspec(runfunc)
+
+ # If the database doesn't support running DDL inside a transaction
+ # *cough*MySQL*cough* then do a dry run first.
+ if not db.has_ddl_transactions or db_dry_run:
+ if not (hasattr(klass, "no_dry_run") and klass.no_dry_run):
+ db.dry_run = True
+ db.debug, old_debug = False, db.debug
+ pending_creates = db.get_pending_creates()
+ try:
+ if len(args[0]) == 1: # They don't want an ORM param
+ runfunc()
+ else:
+ runfunc(klass.orm)
+ except:
+ traceback.print_exc()
+ print " ! Error found during dry run of migration! Aborting."
+ return False
+ db.debug = old_debug
+ db.clear_run_data(pending_creates)
+ db.dry_run = False
+ elif db_dry_run:
+ print " - Migration '%s' is marked for no-dry-run."
+ # If they really wanted to dry-run, then quit!
+ if db_dry_run:
+ return
+
+ if db.has_ddl_transactions:
+ db.start_transaction()
+ try:
+ if len(args[0]) == 1: # They don't want an ORM param
+ runfunc()
+ else:
+ runfunc(klass.orm)
+ db.execute_deferred_sql()
+ except:
+ if db.has_ddl_transactions:
+ db.rollback_transaction()
+ raise
+ else:
+ traceback.print_exc()
+ print " ! Error found during real run of migration! Aborting."
+ print
+ print " ! Since you have a database that does not support running"
+ print " ! schema-altering statements in transactions, we have had to"
+ print " ! leave it in an interim state between migrations."
+ if torun == "forwards":
+ print
+ print " ! You *might* be able to recover with:"
+ db.debug = db.dry_run = True
+ if len(args[0]) == 1:
+ klass().backwards()
+ else:
+ klass().backwards(klass.orm)
+ print
+ print " ! The South developers regret this has happened, and would"
+ print " ! like to gently persuade you to consider a slightly"
+ print " ! easier-to-deal-with DBMS."
+ return False
+ else:
+ if db.has_ddl_transactions:
+ db.commit_transaction()
+
+ if not db_dry_run:
+ # Record us as having done this
+ recorder(app_name, migration)
+
+
+def run_forwards(app, migrations, fake=False, db_dry_run=False, silent=False):
+ """
+ Runs the specified migrations forwards, in order.
+ """
+
+ def record(app_name, migration):
+ # Record us as having done this
+ record = MigrationHistory.for_migration(app_name, migration)
+ record.applied = datetime.datetime.utcnow()
+ record.save()
+
+ return run_migrations(
+ toprint = " > %s: %s",
+ torun = "forwards",
+ recorder = record,
+ app = app,
+ migrations = migrations,
+ fake = fake,
+ db_dry_run = db_dry_run,
+ silent = silent,
+ )
+
+
+def run_backwards(app, migrations, ignore=[], fake=False, db_dry_run=False, silent=False):
+ """
+ Runs the specified migrations backwards, in order, skipping those
+ migrations in 'ignore'.
+ """
+
+ def record(app_name, migration):
+ # Record us as having not done this
+ record = MigrationHistory.for_migration(app_name, migration)
+ record.delete()
+
+ return run_migrations(
+ toprint = " < %s: %s",
+ torun = "backwards",
+ recorder = record,
+ app = app,
+ migrations = [x for x in migrations if x not in ignore],
+ fake = fake,
+ db_dry_run = db_dry_run,
+ silent = silent,
+ )
+
+
+def right_side_of(x, y):
+ return left_side_of(reversed(x), reversed(y))
+
+
+def left_side_of(x, y):
+ # Materialise first: reversed() returns an iterator with no len().
+ x, y = list(x), list(y)
+ return y[:len(x)] == x
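+
+# Illustrative: left_side_of([1, 2], [1, 2, 3]) is True (x is a prefix of y);
+# right_side_of([2, 3], [1, 2, 3]) checks the suffix the same way.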
+
+
+def forwards_problems(tree, forwards, done, silent=False):
+ problems = []
+ for app, name in forwards:
+ if (app, name) not in done:
+ for dapp, dname in needed_before_backwards(tree, app, name):
+ if (dapp, dname) in done:
+ if not silent:
+ print " ! Migration (%s, %s) should not have been applied before (%s, %s) but was." % (get_app_name(dapp), dname, get_app_name(app), name)
+ problems.append(((app, name), (dapp, dname)))
+ return problems
+
+
+
+def backwards_problems(tree, backwards, done, silent=False):
+ problems = []
+ for app, name in backwards:
+ if (app, name) in done:
+ for dapp, dname in needed_before_forwards(tree, app, name):
+ if (dapp, dname) not in done:
+ if not silent:
+ print " ! Migration (%s, %s) should have been applied before (%s, %s) but wasn't." % (get_app_name(dapp), dname, get_app_name(app), name)
+ problems.append(((app, name), (dapp, dname)))
+ return problems
+
+
+def migrate_app(app, target_name=None, resolve_mode=None, fake=False, db_dry_run=False, yes=False, silent=False, load_inital_data=False, skip=False):
+
+ app_name = get_app_name(app)
+
+ db.debug = not silent
+
+ # If any of the app names in the DB contain a '.', they're from South 0.2 or below, so migrate 'em
+ longuns = MigrationHistory.objects.filter(app_name__contains=".")
+ if longuns:
+ for mh in longuns:
+ mh.app_name = short_from_long(mh.app_name)
+ mh.save()
+ if not silent:
+ print "- Updated your South 0.2 database."
+
+ # Find out what delightful migrations we have
+ tree = dependency_tree()
+ migrations = get_migration_names(app)
+
+ # If there aren't any, quit quizzically
+ if not migrations:
+ if not silent:
+ print "? You have no migrations for the '%s' app. You might want some." % app_name
+ return
+
+ if target_name not in migrations and target_name not in ["zero", None]:
+ matches = [x for x in migrations if x.startswith(target_name)]
+ if len(matches) == 1:
+ target = migrations.index(matches[0]) + 1
+ if not silent:
+ print " - Soft matched migration %s to %s." % (
+ target_name,
+ matches[0]
+ )
+ target_name = matches[0]
+ elif len(matches) > 1:
+ if not silent:
+ print " - Prefix %s matches more than one migration:" % target_name
+ print " " + "\n ".join(matches)
+ return
+ else:
+ if not silent:
+ print " ! '%s' is not a migration." % target_name
+ return
+
+ # Check there's no strange ones in the database
+ ghost_migrations = []
+ for m in MigrationHistory.objects.filter(applied__isnull = False):
+ try:
+ if get_app(m.app_name) not in tree or m.migration not in tree[get_app(m.app_name)]:
+ ghost_migrations.append(m)
+ except ImproperlyConfigured:
+ pass
+
+ if ghost_migrations:
+ if not silent:
+ print " ! These migrations are in the database but not on disk:"
+ print " - " + "\n - ".join(["%s: %s" % (x.app_name, x.migration) for x in ghost_migrations])
+ print " ! I'm not trusting myself; fix this yourself by fiddling"
+ print " ! with the south_migrationhistory table."
+ return
+
+ # Say what we're doing
+ if not silent:
+ print "Running migrations for %s:" % app_name
+
+ # Get the forwards and reverse dependencies for this target
+ if target_name == None:
+ target_name = migrations[-1]
+ if target_name == "zero":
+ forwards = []
+ backwards = needed_before_backwards(tree, app, migrations[0]) + [(app, migrations[0])]
+ else:
+ forwards = needed_before_forwards(tree, app, target_name) + [(app, target_name)]
+ # When migrating backwards we want to remove up to and including
+ # the next migration up in this app (not the next one overall, which
+ # may belong to another app)
+ try:
+ migration_before_here = migrations[migrations.index(target_name)+1]
+ backwards = needed_before_backwards(tree, app, migration_before_here) + [(app, migration_before_here)]
+ except IndexError:
+ backwards = []
+
+ # Get the list of currently applied migrations from the db
+ current_migrations = []
+ for m in MigrationHistory.objects.filter(applied__isnull = False):
+ try:
+ current_migrations.append((get_app(m.app_name), m.migration))
+ except ImproperlyConfigured:
+ pass
+
+ direction = None
+ bad = False
+
+ # Work out the direction
+ applied_for_this_app = list(MigrationHistory.objects.filter(app_name=app_name, applied__isnull=False).order_by("migration"))
+ if target_name == "zero":
+ direction = -1
+ elif not applied_for_this_app:
+ direction = 1
+ elif migrations.index(target_name) > migrations.index(applied_for_this_app[-1].migration):
+ direction = 1
+ elif migrations.index(target_name) < migrations.index(applied_for_this_app[-1].migration):
+ direction = -1
+ else:
+ direction = None
+
+ # Is the whole forward branch applied?
+ missing = [step for step in forwards if step not in current_migrations]
+ # If they're all applied, we only know it's not backwards
+ if not missing:
+ direction = None
+ # If the remaining migrations are strictly a right segment of the forwards
+ # trace, we just need to go forwards to our target (and check for badness)
+ else:
+ problems = forwards_problems(tree, forwards, current_migrations, silent=silent)
+ if problems:
+ bad = True
+ direction = 1
+
+ # What about the whole backward trace then?
+ if not bad:
+ missing = [step for step in backwards if step not in current_migrations]
+ # If they're all missing, stick with the forwards decision
+ if missing == backwards:
+ pass
+ # If what's missing is a strict left segment of backwards (i.e.
+ # all the higher migrations) then we need to go backwards
+ else:
+ problems = backwards_problems(tree, backwards, current_migrations, silent=silent)
+ if problems:
+ bad = True
+ direction = -1
+
+ if bad and resolve_mode not in ['merge'] and not skip:
+ if not silent:
+ print " ! Inconsistent migration history"
+ print " ! The following options are available:"
+ print " --merge: will just attempt the migration ignoring any potential dependency conflicts."
+ sys.exit(1)
+
+ if direction == 1:
+ if not silent:
+ print " - Migrating forwards to %s." % target_name
+ try:
+ for mapp, mname in forwards:
+ if (mapp, mname) not in current_migrations:
+ result = run_forwards(mapp, [mname], fake=fake, db_dry_run=db_dry_run, silent=silent)
+ if result is False: # The migrations errored, but nicely.
+ return False
+ finally:
+ # Call any pending post_syncdb signals
+ db.send_pending_create_signals()
+ # Now load initial data, only if we're really doing things and ended up at current
+ if not fake and not db_dry_run and load_inital_data and target_name == migrations[-1]:
+ print " - Loading initial data for %s." % app_name
+ # Override Django's get_apps call temporarily to only load from the
+ # current app
+ old_get_apps, models.get_apps = (
+ models.get_apps,
+ lambda: [models.get_app(get_app_name(app))],
+ )
+ # Load the initial fixture
+ call_command('loaddata', 'initial_data', verbosity=1)
+ # Un-override
+ models.get_apps = old_get_apps
+ elif direction == -1:
+ if not silent:
+ print " - Migrating backwards to just after %s." % target_name
+ for mapp, mname in backwards:
+ if (mapp, mname) in current_migrations:
+ run_backwards(mapp, [mname], fake=fake, db_dry_run=db_dry_run, silent=silent)
+ else:
+ if not silent:
+ print "- Nothing to migrate."
diff --git a/lib/south/models.py b/lib/south/models.py
new file mode 100644
index 0000000..e95c79a
--- /dev/null
+++ b/lib/south/models.py
@@ -0,0 +1,19 @@
+from django.db import models
+
+class MigrationHistory(models.Model):
+ app_name = models.CharField(max_length=255)
+ migration = models.CharField(max_length=255)
+ applied = models.DateTimeField(blank=True, null=True)
+
+ @classmethod
+ def for_migration(cls, app_name, migration):
+ try:
+ return cls.objects.get(
+ app_name = app_name,
+ migration = migration,
+ )
+ except cls.DoesNotExist:
+ return cls(
+ app_name = app_name,
+ migration = migration,
+ )
\ No newline at end of file
diff --git a/lib/south/modelsparser.py b/lib/south/modelsparser.py
new file mode 100644
index 0000000..b7f972a
--- /dev/null
+++ b/lib/south/modelsparser.py
@@ -0,0 +1,398 @@
+"""
+Parsing module for models.py files. Extracts information in a more reliable
+way than inspect + regexes.
+"""
+
+import re
+import inspect
+import parser
+import symbol
+import token
+import keyword
+
+from django.db import models
+
+
+def name_that_thing(thing):
+ "Turns a symbol/token int into its name."
+ for name in dir(symbol):
+ if getattr(symbol, name) == thing:
+ return "symbol.%s" % name
+ for name in dir(token):
+ if getattr(token, name) == thing:
+ return "token.%s" % name
+ return str(thing)
+
+
+def thing_that_name(name):
+ "Turns a name of a symbol/token into its integer value."
+ if name in dir(symbol):
+ return getattr(symbol, name)
+ if name in dir(token):
+ return getattr(token, name)
+ raise ValueError("Cannot convert '%s'" % name)
+
+
+def prettyprint(tree, indent=0, omit_singles=False):
+ "Prettyprints the tree, with symbol/token names. For debugging."
+ if omit_singles and isinstance(tree, tuple) and len(tree) == 2:
+ return prettyprint(tree[1], indent, omit_singles)
+ if isinstance(tree, tuple):
+ return " (\n%s\n" % "".join([prettyprint(x, indent+1) for x in tree]) + \
+ (" " * indent) + ")"
+ elif isinstance(tree, int):
+ return (" " * indent) + name_that_thing(tree)
+ else:
+ return " " + repr(tree)
+
+
+class STTree(object):
+
+ "A syntax tree wrapper class."
+
+ def __init__(self, tree):
+ self.tree = tree
+
+
+ def __eq__(self, other):
+ return other.tree == self.tree
+
+
+ def __hash__(self):
+ return hash(self.tree)
+
+
+ @property
+ def root(self):
+ return self.tree[0]
+
+
+ @property
+ def value(self):
+ return self.tree
+
+
+ def walk(self, recursive=True):
+ """
+ Yields (symbol, subtree) for the entire subtree.
+ Comes out with node 1, node 1's children, node 2, etc.
+ """
+ stack = [self.tree]
+ done_outer = False
+ while stack:
+ atree = stack.pop()
+ if isinstance(atree, tuple):
+ if done_outer:
+ yield atree[0], STTree(atree)
+ if recursive or not done_outer:
+ for bit in reversed(atree[1:]):
+ stack.append(bit)
+ done_outer = True
+
+
+ def flatten(self):
+ "Yields the tokens/symbols in the tree only, in order."
+ bits = []
+ for sym, subtree in self.walk():
+ if sym in token_map:
+ bits.append(sym)
+ elif sym == token.NAME:
+ bits.append(subtree.value)
+ elif sym == token.STRING:
+ bits.append(subtree.value)
+ elif sym == token.NUMBER:
+ bits.append(subtree.value)
+ return bits
+
+
+ def reform(self):
+ "Prints how the tree's input probably looked."
+ return reform(self.flatten())
+
+
+ def findAllType(self, ntype, recursive=True):
+ "Returns all nodes with the given type in the tree."
+ for symbol, subtree in self.walk(recursive=recursive):
+ if symbol == ntype:
+ yield subtree
+
+
+ def find(self, selector):
+ """
+ Searches the syntax tree with a CSS-like selector syntax.
+ You can use things like 'suite simple_stmt', 'suite, simple_stmt'
+ or 'suite > simple_stmt'. Not guaranteed to return in order.
+ """
+ # Split up the overall parts
+ patterns = [x.strip() for x in selector.split(",")]
+ results = []
+ for pattern in patterns:
+ # Split up the parts
+ parts = re.split(r'(?:[\s]|(>))+', pattern)
+ # Take the first part, use it for results
+ if parts[0] == "^":
+ subresults = [self]
+ else:
+ subresults = list(self.findAllType(thing_that_name(parts[0])))
+ recursive = True
+ # For each remaining part, do something
+ for part in parts[1:]:
+ if not subresults:
+ break
+ if part == ">":
+ recursive = False
+ elif not part:
+ pass
+ else:
+ thing = thing_that_name(part)
+ newresults = [
+ list(tree.findAllType(thing, recursive))
+ for tree in subresults
+ ]
+ subresults = []
+ for stuff in newresults:
+ subresults.extend(stuff)
+ recursive = True
+ results.extend(subresults)
+ return results
+
+
+ def __str__(self):
+ return prettyprint(self.tree)
+ __repr__ = __str__
+
+
+def get_model_tree(model):
+ # Get the source of the model's file
+ source = open(inspect.getsourcefile(model)).read().replace("\r\n", "\n").replace("\r","\n") + "\n"
+ tree = STTree(parser.suite(source).totuple())
+ # Now, we have to find it
+ for poss in tree.find("compound_stmt"):
+ if poss.value[1][0] == symbol.classdef and \
+ poss.value[1][2][1].lower() == model.__name__.lower():
+ # This is the tree
+ return poss
+
+
+token_map = {
+ token.DOT: ".",
+ token.LPAR: "(",
+ token.RPAR: ")",
+ token.EQUAL: "=",
+ token.EQEQUAL: "==",
+ token.COMMA: ",",
+ token.LSQB: "[",
+ token.RSQB: "]",
+ token.AMPER: "&",
+ token.BACKQUOTE: "`",
+ token.CIRCUMFLEX: "^",
+ token.CIRCUMFLEXEQUAL: "^=",
+ token.COLON: ":",
+ token.DOUBLESLASH: "//",
+ token.DOUBLESLASHEQUAL: "//=",
+ token.DOUBLESTAR: "**",
+ token.DOUBLESLASHEQUAL: "**=",
+ token.GREATER: ">",
+ token.LESS: "<",
+ token.GREATEREQUAL: ">=",
+ token.LESSEQUAL: "<=",
+ token.LBRACE: "{",
+ token.RBRACE: "}",
+ token.SEMI: ";",
+ token.PLUS: "+",
+ token.MINUS: "-",
+ token.STAR: "*",
+ token.SLASH: "/",
+ token.VBAR: "|",
+ token.PERCENT: "%",
+ token.TILDE: "~",
+ token.AT: "@",
+ token.NOTEQUAL: "!=",
+ token.LEFTSHIFT: "<<",
+ token.RIGHTSHIFT: ">>",
+ token.LEFTSHIFTEQUAL: "<<=",
+ token.RIGHTSHIFTEQUAL: ">>=",
+ token.PLUSEQUAL: "+=",
+ token.MINEQUAL: "-=",
+ token.STAREQUAL: "*=",
+ token.SLASHEQUAL: "/=",
+ token.VBAREQUAL: "|=",
+ token.PERCENTEQUAL: "%=",
+ token.AMPEREQUAL: "&=",
+}
+
+
+def reform(bits):
+ "Returns the string that the list of tokens/symbols 'bits' represents"
+ output = ""
+ for bit in bits:
+ if bit in token_map:
+ output += token_map[bit]
+ elif bit[0] in [token.NAME, token.STRING, token.NUMBER]:
+ if keyword.iskeyword(bit[1]):
+ output += " %s " % bit[1]
+ else:
+ if bit[1] not in symbol.sym_name:
+ output += bit[1]
+ return output
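+
+# Illustrative round trip: flattening parser.suite("x = y(1)").totuple() via
+# STTree and reforming it gives back 'x=y(1)' -- whitespace is not preserved.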
+
+
+def parse_arguments(argstr):
+ """
+ Takes a string representing arguments and returns the positional and
+ keyword argument list and dict respectively.
+ All the entries in these are python source, except the dict keys.
+ """
+ # Get the tree
+ tree = STTree(parser.suite(argstr).totuple())
+
+ # Initialise the lists
+ curr_kwd = None
+ args = []
+ kwds = {}
+
+ # Walk through, assigning things
+ testlists = tree.find("testlist")
+ for i, testlist in enumerate(testlists):
+ # BTW: A testlist is to the left or right of an =.
+ items = list(testlist.walk(recursive=False))
+ for j, item in enumerate(items):
+ if item[0] == symbol.test:
+ if curr_kwd:
+ kwds[curr_kwd] = item[1].reform()
+ curr_kwd = None
+ elif j == len(items)-1 and i != len(testlists)-1:
+ # Last item in a group must be a keyword, unless it's last overall
+ curr_kwd = item[1].reform()
+ else:
+ args.append(item[1].reform())
+ return args, kwds
+
+
+def extract_field(tree):
+ # Collapses the tree and tries to parse it as a field def
+ bits = tree.flatten()
+ ## Check it looks right:
+ # Second token should be equals
+ if len(bits) < 2 or bits[1] != token.EQUAL:
+ return
+ ## Split into meaningful sections
+ name = bits[0][1]
+ declaration = bits[2:]
+ # Find the first LPAR; stuff before that is the class.
+ try:
+ lpar_at = declaration.index(token.LPAR)
+ except ValueError:
+ return
+ clsname = reform(declaration[:lpar_at])
+ # Now, inside that, find the last RPAR, and we'll take the stuff between
+ # them as the arguments
+ declaration.reverse()
+ rpar_at = (len(declaration) - 1) - declaration.index(token.RPAR)
+ declaration.reverse()
+ args = declaration[lpar_at+1:rpar_at]
+ # Now, extract the arguments as a list and dict
+ try:
+ args, kwargs = parse_arguments(reform(args))
+ except SyntaxError:
+ return
+ # OK, extract and reform it
+ return name, clsname, args, kwargs
+
+
+
+def get_model_fields(model, m2m=False):
+ """
+ Given a model class, returns a dict mapping field names to their
+ frozen constructor triples (or premade definition strings).
+ """
+ tree = get_model_tree(model)
+ if tree is None:
+ raise ValueError("Cannot find source for model '%s'." % model)
+ possible_field_defs = tree.find("^ > classdef > suite > stmt > simple_stmt > small_stmt > expr_stmt")
+ field_defs = {}
+
+ # Go through all the found defns, and try to parse them
+ for pfd in possible_field_defs:
+ field = extract_field(pfd)
+ if field:
+ field_defs[field[0]] = field[1:]
+
+ inherited_fields = {}
+ # Go through all bases (that are themselves models, but not Model)
+ for base in model.__bases__:
+ if base != models.Model and issubclass(base, models.Model):
+ inherited_fields.update(get_model_fields(base))
+
+ # Now, go through all the fields and try to get their definition
+ source = model._meta.local_fields[:]
+ if m2m:
+ source += model._meta.local_many_to_many
+ fields = {}
+ for field in source:
+ # Get its name
+ fieldname = field.name
+ if isinstance(field, models.related.RelatedObject):
+ continue
+ # Now, try to get the defn
+ if fieldname in field_defs:
+ fields[fieldname] = field_defs[fieldname]
+ # Try the South definition workaround?
+ elif hasattr(field, 'south_field_triple'):
+ fields[fieldname] = field.south_field_triple()
+ elif hasattr(field, 'south_field_definition'):
+ print "Your custom field %s provides the outdated south_field_definition method.\nPlease consider implementing south_field_triple too; it's more reliably evaluated." % field
+ fields[fieldname] = field.south_field_definition()
+ # Try a parent?
+ elif fieldname in inherited_fields:
+ fields[fieldname] = inherited_fields[fieldname]
+ # Is it a _ptr?
+ elif fieldname.endswith("_ptr"):
+ fields[fieldname] = ("models.OneToOneField", ["orm['%s.%s']" % (field.rel.to._meta.app_label, field.rel.to._meta.object_name)], {})
+ # Try a default for 'id'.
+ elif fieldname == "id":
+ fields[fieldname] = ("models.AutoField", [], {"primary_key": "True"})
+ else:
+ fields[fieldname] = None
+
+ return fields
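+
+# Illustrative output (assumed model): a model declaring only
+# title = models.CharField(max_length=100) comes back as
+# {'id': ('models.AutoField', [], {'primary_key': 'True'}),
+# 'title': ('models.CharField', [], {'max_length': '100'})}.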
+
+
+def get_model_meta(model):
+ """
+ Given a model class, returns a dict representing its Meta class.
+ """
+ tree = get_model_tree(model)
+
+ result = {}
+
+ # First, try to get any unusual inherited classes
+ for base in model.__bases__:
+ if base is not models.Model:
+ if hasattr(base, '_meta') and not base._meta.abstract:
+ # Abstract models' fields are included anyway, and we don't
+ # want extra dependencies
+ if "_bases" not in result:
+ result['_bases'] = []
+ result['_bases'].append(base.__module__ + "." + base.__name__)
+
+ # Find all classes exactly two levels deep
+ possible_meta_classes = set(tree.find("classdef classdef"))
+ possible_meta_classes.difference_update(set(tree.find("classdef classdef classdef"))) # in-place; difference() alone is a no-op
+
+ # Select only those called 'Meta', and expand all their assignments
+ possible_meta_classes = [
+ tree.find("^ > suite > stmt > simple_stmt > small_stmt > expr_stmt")
+ for tree in possible_meta_classes
+ if tree.value[2][1] == "Meta"
+ ]
+
+ if possible_meta_classes:
+ # Now, for each possible definition, try it. (Only for last Meta,
+ # since that's how python interprets it)
+ for defn in possible_meta_classes[-1]:
+ bits = defn.flatten()
+ if len(bits) > 1 and bits[1] == token.EQUAL:
+ result[bits[0][1]] = reform(bits[2:])
+
+ return result or None
diff --git a/lib/south/orm.py b/lib/south/orm.py
new file mode 100644
index 0000000..69a02f7
--- /dev/null
+++ b/lib/south/orm.py
@@ -0,0 +1,278 @@
+"""
+South's fake ORM; lets you not have to write SQL inside migrations.
+Roughly emulates the real Django ORM, to a point.
+"""
+
+import inspect
+
+from django.db import models
+from django.db.models.loading import cache
+
+from south.db import db
+
+
+class ModelsLocals(object):
+
+ """
+ Custom dictionary-like class to be locals();
+ falls back to lowercase search for items that don't exist
+ (because we store model names as lowercase).
+ """
+
+ def __init__(self, data):
+ self.data = data
+
+ def __getitem__(self, key):
+ try:
+ return self.data[key]
+ except KeyError:
+ return self.data[key.lower()]
+
+
+class FakeORM(object):
+
+ """
+ Simulates the Django ORM at some point in time,
+ using a frozen definition on the Migration class.
+ """
+
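+ # Typical use inside a migration (illustrative, assuming a frozen model
+ # 'notes.Note'): a forwards(self, orm) method can reach the historical
+ # model class as orm.Note, or as orm['notes.Note'] from another app.
+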
+ def __init__(self, cls, app):
+ self.default_app = app
+ self.cls = cls
+ # Try loading the models off the migration class; default to no models.
+ self.models = {}
+ try:
+ self.models_source = cls.models
+ except AttributeError:
+ return
+
+ # Now, make each model's data into a FakeModel
+ for name, data in self.models_source.items():
+ # Make sure there's some kind of Meta
+ if "Meta" not in data:
+ data['Meta'] = {}
+ try:
+ app_name, model_name = name.split(".", 1)
+ except ValueError:
+ app_name = self.default_app
+ model_name = name
+ name = "%s.%s" % (app_name, model_name)
+
+ self.models[name.lower()] = self.make_model(app_name, model_name, data)
+
+ # And perform the second run to iron out any circular/backwards depends.
+ self.retry_failed_fields()
+
+
+ def __getattr__(self, key):
+ fullname = (self.default_app+"."+key).lower()
+ try:
+ return self.models[fullname]
+ except KeyError:
+ raise AttributeError("The model '%s' from the app '%s' is not available in this migration." % (key, self.default_app))
+
+
+ def __getitem__(self, key):
+ key = key.lower()
+ try:
+ return self.models[key]
+ except KeyError:
+ try:
+ app, model = key.split(".", 1)
+ except ValueError:
+ raise KeyError("The model '%s' is not in appname.modelname format." % key)
+ else:
+ raise KeyError("The model '%s' from the app '%s' is not available in this migration." % (model, app))
+
+
+ def eval_in_context(self, code, app):
+ "Evaluates the given code in the context of the migration file."
+
+ # Drag in the migration module's locals (hopefully including models.py)
+ fake_locals = dict(inspect.getmodule(self.cls).__dict__)
+
+ # Remove all models from that (i.e. from modern models.py), to stop pollution
+ for key, value in fake_locals.items():
+ if isinstance(value, type) and issubclass(value, models.Model) and hasattr(value, "_meta"):
+ del fake_locals[key]
+
+ # We add our models into the locals for the eval
+ fake_locals.update(dict([
+ (name.split(".")[-1], model)
+ for name, model in self.models.items()
+ ]))
+
+ # Make sure the ones for this app override.
+ fake_locals.update(dict([
+ (name.split(".")[-1], model)
+ for name, model in self.models.items()
+ if name.split(".")[0] == app
+ ]))
+
+ # Ourselves as orm, to allow non-fail cross-app referencing
+ fake_locals['orm'] = self
+
+ # And a fake _ function
+ fake_locals['_'] = lambda x: x
+
+ # Use ModelsLocals to make lookups work right for CapitalisedModels
+ fake_locals = ModelsLocals(fake_locals)
+
+ return eval(code, globals(), fake_locals)
+
+
+ def make_meta(self, app, model, data, stub=False):
+ "Makes a Meta class out of a dict of eval-able arguments."
+ results = {}
+ for key, code in data.items():
+ # Some things we never want to use.
+ if key in ["_bases"]:
+ continue
+ # Some things we don't want with stubs.
+ if stub and key in ["order_with_respect_to"]:
+ continue
+ # OK, add it.
+ try:
+ results[key] = self.eval_in_context(code, app)
+ except (NameError, AttributeError), e:
+ raise ValueError("Cannot successfully create meta field '%s' for model '%s.%s': %s." % (
+ key, app, model, e
+ ))
+ return type("Meta", tuple(), results)
+
+
+ def make_model(self, app, name, data):
+ "Makes a Model class out of the given app name, model name and pickled data."
+
+ # Extract any bases out of Meta
+ if "_bases" in data['Meta']:
+ bases = data['Meta']['_bases']
+ else:
+ bases = ['django.db.models.Model']
+
+ # Turn the Meta dict into a basic class
+ meta = self.make_meta(app, name, data['Meta'], data.get("_stub", False))
+
+ failed_fields = {}
+ fields = {}
+ stub = False
+
+ # Now, make some fields!
+ for fname, params in data.items():
+ if fname == "_stub":
+ stub = bool(params)
+ continue
+ elif fname == "Meta":
+ continue
+ elif not params:
+ raise ValueError("Field '%s' on model '%s.%s' has no definition." % (fname, app, name))
+ elif isinstance(params, (str, unicode)):
+ # It's a premade definition string! Let's hope it works...
+ code = params
+ elif len(params) == 1:
+ code = "%s()" % params[0]
+ elif len(params) == 3:
+ code = "%s(%s)" % (
+ params[0],
+ ", ".join(
+ params[1] +
+ ["%s=%s" % (n, v) for n, v in params[2].items()]
+ ),
+ )
+ else:
+ raise ValueError("Field '%s' on model '%s.%s' has a weird definition length (should be 1 or 3 items)." % (fname, app, name))
+
+ try:
+ field = self.eval_in_context(code, app)
+ except (NameError, AttributeError, AssertionError):
+ # It might rely on other models being around. Add it to the
+ # model for the second pass.
+ failed_fields[fname] = code
+ else:
+ fields[fname] = field
+
+ # Find the app in the Django core, and get its module
+ more_kwds = {}
+ app_module = models.get_app(app)
+ more_kwds['__module__'] = app_module.__name__
+
+ more_kwds['Meta'] = meta
+
+ # Stop AppCache from changing!
+ cache.app_models[app], old_app_models = {}, cache.app_models[app]
+
+ # Make our model
+ fields.update(more_kwds)
+
+ model = type(
+ name,
+ tuple(map(ask_for_it_by_name, bases)),
+ fields,
+ )
+
+ # Send AppCache back in time
+ cache.app_models[app] = old_app_models
+
+ # If this is a stub model, change Objects to a whiny class
+ if stub:
+ model.objects = WhinyManager()
+ # Also, make sure they can't instantiate it
+ model.__init__ = whiny_method
+ else:
+ model.objects = NoDryRunManager(model.objects)
+
+ if failed_fields:
+ model._failed_fields = failed_fields
+
+ return model
+
+ def retry_failed_fields(self):
+ "Tries to re-evaluate the _failed_fields for each model."
+ for modelkey, model in self.models.items():
+ app, modelname = modelkey.split(".", 1)
+ if hasattr(model, "_failed_fields"):
+ for fname, code in model._failed_fields.items():
+ try:
+ field = self.eval_in_context(code, app)
+ except (NameError, AttributeError, AssertionError), e:
+ # It's failed again. Complain.
+ raise ValueError("Cannot successfully create field '%s' for model '%s': %s." % (
+ fname, modelname, e
+ ))
+ else:
+ # Startup that field.
+ model.add_to_class(fname, field)
+
+
+class WhinyManager(object):
+ "A fake manager that whines whenever you try to touch it. For stub models."
+
+ def __getattr__(self, key):
+ raise AttributeError("You cannot use items from a stub model.")
+
+
+class NoDryRunManager(object):
+ """
+ A manager that always proxies through to the real manager,
+ unless a dry run is in progress.
+ """
+
+ def __init__(self, real):
+ self.real = real
+
+ def __getattr__(self, name):
+ if db.dry_run:
+ raise AttributeError("You are in a dry run, and cannot access the ORM.\nWrap ORM sections in 'if not db.dry_run:', or if the whole migration is only a data migration, set no_dry_run = True on the Migration class.")
+ return getattr(self.real, name)
+
+
+def ask_for_it_by_name(name):
+ "Returns an object referenced by absolute path."
+ bits = name.split(".")
+ modulename = ".".join(bits[:-1])
+ module = __import__(modulename, {}, {}, bits[-1])
+ return getattr(module, bits[-1])
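+
+# Illustrative: ask_for_it_by_name('django.db.models.Model') imports
+# django.db.models and returns the Model class itself.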
+
+
+def whiny_method(*a, **kw):
+ raise ValueError("You cannot instantiate a stub model.")
diff --git a/lib/south/tests/__init__.py b/lib/south/tests/__init__.py
new file mode 100644
index 0000000..85eabb8
--- /dev/null
+++ b/lib/south/tests/__init__.py
@@ -0,0 +1,77 @@
+
+import os
+import unittest
+from django.conf import settings
+
+# Note: the individual test files are imported below this.
+
+class Monkeypatcher(unittest.TestCase):
+
+ """
+ Base test class for tests that play with the INSTALLED_APPS setting at runtime.
+ """
+
+ def create_fake_app(self, name):
+
+ class Fake:
+ pass
+
+ fake = Fake()
+ fake.__name__ = name
+ return fake
+
+
+ def create_test_app(self):
+
+ class Fake:
+ pass
+
+ fake = Fake()
+ fake.__name__ = "fakeapp.migrations"
+ fake.__file__ = os.path.join(test_root, "fakeapp", "migrations", "__init__.py") # test_root: assumed to be provided by the test setup
+ return fake
+
+
+ def monkeypatch(self):
+ """Swaps out various Django calls for fake ones for our own nefarious purposes."""
+
+ def new_get_apps():
+ return ['fakeapp']
+
+ from django.db import models
+ from django.conf import settings
+ models.get_apps_old, models.get_apps = models.get_apps, new_get_apps
+ settings.INSTALLED_APPS, settings.OLD_INSTALLED_APPS = (
+ ["fakeapp"],
+ settings.INSTALLED_APPS,
+ )
+ self.redo_app_cache()
+ setUp = monkeypatch
+
+
+ def unmonkeypatch(self):
+ """Undoes what monkeypatch did."""
+
+ from django.db import models
+ from django.conf import settings
+ models.get_apps = models.get_apps_old
+ settings.INSTALLED_APPS = settings.OLD_INSTALLED_APPS
+ self.redo_app_cache()
+ tearDown = unmonkeypatch
+
+
+ def redo_app_cache(self):
+ from django.db.models.loading import AppCache
+ a = AppCache()
+ a.loaded = False
+ a._populate()
+
+# Try importing all tests if asked for (then we can run 'em)
+try:
+ skiptest = settings.SKIP_SOUTH_TESTS
+except AttributeError:
+ skiptest = False
+
+if not skiptest:
+ from south.tests.db import *
+ from south.tests.logic import *
+ from south.tests.modelsparser import *
\ No newline at end of file
diff --git a/lib/south/tests/db.py b/lib/south/tests/db.py
new file mode 100644
index 0000000..03971a9
--- /dev/null
+++ b/lib/south/tests/db.py
@@ -0,0 +1,318 @@
+import unittest
+
+from south.db import db
+from django.db import connection, models
+
+# Create a list of error classes from the various database libraries
+errors = []
+try:
+ from psycopg2 import ProgrammingError
+ errors.append(ProgrammingError)
+except ImportError:
+ pass
+errors = tuple(errors)
+
+class TestOperations(unittest.TestCase):
+
+ """
+ Tests if the various DB abstraction calls work.
+ Can only test a limited amount due to DB differences.
+ """
+
+ def setUp(self):
+ db.debug = False
+ db.clear_deferred_sql()
+
+ def test_create(self):
+ """
+ Test creation and deletion of tables.
+ """
+ cursor = connection.cursor()
+ # It needs to take at least 2 args
+ self.assertRaises(TypeError, db.create_table)
+ self.assertRaises(TypeError, db.create_table, "test1")
+ # Empty tables (i.e. no columns) aren't allowed, so create it with at least one column
+ db.create_table("test1", [('email_confirmed', models.BooleanField(default=False))])
+ db.start_transaction()
+ # And should exist
+ cursor.execute("SELECT * FROM test1")
+ # Make sure we can't do the same query on an empty table
+ try:
+ cursor.execute("SELECT * FROM nottheretest1")
+ self.fail("Non-existent table could be selected!")
+ except:
+ pass
+ # Clear the dirty transaction
+ db.rollback_transaction()
+ db.start_transaction()
+ # Remove the table
+ db.drop_table("test1")
+ # Make sure it went
+ try:
+ cursor.execute("SELECT * FROM test1")
+ self.fail("Just-deleted table could be selected!")
+ except:
+ pass
+ # Clear the dirty transaction
+ db.rollback_transaction()
+ db.start_transaction()
+ # Try deleting a nonexistent one
+ try:
+ db.delete_table("nottheretest1")
+ self.fail("Non-existent table could be deleted!")
+ except:
+ pass
+ db.rollback_transaction()
+
+ def test_foreign_keys(self):
+ """
+ Tests foreign key creation, especially uppercase (see #61)
+ """
+ Test = db.mock_model(model_name='Test', db_table='test5a',
+ db_tablespace='', pk_field_name='ID',
+ pk_field_type=models.AutoField, pk_field_args=[])
+ cursor = connection.cursor()
+ db.start_transaction()
+ db.create_table("test5a", [('ID', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True))])
+ db.create_table("test5b", [
+ ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
+ ('UNIQUE', models.ForeignKey(Test)),
+ ])
+ db.execute_deferred_sql()
+ db.rollback_transaction()
+
+ def test_rename(self):
+ """
+ Test column renaming
+ """
+ cursor = connection.cursor()
+ db.create_table("test_rn", [('spam', models.BooleanField(default=False))])
+ db.start_transaction()
+ # Make sure we can select the column
+ cursor.execute("SELECT spam FROM test_rn")
+ # Rename it
+ db.rename_column("test_rn", "spam", "eggs")
+ cursor.execute("SELECT eggs FROM test_rn")
+ try:
+ cursor.execute("SELECT spam FROM test_rn")
+ self.fail("Just-renamed column could be selected!")
+ except:
+ pass
+ db.rollback_transaction()
+ db.delete_table("test_rn")
+
+ def test_dry_rename(self):
+ """
+ Test column renaming while --dry-run is turned on (should do nothing)
+ See ticket #65
+ """
+ cursor = connection.cursor()
+ db.create_table("test_drn", [('spam', models.BooleanField(default=False))])
+ db.start_transaction()
+ # Make sure we can select the column
+ cursor.execute("SELECT spam FROM test_drn")
+ # Rename it
+ db.dry_run = True
+ db.rename_column("test_drn", "spam", "eggs")
+ db.dry_run = False
+ cursor.execute("SELECT spam FROM test_drn")
+ try:
+ cursor.execute("SELECT eggs FROM test_drn")
+ self.fail("Dry-renamed new column could be selected!")
+ except:
+ pass
+ db.rollback_transaction()
+ db.delete_table("test_drn")
+
+ def test_table_rename(self):
+ """
+ Test table renaming
+ """
+ cursor = connection.cursor()
+ db.create_table("testtr", [('spam', models.BooleanField(default=False))])
+ db.start_transaction()
+ # Make sure we can select the column
+ cursor.execute("SELECT spam FROM testtr")
+ # Rename it
+ db.rename_table("testtr", "testtr2")
+ cursor.execute("SELECT spam FROM testtr2")
+ try:
+ cursor.execute("SELECT spam FROM testtr")
+ self.fail("Just-renamed column could be selected!")
+ except:
+ pass
+ db.rollback_transaction()
+ db.delete_table("testtr2")
+
+ def test_index(self):
+ """
+ Test the index operations
+ """
+ db.create_table("test3", [
+ ('SELECT', models.BooleanField(default=False)),
+ ('eggs', models.IntegerField(unique=True)),
+ ])
+ db.execute_deferred_sql()
+ db.start_transaction()
+ # Add an index on that column
+ db.create_index("test3", ["SELECT"])
+ # Add another index on two columns
+ db.create_index("test3", ["SELECT", "eggs"])
+ # Delete them both
+ db.delete_index("test3", ["SELECT"])
+ db.delete_index("test3", ["SELECT", "eggs"])
+ # Delete the unique index/constraint
+ db.delete_unique("test3", ["eggs"])
+ db.rollback_transaction()
+ db.delete_table("test3")
+
+ def test_primary_key(self):
+ """
+ Test the primary key operations
+ """
+ db.create_table("test_pk", [
+ ('id', models.IntegerField(primary_key=True)),
+ ('new_pkey', models.IntegerField()),
+ ('eggs', models.IntegerField(unique=True)),
+ ])
+ db.execute_deferred_sql()
+ db.start_transaction()
+ # Remove the default primary key, and make new_pkey the primary key instead
+ db.drop_primary_key("test_pk")
+ db.create_primary_key("test_pk", "new_pkey")
+ # Try inserting a row pair that's only valid now: duplicate ids, distinct new_pkey values
+ db.execute("INSERT INTO test_pk (id, new_pkey, eggs) VALUES (1, 2, 3), (1, 3, 4)")
+ db.rollback_transaction()
+ db.delete_table("test_pk")
+
+ def test_alter(self):
+ """
+ Test altering columns/tables
+ """
+ db.create_table("test4", [
+ ('spam', models.BooleanField(default=False)),
+ ('eggs', models.IntegerField()),
+ ])
+ db.start_transaction()
+ # Add a column
+ db.add_column("test4", "add1", models.IntegerField(default=3), keep_default=False)
+ # Add a FK with keep_default=False (#69)
+ User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
+ db.add_column("test4", "user", models.ForeignKey(User), keep_default=False)
+ db.delete_column("test4", "add1")
+
+ db.rollback_transaction()
+ db.delete_table("test4")
+
+ def test_alter_column_postgres_multiword(self):
+ """
+ Tests altering columns with multiple words in Postgres types (issue #125)
+ e.g. 'timestamp with time zone'; see django/db/backends/postgresql/creation.py
+ """
+ db.create_table("test_multiword", [
+ ('col_datetime', models.DateTimeField(null=True)),
+ ('col_integer', models.PositiveIntegerField(null=True)),
+ ('col_smallint', models.PositiveSmallIntegerField(null=True)),
+ ('col_float', models.FloatField(null=True)),
+ ])
+
+ # test if 'double precision' is preserved
+ db.alter_column('test_multiword', 'col_float', models.FloatField('float', null=True))
+
+ # test if 'CHECK ("%(column)s" >= 0)' is stripped
+ db.alter_column('test_multiword', 'col_integer', models.PositiveIntegerField(null=True))
+ db.alter_column('test_multiword', 'col_smallint', models.PositiveSmallIntegerField(null=True))
+
+ # test if 'with timezone' is preserved
+ if db.backend_name == "postgres":
+ db.start_transaction()
+ db.execute("INSERT INTO test_multiword (col_datetime) VALUES ('2009-04-24 14:20:55+02')")
+ db.alter_column('test_multiword', 'col_datetime', models.DateTimeField(auto_now=True))
+ assert db.execute("SELECT col_datetime = '2009-04-24 14:20:55+02' FROM test_multiword")[0][0]
+ db.rollback_transaction()
+
+ db.delete_table("test_multiword")
+
+ def test_alter_constraints(self):
+ """
+ Tests that going from a PositiveIntegerField to an IntegerField drops
+ the constraint on the database.
+ """
+ db.create_table("test_alterc", [
+ ('num', models.PositiveIntegerField()),
+ ])
+ # Add in some test values
+ db.execute("INSERT INTO test_alterc (num) VALUES (1), (2)")
+ # Ensure that adding a negative number is bad
+ db.start_transaction()
+ try:
+ db.execute("INSERT INTO test_alterc (num) VALUES (-3)")
+ except:
+ db.rollback_transaction()
+ else:
+ self.fail("Could insert a negative integer into a PositiveIntegerField.")
+ # Alter it to a normal IntegerField
+ db.alter_column("test_alterc", "num", models.IntegerField())
+ # It should now work
+ db.execute("INSERT INTO test_alterc (num) VALUES (-3)")
+ db.delete_table("test_alterc")
+
+ def test_unique(self):
+ """
+ Tests creating/deleting unique constraints.
+ """
+ db.create_table("test_unique2", [
+ ('id', models.AutoField(primary_key=True)),
+ ])
+ db.create_table("test_unique", [
+ ('spam', models.BooleanField(default=False)),
+ ('eggs', models.IntegerField()),
+ ('ham', models.ForeignKey(db.mock_model('Unique2', 'test_unique2'))),
+ ])
+ # Add a constraint
+ db.create_unique("test_unique", ["spam"])
+ # Shouldn't do anything during dry-run
+ db.dry_run = True
+ db.delete_unique("test_unique", ["spam"])
+ db.dry_run = False
+ db.delete_unique("test_unique", ["spam"])
+ db.create_unique("test_unique", ["spam"])
+ db.start_transaction()
+ # Test it works
+ db.execute("INSERT INTO test_unique2 (id) VALUES (1), (2)")
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1), (false, 1, 2)")
+ try:
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 2, 1)")
+ except:
+ db.rollback_transaction()
+ else:
+ self.fail("Could insert non-unique item.")
+ # Drop that, add one only on eggs
+ db.delete_unique("test_unique", ["spam"])
+ db.execute("DELETE FROM test_unique")
+ db.create_unique("test_unique", ["eggs"])
+ db.start_transaction()
+ # Test similarly
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1), (false, 1, 2)")
+ try:
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 1, 1)")
+ except:
+ db.rollback_transaction()
+ else:
+ self.fail("Could insert non-unique item.")
+ # Drop those, test combined constraints
+ db.delete_unique("test_unique", ["eggs"])
+ db.execute("DELETE FROM test_unique")
+ db.create_unique("test_unique", ["spam", "eggs", "ham_id"])
+ db.start_transaction()
+ # Test similarly
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1), (false, 1, 1)")
+ try:
+ db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1)")
+ except:
+ db.rollback_transaction()
+ else:
+ self.fail("Could insert non-unique pair.")
+ db.delete_unique("test_unique", ["spam", "eggs", "ham_id"])
+
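These tests double as a tour of the db abstraction layer. A minimal sketch of the same API as a hand-written migration would use it (the table and column names here are illustrative, not part of the commit):

    from south.db import db
    from django.db import models

    db.start_transaction()
    db.create_table("example_note", [
        ('id', models.AutoField(primary_key=True)),
        ('body', models.TextField()),
    ])
    db.execute_deferred_sql()   # flush deferred index/constraint SQL
    db.add_column("example_note", "rating", models.IntegerField(default=0))
    db.commit_transaction()

All of these calls appear in the tests above; commit_transaction is simply the counterpart of the rollback_transaction the tests use to undo their work.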
diff --git a/lib/django_evolution/management/commands/__init__.py b/lib/south/tests/fakeapp/__init__.py
similarity index 100%
copy from lib/django_evolution/management/commands/__init__.py
copy to lib/south/tests/fakeapp/__init__.py
diff --git a/lib/south/tests/fakeapp/migrations/0001_spam.py b/lib/south/tests/fakeapp/migrations/0001_spam.py
new file mode 100644
index 0000000..d814548
--- /dev/null
+++ b/lib/south/tests/fakeapp/migrations/0001_spam.py
@@ -0,0 +1,19 @@
+from south.db import db
+from django.db import models
+
+class Migration:
+
+ def forwards(self):
+
+ # Model 'Spam'
+ db.create_table("southtest_spam", (
+ ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
+ ('weight', models.FloatField()),
+ ('expires', models.DateTimeField()),
+ ('name', models.CharField(max_length=255))
+ ))
+
+ def backwards(self):
+
+ db.delete_table("southtest_spam")
+
diff --git a/lib/south/tests/fakeapp/migrations/0002_eggs.py b/lib/south/tests/fakeapp/migrations/0002_eggs.py
new file mode 100644
index 0000000..3ec8399
--- /dev/null
+++ b/lib/south/tests/fakeapp/migrations/0002_eggs.py
@@ -0,0 +1,20 @@
+from south.db import db
+from django.db import models
+
+class Migration:
+
+ def forwards(self):
+
+ Spam = db.mock_model(model_name='Spam', db_table='southtest_spam', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField)
+
+ db.create_table("southtest_eggs", (
+ ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
+ ('size', models.FloatField()),
+ ('quantity', models.IntegerField()),
+ ('spam', models.ForeignKey(Spam)),
+ ))
+
+ def backwards(self):
+
+ db.delete_table("southtest_eggs")
+
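The mock_model call above is the important trick: a migration has to describe the schema as it stood at that point in history, so it must not import the real model class, which always reflects the latest code. mock_model fabricates a stand-in with just enough of a _meta for ForeignKey column generation. A hypothetical illustration (the Profile/app_note names are invented for this sketch):

    # Reference another table without importing its real model class.
    Profile = db.mock_model(model_name='Profile', db_table='app_profile',
                            db_tablespace='', pk_field_name='id',
                            pk_field_type=models.AutoField)
    db.add_column("app_note", "profile", models.ForeignKey(Profile))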
diff --git a/lib/south/tests/fakeapp/migrations/0003_alter_spam.py b/lib/south/tests/fakeapp/migrations/0003_alter_spam.py
new file mode 100644
index 0000000..3a9aea4
--- /dev/null
+++ b/lib/south/tests/fakeapp/migrations/0003_alter_spam.py
@@ -0,0 +1,12 @@
+from south.db import db
+from django.db import models
+
+class Migration:
+
+ def forwards(self):
+
+ db.alter_column("southtest_spam", 'name', models.CharField(max_length=255, null=True))
+
+ def backwards(self):
+
+ db.alter_column("southtest_spam", 'name', models.CharField(max_length=255))
diff --git a/lib/django_evolution/management/commands/__init__.py b/lib/south/tests/fakeapp/migrations/__init__.py
similarity index 100%
rename from lib/django_evolution/management/commands/__init__.py
rename to lib/south/tests/fakeapp/migrations/__init__.py
diff --git a/lib/south/tests/fakeapp/models.py b/lib/south/tests/fakeapp/models.py
new file mode 100644
index 0000000..652c436
--- /dev/null
+++ b/lib/south/tests/fakeapp/models.py
@@ -0,0 +1,46 @@
+# -*- coding: UTF-8 -*-
+
+from django.db import models
+from django.contrib.auth.models import User
+
+# An empty case.
+class Other1(models.Model): pass
+
+# Nastiness.
+class HorribleModel(models.Model):
+ "A model to test the edge cases of model parsing"
+
+ # First, some nice fields
+ name = models.CharField(max_length=255)
+ short_name = models.CharField(max_length=50)
+ slug = models.SlugField(unique=True)
+
+ # A ForeignKey, to a model above, and then below
+ o1 = models.ForeignKey(Other1)
+ o2 = models.ForeignKey('Other2')
+
+ # Now to something outside
+ user = models.ForeignKey(User, related_name="horribles")
+
+ # Unicode!
+ code = models.CharField(max_length=25, default="â??â??â??â??â??â??â??â??BA")
+
+ # Time to get nasty. Define choices as a plain class attribute (not a field), and use it
+ choices = [('hello', '1'), ('world', '2')]
+ choiced = models.CharField(max_length=20, choices=choices)
+
+ class Meta:
+ db_table = "my_fave"
+ verbose_name = "Dr. Strangelove," + \
+ """or how I learned to stop worrying
+and love the bomb"""
+
+ # Now spread over multiple lines
+ multiline = \
+ models.TextField(
+ )
+
+# Special case.
+class Other2(models.Model):
+ # Try loading a field without a newline after it (inspect hates this)
+ close_but_no_cigar = models.PositiveIntegerField(primary_key=True)
\ No newline at end of file
diff --git a/lib/south/tests/logic.py b/lib/south/tests/logic.py
new file mode 100644
index 0000000..2312bac
--- /dev/null
+++ b/lib/south/tests/logic.py
@@ -0,0 +1,243 @@
+import unittest
+import datetime
+import sys
+import os
+import StringIO
+
+from south import migration
+from south.tests import Monkeypatcher
+
+# Add the tests directory so fakeapp is on sys.path
+test_root = os.path.dirname(__file__)
+sys.path.append(test_root)
+
+
+class TestMigrationLogic(Monkeypatcher):
+
+ """
+ Tests if the various logic functions in migration actually work.
+ """
+
+ def test_get_app_name(self):
+ self.assertEqual(
+ "southtest",
+ migration.get_app_name(self.create_fake_app("southtest.migrations")),
+ )
+ self.assertEqual(
+ "baz",
+ migration.get_app_name(self.create_fake_app("foo.bar.baz.migrations")),
+ )
+
+
+ def test_get_migrated_apps(self):
+
+ P1 = __import__("fakeapp.migrations", {}, {}, [''])
+
+ self.assertEqual(
+ [P1],
+ list(migration.get_migrated_apps()),
+ )
+
+
+ def test_get_app(self):
+
+ P1 = __import__("fakeapp.migrations", {}, {}, [''])
+
+ self.assertEqual(P1, migration.get_app("fakeapp"))
+ self.assertEqual(P1, migration.get_app(self.create_fake_app("fakeapp.models")))
+
+
+ def test_get_app_fullname(self):
+ self.assertEqual(
+ "southtest",
+ migration.get_app_fullname(self.create_fake_app("southtest.migrations")),
+ )
+ self.assertEqual(
+ "foo.bar.baz",
+ migration.get_app_fullname(self.create_fake_app("foo.bar.baz.migrations")),
+ )
+
+
+ def test_get_migration_names(self):
+
+ app = self.create_test_app()
+
+ self.assertEqual(
+ ["0001_spam", "0002_eggs", "0003_alter_spam"],
+ migration.get_migration_names(app),
+ )
+
+
+ def test_get_migration_classes(self):
+
+ app = self.create_test_app()
+
+ # Can't use a vanilla import: module names starting with a digit aren't valid in the import grammar
+ M1 = __import__("fakeapp.migrations.0001_spam", {}, {}, ['Migration']).Migration
+ M2 = __import__("fakeapp.migrations.0002_eggs", {}, {}, ['Migration']).Migration
+ M3 = __import__("fakeapp.migrations.0003_alter_spam", {}, {}, ['Migration']).Migration
+
+ self.assertEqual(
+ [M1, M2, M3],
+ list(migration.get_migration_classes(app)),
+ )
+
+
+ def test_get_migration(self):
+
+ app = self.create_test_app()
+
+ # Can't use a vanilla import: module names starting with a digit aren't valid in the import grammar
+ M1 = __import__("fakeapp.migrations.0001_spam", {}, {}, ['Migration']).Migration
+ M2 = __import__("fakeapp.migrations.0002_eggs", {}, {}, ['Migration']).Migration
+
+ self.assertEqual(M1, migration.get_migration(app, "0001_spam"))
+ self.assertEqual(M2, migration.get_migration(app, "0002_eggs"))
+
+ # Temporarily redirect sys.stdout during this, it whinges.
+ stdout, sys.stdout = sys.stdout, StringIO.StringIO()
+ try:
+ self.assertRaises((ImportError, ValueError), migration.get_migration, app, "0001_jam")
+ finally:
+ sys.stdout = stdout
+
+
+ def test_all_migrations(self):
+
+ app = migration.get_app("fakeapp")
+
+ self.assertEqual(
+ {app: {
+ "0001_spam": migration.get_migration(app, "0001_spam"),
+ "0002_eggs": migration.get_migration(app, "0002_eggs"),
+ "0003_alter_spam": migration.get_migration(app, "0003_alter_spam"),
+ }},
+ migration.all_migrations(),
+ )
+
+
+ def assertListEqual(self, list1, list2):
+ list1 = list(list1)
+ list2 = list(list2)
+ list1.sort()
+ list2.sort()
+ return self.assertEqual(list1, list2)
+
+
+ def test_apply_migrations(self):
+
+ app = migration.get_app("fakeapp")
+
+ # We should start with no migrations
+ self.assertEqual(list(migration.MigrationHistory.objects.all()), [])
+
+ # Apply them normally
+ migration.migrate_app(app, target_name=None, resolve_mode=None, fake=False, silent=True)
+
+ # We should finish with all migrations
+ self.assertListEqual(
+ (
+ (u"fakeapp", u"0001_spam"),
+ (u"fakeapp", u"0002_eggs"),
+ (u"fakeapp", u"0003_alter_spam"),
+ ),
+ migration.MigrationHistory.objects.values_list("app_name", "migration"),
+ )
+
+ # Now roll them backwards
+ migration.migrate_app(app, target_name="zero", resolve_mode=None, fake=False, silent=True)
+
+ # Finish with none
+ self.assertEqual(list(migration.MigrationHistory.objects.all()), [])
+
+
+ def test_migration_merge_forwards(self):
+
+ app = migration.get_app("fakeapp")
+
+ # We should start with no migrations
+ self.assertEqual(list(migration.MigrationHistory.objects.all()), [])
+
+ # Insert one in the wrong order
+ migration.MigrationHistory.objects.create(
+ app_name = "fakeapp",
+ migration = "0002_eggs",
+ applied = datetime.datetime.now(),
+ )
+
+ # Did it go in?
+ self.assertListEqual(
+ (
+ (u"fakeapp", u"0002_eggs"),
+ ),
+ migration.MigrationHistory.objects.values_list("app_name", "migration"),
+ )
+
+ # Apply them normally
+ try:
+ migration.migrate_app(app, target_name=None, resolve_mode=None, fake=False, silent=True)
+ except SystemExit:
+ pass
+
+ # Nothing should have changed (no merge mode!)
+ self.assertListEqual(
+ (
+ (u"fakeapp", u"0002_eggs"),
+ ),
+ migration.MigrationHistory.objects.values_list("app_name", "migration"),
+ )
+
+ # Apply with merge
+ migration.migrate_app(app, target_name=None, resolve_mode="merge", fake=False, silent=True)
+
+ # We should finish with all migrations
+ self.assertListEqual(
+ (
+ (u"fakeapp", u"0001_spam"),
+ (u"fakeapp", u"0002_eggs"),
+ (u"fakeapp", u"0003_alter_spam"),
+ ),
+ migration.MigrationHistory.objects.values_list("app_name", "migration"),
+ )
+
+ # Now roll them backwards
+ migration.migrate_app(app, target_name="0002", resolve_mode=None, fake=False, silent=True)
+ migration.migrate_app(app, target_name="0001", resolve_mode=None, fake=True, silent=True)
+ migration.migrate_app(app, target_name="zero", resolve_mode=None, fake=False, silent=True)
+
+ # Finish with none
+ self.assertEqual(list(migration.MigrationHistory.objects.all()), [])
+
+ def test_alter_column_null(self):
+ def null_ok():
+ from django.db import connection, transaction
+ # the DBAPI introspection module fails on postgres NULLs.
+ cursor = connection.cursor()
+ try:
+ cursor.execute("INSERT INTO southtest_spam (id, weight, expires, name) VALUES (100, 10.1, now(), NULL);")
+ except:
+ transaction.rollback()
+ return False
+ else:
+ cursor.execute("DELETE FROM southtest_spam")
+ transaction.commit()
+ return True
+
+ app = migration.get_app("fakeapp")
+ self.assertEqual(list(migration.MigrationHistory.objects.all()), [])
+
+ # by default name is NOT NULL
+ migration.migrate_app(app, target_name="0002", resolve_mode=None, fake=False, silent=True)
+ self.failIf(null_ok())
+
+ # after 0003, it should be NULL
+ migration.migrate_app(app, target_name="0003", resolve_mode=None, fake=False, silent=True)
+ self.assert_(null_ok())
+
+ # make sure it is NOT NULL again
+ migration.migrate_app(app, target_name="0002", resolve_mode=None, fake=False, silent=True)
+ self.failIf(null_ok(), 'name not null after migration')
+
+ # finish with no migrations, otherwise other tests fail...
+ migration.migrate_app(app, target_name="zero", resolve_mode=None, fake=False, silent=True)
+ self.assertEqual(list(migration.MigrationHistory.objects.all()), [])
\ No newline at end of file
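MigrationHistory, queried throughout these tests, is South's bookkeeping model (lib/south/models.py in the summary above). Roughly, it records one row per applied migration; a sketch of the shape the assertions rely on, not a verbatim copy of that file:

    from django.db import models

    class MigrationHistory(models.Model):
        app_name = models.CharField(max_length=255)
        migration = models.CharField(max_length=255)
        applied = models.DateTimeField(blank=True, null=True)

That one-row-per-migration layout is why rolling back to "zero" leaves the queryset empty.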
diff --git a/lib/south/tests/modelsparser.py b/lib/south/tests/modelsparser.py
new file mode 100644
index 0000000..0a72e62
--- /dev/null
+++ b/lib/south/tests/modelsparser.py
@@ -0,0 +1,61 @@
+# -*- coding: UTF-8 -*-
+
+import unittest
+
+from south.db import db
+from south.tests import Monkeypatcher
+from south.tests.fakeapp.models import HorribleModel, Other1, Other2
+
+from south.modelsparser import get_model_fields, get_model_meta
+
+class TestModelParsing(Monkeypatcher):
+
+ """
+ Tests parsing of models.py files against the test one.
+ """
+
+ def test_fields(self):
+
+ fields = get_model_fields(HorribleModel)
+ self.assertEqual(
+ fields,
+ {
+ 'id': ('models.AutoField', [], {'primary_key': 'True'}),
+
+ 'name': ('models.CharField', [], {'max_length': '255'}),
+ 'short_name': ('models.CharField', [], {'max_length': '50'}),
+ 'slug': ('models.SlugField', [], {'unique': 'True'}),
+
+ 'o1': ('models.ForeignKey', ['Other1'], {}),
+ 'o2': ('models.ForeignKey', ["'Other2'"], {}),
+
+ 'user': ('models.ForeignKey', ['User'], {'related_name': '"horribles"'}),
+
+ 'code': ('models.CharField', [], {'max_length': '25', 'default': '"↑↑↓↓←→←→BA"'}),
+
+ 'choiced': ('models.CharField', [], {'max_length': '20', 'choices': 'choices'}),
+
+ 'multiline': ('models.TextField', [], {}),
+ },
+ )
+
+ fields2 = get_model_fields(Other2)
+ self.assertEqual(
+ fields2,
+ {'close_but_no_cigar': ('models.PositiveIntegerField', [], {'primary_key': 'True'})},
+ )
+
+ fields3 = get_model_fields(Other1)
+ self.assertEqual(
+ fields3,
+ {'id': ('models.AutoField', [], {'primary_key': 'True'})},
+ )
+
+
+ def test_meta(self):
+
+ meta = get_model_meta(HorribleModel)
+ self.assertEqual(
+ meta,
+ {'db_table': '"my_fave"', 'verbose_name': '"Dr. Strangelove,"+"""or how I learned to stop worrying\nand love the bomb"""'},
+ )
\ No newline at end of file
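Note that get_model_fields returns source fragments as strings (e.g. 'True', "'Other2'"), not live objects: modelsparser reads the models.py text, which is what lets the startmigration command paste these triples straight into a generated migration file. A sketch of that round trip (illustrative only; requires a configured Django settings module to import the test app):

    from south.modelsparser import get_model_fields
    from south.tests.fakeapp.models import Other2

    for name, (klass, args, kwargs) in get_model_fields(Other2).items():
        bits = list(args) + ["%s=%s" % kv for kv in kwargs.items()]
        print "('%s', %s(%s))," % (name, klass, ", ".join(bits))
    # -> ('close_but_no_cigar', models.PositiveIntegerField(primary_key=True)),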
diff --git a/settings.py b/settings.py
index 0b31b89..2c240f0 100644
--- a/settings.py
+++ b/settings.py
@@ -95,7 +95,7 @@ INSTALLED_APPS = (
# External apps
'registration',
- 'django_evolution',
+ 'south',
'reversion',
'gravatar',
'autoslug',