author    | Jim Rollenhagen <jim@jimrollenhagen.com> | 2019-09-26 09:56:42 -0400
committer | Jim Rollenhagen <jim@jimrollenhagen.com> | 2019-09-26 09:56:42 -0400
commit    | 52672a64cc0cab4ea14a4a756fce850eb03315e3 (patch)
tree      | a86024e4e6141aa8983c750f751c58d924f5b11a /migrate/changeset
parent    | 8acab2cd75a5b23ac162e49c8e4fb1e3f958352a (diff)
download  | sqlalchemy-migrate-master.tar.gz
Diffstat (limited to 'migrate/changeset')
-rw-r--r-- | migrate/changeset/__init__.py           |  28
-rw-r--r-- | migrate/changeset/ansisql.py            | 311
-rw-r--r-- | migrate/changeset/constraint.py         | 199
-rw-r--r-- | migrate/changeset/databases/__init__.py |  11
-rw-r--r-- | migrate/changeset/databases/firebird.py |  93
-rw-r--r-- | migrate/changeset/databases/ibmdb2.py   | 337
-rw-r--r-- | migrate/changeset/databases/mysql.py    |  68
-rw-r--r-- | migrate/changeset/databases/oracle.py   | 108
-rw-r--r-- | migrate/changeset/databases/postgres.py |  42
-rw-r--r-- | migrate/changeset/databases/sqlite.py   | 229
-rw-r--r-- | migrate/changeset/databases/visitor.py  |  88
-rw-r--r-- | migrate/changeset/schema.py             | 705
-rw-r--r-- | migrate/changeset/util.py               |  10
13 files changed, 0 insertions(+), 2229 deletions(-)
diff --git a/migrate/changeset/__init__.py b/migrate/changeset/__init__.py deleted file mode 100644 index 507fa73..0000000 --- a/migrate/changeset/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -""" - This module extends SQLAlchemy and provides additional DDL [#]_ - support. - - .. [#] SQL Data Definition Language -""" -import re - -import sqlalchemy -from sqlalchemy import __version__ as _sa_version - -_sa_version = tuple(int(re.match("\d+", x).group(0)) for x in _sa_version.split(".")) -SQLA_07 = _sa_version >= (0, 7) -SQLA_08 = _sa_version >= (0, 8) -SQLA_09 = _sa_version >= (0, 9) -SQLA_10 = _sa_version >= (1, 0) - -del re -del _sa_version - -from migrate.changeset.schema import * -from migrate.changeset.constraint import * - -sqlalchemy.schema.Table.__bases__ += (ChangesetTable, ) -sqlalchemy.schema.Column.__bases__ += (ChangesetColumn, ) -sqlalchemy.schema.Index.__bases__ += (ChangesetIndex, ) - -sqlalchemy.schema.DefaultClause.__bases__ += (ChangesetDefaultClause, ) diff --git a/migrate/changeset/ansisql.py b/migrate/changeset/ansisql.py deleted file mode 100644 index 0a298a2..0000000 --- a/migrate/changeset/ansisql.py +++ /dev/null @@ -1,311 +0,0 @@ -""" - Extensions to SQLAlchemy for altering existing tables. - - At the moment, this isn't so much based off of ANSI as much as - things that just happen to work with multiple databases. -""" - -import sqlalchemy as sa -from sqlalchemy.schema import SchemaVisitor -from sqlalchemy.engine.default import DefaultDialect -from sqlalchemy.sql import ClauseElement -from sqlalchemy.schema import (ForeignKeyConstraint, - PrimaryKeyConstraint, - CheckConstraint, - UniqueConstraint, - Index) - -from migrate import exceptions -import sqlalchemy.sql.compiler -from migrate.changeset import constraint -from migrate.changeset import util -from six.moves import StringIO - -from sqlalchemy.schema import AddConstraint, DropConstraint -from sqlalchemy.sql.compiler import DDLCompiler -SchemaGenerator = SchemaDropper = DDLCompiler - - -class AlterTableVisitor(SchemaVisitor): - """Common operations for ``ALTER TABLE`` statements.""" - - # engine.Compiler looks for .statement - # when it spawns off a new compiler - statement = ClauseElement() - - def append(self, s): - """Append content to the SchemaIterator's query buffer.""" - - self.buffer.write(s) - - def execute(self): - """Execute the contents of the SchemaIterator's buffer.""" - try: - return self.connection.execute(self.buffer.getvalue()) - finally: - self.buffer.seek(0) - self.buffer.truncate() - - def __init__(self, dialect, connection, **kw): - self.connection = connection - self.buffer = StringIO() - self.preparer = dialect.identifier_preparer - self.dialect = dialect - - def traverse_single(self, elem): - ret = super(AlterTableVisitor, self).traverse_single(elem) - if ret: - # adapt to 0.6 which uses a string-returning - # object - self.append(" %s" % ret) - - def _to_table(self, param): - """Returns the table object for the given param object.""" - if isinstance(param, (sa.Column, sa.Index, sa.schema.Constraint)): - ret = param.table - else: - ret = param - return ret - - def start_alter_table(self, param): - """Returns the start of an ``ALTER TABLE`` SQL-Statement. - - Use the param object to determine the table name and use it - for building the SQL statement. 
- - :param param: object to determine the table from - :type param: :class:`sqlalchemy.Column`, :class:`sqlalchemy.Index`, - :class:`sqlalchemy.schema.Constraint`, :class:`sqlalchemy.Table`, - or string (table name) - """ - table = self._to_table(param) - self.append('\nALTER TABLE %s ' % self.preparer.format_table(table)) - return table - - -class ANSIColumnGenerator(AlterTableVisitor, SchemaGenerator): - """Extends ansisql generator for column creation (alter table add col)""" - - def visit_column(self, column): - """Create a column (table already exists). - - :param column: column object - :type column: :class:`sqlalchemy.Column` instance - """ - if column.default is not None: - self.traverse_single(column.default) - - table = self.start_alter_table(column) - self.append("ADD ") - self.append(self.get_column_specification(column)) - - for cons in column.constraints: - self.traverse_single(cons) - self.execute() - - # ALTER TABLE STATEMENTS - - # add indexes and unique constraints - if column.index_name: - Index(column.index_name,column).create() - elif column.unique_name: - constraint.UniqueConstraint(column, - name=column.unique_name).create() - - # SA bounds FK constraints to table, add manually - for fk in column.foreign_keys: - self.add_foreignkey(fk.constraint) - - # add primary key constraint if needed - if column.primary_key_name: - cons = constraint.PrimaryKeyConstraint(column, - name=column.primary_key_name) - cons.create() - - def add_foreignkey(self, fk): - self.connection.execute(AddConstraint(fk)) - -class ANSIColumnDropper(AlterTableVisitor, SchemaDropper): - """Extends ANSI SQL dropper for column dropping (``ALTER TABLE - DROP COLUMN``). - """ - - def visit_column(self, column): - """Drop a column from its table. - - :param column: the column object - :type column: :class:`sqlalchemy.Column` - """ - table = self.start_alter_table(column) - self.append('DROP COLUMN %s' % self.preparer.format_column(column)) - self.execute() - - -class ANSISchemaChanger(AlterTableVisitor, SchemaGenerator): - """Manages changes to existing schema elements. - - Note that columns are schema elements; ``ALTER TABLE ADD COLUMN`` - is in SchemaGenerator. - - All items may be renamed. Columns can also have many of their properties - - type, for example - changed. - - Each function is passed a tuple, containing (object, name); where - object is a type of object you'd expect for that function - (ie. table for visit_table) and name is the object's new - name. NONE means the name is unchanged. - """ - - def visit_table(self, table): - """Rename a table. 
Other ops aren't supported.""" - self.start_alter_table(table) - self.append("RENAME TO %s" % self.preparer.quote(table.new_name)) - self.execute() - - def visit_index(self, index): - """Rename an index""" - if hasattr(self, '_validate_identifier'): - # SA <= 0.6.3 - self.append("ALTER INDEX %s RENAME TO %s" % ( - self.preparer.quote( - self._validate_identifier( - index.name, True)), - self.preparer.quote( - self._validate_identifier( - index.new_name, True)))) - elif hasattr(self, '_index_identifier'): - # SA >= 0.6.5, < 0.8 - self.append("ALTER INDEX %s RENAME TO %s" % ( - self.preparer.quote( - self._index_identifier( - index.name)), - self.preparer.quote( - self._index_identifier( - index.new_name)))) - else: - # SA >= 0.8 - class NewName(object): - """Map obj.name -> obj.new_name""" - def __init__(self, index): - self.name = index.new_name - self._obj = index - - def __getattr__(self, attr): - if attr == 'name': - return getattr(self, attr) - return getattr(self._obj, attr) - - self.append("ALTER INDEX %s RENAME TO %s" % ( - self._prepared_index_name(index), - self._prepared_index_name(NewName(index)))) - - self.execute() - - def visit_column(self, delta): - """Rename/change a column.""" - # ALTER COLUMN is implemented as several ALTER statements - keys = delta.keys() - if 'type' in keys: - self._run_subvisit(delta, self._visit_column_type) - if 'nullable' in keys: - self._run_subvisit(delta, self._visit_column_nullable) - if 'server_default' in keys: - # Skip 'default': only handle server-side defaults, others - # are managed by the app, not the db. - self._run_subvisit(delta, self._visit_column_default) - if 'name' in keys: - self._run_subvisit(delta, self._visit_column_name, start_alter=False) - - def _run_subvisit(self, delta, func, start_alter=True): - """Runs visit method based on what needs to be changed on column""" - table = self._to_table(delta.table) - col_name = delta.current_name - if start_alter: - self.start_alter_column(table, col_name) - ret = func(table, delta.result_column, delta) - self.execute() - - def start_alter_column(self, table, col_name): - """Starts ALTER COLUMN""" - self.start_alter_table(table) - self.append("ALTER COLUMN %s " % self.preparer.quote(col_name)) - - def _visit_column_nullable(self, table, column, delta): - nullable = delta['nullable'] - if nullable: - self.append("DROP NOT NULL") - else: - self.append("SET NOT NULL") - - def _visit_column_default(self, table, column, delta): - default_text = self.get_column_default_string(column) - if default_text is not None: - self.append("SET DEFAULT %s" % default_text) - else: - self.append("DROP DEFAULT") - - def _visit_column_type(self, table, column, delta): - type_ = delta['type'] - type_text = str(type_.compile(dialect=self.dialect)) - self.append("TYPE %s" % type_text) - - def _visit_column_name(self, table, column, delta): - self.start_alter_table(table) - col_name = self.preparer.quote(delta.current_name) - new_name = self.preparer.format_column(delta.result_column) - self.append('RENAME COLUMN %s TO %s' % (col_name, new_name)) - - -class ANSIConstraintCommon(AlterTableVisitor): - """ - Migrate's constraints require a separate creation function from - SA's: Migrate's constraints are created independently of a table; - SA's are created at the same time as the table. - """ - - def get_constraint_name(self, cons): - """Gets a name for the given constraint. 
- - If the name is already set it will be used otherwise the - constraint's :meth:`autoname <migrate.changeset.constraint.ConstraintChangeset.autoname>` - method is used. - - :param cons: constraint object - """ - if cons.name is not None: - ret = cons.name - else: - ret = cons.name = cons.autoname() - return ret - - def visit_migrate_primary_key_constraint(self, *p, **k): - self._visit_constraint(*p, **k) - - def visit_migrate_foreign_key_constraint(self, *p, **k): - self._visit_constraint(*p, **k) - - def visit_migrate_check_constraint(self, *p, **k): - self._visit_constraint(*p, **k) - - def visit_migrate_unique_constraint(self, *p, **k): - self._visit_constraint(*p, **k) - -class ANSIConstraintGenerator(ANSIConstraintCommon, SchemaGenerator): - def _visit_constraint(self, constraint): - constraint.name = self.get_constraint_name(constraint) - self.append(self.process(AddConstraint(constraint))) - self.execute() - -class ANSIConstraintDropper(ANSIConstraintCommon, SchemaDropper): - def _visit_constraint(self, constraint): - constraint.name = self.get_constraint_name(constraint) - self.append(self.process(DropConstraint(constraint, cascade=constraint.cascade))) - self.execute() - - -class ANSIDialect(DefaultDialect): - columngenerator = ANSIColumnGenerator - columndropper = ANSIColumnDropper - schemachanger = ANSISchemaChanger - constraintgenerator = ANSIConstraintGenerator - constraintdropper = ANSIConstraintDropper diff --git a/migrate/changeset/constraint.py b/migrate/changeset/constraint.py deleted file mode 100644 index 96407bd..0000000 --- a/migrate/changeset/constraint.py +++ /dev/null @@ -1,199 +0,0 @@ -""" - This module defines standalone schema constraint classes. -""" -from sqlalchemy import schema - -from migrate.exceptions import * - -class ConstraintChangeset(object): - """Base class for Constraint classes.""" - - def _normalize_columns(self, cols, table_name=False): - """Given: column objects or names; return col names and - (maybe) a table""" - colnames = [] - table = None - for col in cols: - if isinstance(col, schema.Column): - if col.table is not None and table is None: - table = col.table - if table_name: - col = '.'.join((col.table.name, col.name)) - else: - col = col.name - colnames.append(col) - return colnames, table - - def __do_imports(self, visitor_name, *a, **kw): - engine = kw.pop('engine', self.table.bind) - from migrate.changeset.databases.visitor import (get_engine_visitor, - run_single_visitor) - visitorcallable = get_engine_visitor(engine, visitor_name) - run_single_visitor(engine, visitorcallable, self, *a, **kw) - - def create(self, *a, **kw): - """Create the constraint in the database. - - :param engine: the database engine to use. If this is \ - :keyword:`None` the instance's engine will be used - :type engine: :class:`sqlalchemy.engine.base.Engine` - :param connection: reuse connection istead of creating new one. - :type connection: :class:`sqlalchemy.engine.base.Connection` instance - """ - # TODO: set the parent here instead of in __init__ - self.__do_imports('constraintgenerator', *a, **kw) - - def drop(self, *a, **kw): - """Drop the constraint from the database. - - :param engine: the database engine to use. If this is - :keyword:`None` the instance's engine will be used - :param cascade: Issue CASCADE drop if database supports it - :type engine: :class:`sqlalchemy.engine.base.Engine` - :type cascade: bool - :param connection: reuse connection istead of creating new one. 
- :type connection: :class:`sqlalchemy.engine.base.Connection` instance - :returns: Instance with cleared columns - """ - self.cascade = kw.pop('cascade', False) - self.__do_imports('constraintdropper', *a, **kw) - # the spirit of Constraint objects is that they - # are immutable (just like in a DB. they're only ADDed - # or DROPped). - #self.columns.clear() - return self - - -class PrimaryKeyConstraint(ConstraintChangeset, schema.PrimaryKeyConstraint): - """Construct PrimaryKeyConstraint - - Migrate's additional parameters: - - :param cols: Columns in constraint. - :param table: If columns are passed as strings, this kw is required - :type table: Table instance - :type cols: strings or Column instances - """ - - __migrate_visit_name__ = 'migrate_primary_key_constraint' - - def __init__(self, *cols, **kwargs): - colnames, table = self._normalize_columns(cols) - table = kwargs.pop('table', table) - super(PrimaryKeyConstraint, self).__init__(*colnames, **kwargs) - if table is not None: - self._set_parent(table) - - - def autoname(self): - """Mimic the database's automatic constraint names""" - return "%s_pkey" % self.table.name - - -class ForeignKeyConstraint(ConstraintChangeset, schema.ForeignKeyConstraint): - """Construct ForeignKeyConstraint - - Migrate's additional parameters: - - :param columns: Columns in constraint - :param refcolumns: Columns that this FK reffers to in another table. - :param table: If columns are passed as strings, this kw is required - :type table: Table instance - :type columns: list of strings or Column instances - :type refcolumns: list of strings or Column instances - """ - - __migrate_visit_name__ = 'migrate_foreign_key_constraint' - - def __init__(self, columns, refcolumns, *args, **kwargs): - colnames, table = self._normalize_columns(columns) - table = kwargs.pop('table', table) - refcolnames, reftable = self._normalize_columns(refcolumns, - table_name=True) - super(ForeignKeyConstraint, self).__init__(colnames, refcolnames, *args, - **kwargs) - if table is not None: - self._set_parent(table) - - @property - def referenced(self): - return [e.column for e in self.elements] - - @property - def reftable(self): - return self.referenced[0].table - - def autoname(self): - """Mimic the database's automatic constraint names""" - if hasattr(self.columns, 'keys'): - # SA <= 0.5 - firstcol = self.columns[self.columns.keys()[0]] - ret = "%(table)s_%(firstcolumn)s_fkey" % dict( - table=firstcol.table.name, - firstcolumn=firstcol.name,) - else: - # SA >= 0.6 - ret = "%(table)s_%(firstcolumn)s_fkey" % dict( - table=self.table.name, - firstcolumn=self.columns[0],) - return ret - - -class CheckConstraint(ConstraintChangeset, schema.CheckConstraint): - """Construct CheckConstraint - - Migrate's additional parameters: - - :param sqltext: Plain SQL text to check condition - :param columns: If not name is applied, you must supply this kw\ - to autoname constraint - :param table: If columns are passed as strings, this kw is required - :type table: Table instance - :type columns: list of Columns instances - :type sqltext: string - """ - - __migrate_visit_name__ = 'migrate_check_constraint' - - def __init__(self, sqltext, *args, **kwargs): - cols = kwargs.pop('columns', []) - if not cols and not kwargs.get('name', False): - raise InvalidConstraintError('You must either set "name"' - 'parameter or "columns" to autogenarate it.') - colnames, table = self._normalize_columns(cols) - table = kwargs.pop('table', table) - schema.CheckConstraint.__init__(self, sqltext, *args, **kwargs) - 
if table is not None: - self._set_parent(table) - self.colnames = colnames - - def autoname(self): - return "%(table)s_%(cols)s_check" % \ - dict(table=self.table.name, cols="_".join(self.colnames)) - - -class UniqueConstraint(ConstraintChangeset, schema.UniqueConstraint): - """Construct UniqueConstraint - - Migrate's additional parameters: - - :param cols: Columns in constraint. - :param table: If columns are passed as strings, this kw is required - :type table: Table instance - :type cols: strings or Column instances - - .. versionadded:: 0.6.0 - """ - - __migrate_visit_name__ = 'migrate_unique_constraint' - - def __init__(self, *cols, **kwargs): - self.colnames, table = self._normalize_columns(cols) - table = kwargs.pop('table', table) - super(UniqueConstraint, self).__init__(*self.colnames, **kwargs) - if table is not None: - self._set_parent(table) - - def autoname(self): - """Mimic the database's automatic constraint names""" - return "%s_%s_key" % (self.table.name, self.colnames[0]) diff --git a/migrate/changeset/databases/__init__.py b/migrate/changeset/databases/__init__.py deleted file mode 100644 index 075a787..0000000 --- a/migrate/changeset/databases/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -""" - This module contains database dialect specific changeset - implementations. -""" -__all__ = [ - 'postgres', - 'sqlite', - 'mysql', - 'oracle', - 'ibmdb2', -] diff --git a/migrate/changeset/databases/firebird.py b/migrate/changeset/databases/firebird.py deleted file mode 100644 index 0f16b0a..0000000 --- a/migrate/changeset/databases/firebird.py +++ /dev/null @@ -1,93 +0,0 @@ -""" - Firebird database specific implementations of changeset classes. -""" -from sqlalchemy.databases import firebird as sa_base -from sqlalchemy.schema import PrimaryKeyConstraint -from migrate import exceptions -from migrate.changeset import ansisql - - -FBSchemaGenerator = sa_base.FBDDLCompiler - -class FBColumnGenerator(FBSchemaGenerator, ansisql.ANSIColumnGenerator): - """Firebird column generator implementation.""" - - -class FBColumnDropper(ansisql.ANSIColumnDropper): - """Firebird column dropper implementation.""" - - def visit_column(self, column): - """Firebird supports 'DROP col' instead of 'DROP COLUMN col' syntax - - Drop primary key and unique constraints if dropped column is referencing it.""" - if column.primary_key: - if column.table.primary_key.columns.contains_column(column): - column.table.primary_key.drop() - # TODO: recreate primary key if it references more than this column - - for index in column.table.indexes: - # "column in index.columns" causes problems as all - # column objects compare equal and return a SQL expression - if column.name in [col.name for col in index.columns]: - index.drop() - # TODO: recreate index if it references more than this column - - for cons in column.table.constraints: - if isinstance(cons,PrimaryKeyConstraint): - # will be deleted only when the column its on - # is deleted! 
- continue - - should_drop = column.name in cons.columns - if should_drop: - self.start_alter_table(column) - self.append("DROP CONSTRAINT ") - self.append(self.preparer.format_constraint(cons)) - self.execute() - # TODO: recreate unique constraint if it refenrences more than this column - - self.start_alter_table(column) - self.append('DROP %s' % self.preparer.format_column(column)) - self.execute() - - -class FBSchemaChanger(ansisql.ANSISchemaChanger): - """Firebird schema changer implementation.""" - - def visit_table(self, table): - """Rename table not supported""" - raise exceptions.NotSupportedError( - "Firebird does not support renaming tables.") - - def _visit_column_name(self, table, column, delta): - self.start_alter_table(table) - col_name = self.preparer.quote(delta.current_name) - new_name = self.preparer.format_column(delta.result_column) - self.append('ALTER COLUMN %s TO %s' % (col_name, new_name)) - - def _visit_column_nullable(self, table, column, delta): - """Changing NULL is not supported""" - # TODO: http://www.firebirdfaq.org/faq103/ - raise exceptions.NotSupportedError( - "Firebird does not support altering NULL bevahior.") - - -class FBConstraintGenerator(ansisql.ANSIConstraintGenerator): - """Firebird constraint generator implementation.""" - - -class FBConstraintDropper(ansisql.ANSIConstraintDropper): - """Firebird constaint dropper implementation.""" - - def cascade_constraint(self, constraint): - """Cascading constraints is not supported""" - raise exceptions.NotSupportedError( - "Firebird does not support cascading constraints") - - -class FBDialect(ansisql.ANSIDialect): - columngenerator = FBColumnGenerator - columndropper = FBColumnDropper - schemachanger = FBSchemaChanger - constraintgenerator = FBConstraintGenerator - constraintdropper = FBConstraintDropper diff --git a/migrate/changeset/databases/ibmdb2.py b/migrate/changeset/databases/ibmdb2.py deleted file mode 100644 index a12d73b..0000000 --- a/migrate/changeset/databases/ibmdb2.py +++ /dev/null @@ -1,337 +0,0 @@ -""" - DB2 database specific implementations of changeset classes. -""" - -import logging - -from ibm_db_sa import base -from sqlalchemy.schema import (AddConstraint, - CreateIndex, - DropConstraint) -from sqlalchemy.schema import (Index, - PrimaryKeyConstraint, - UniqueConstraint) - -from migrate.changeset import ansisql -from migrate.changeset import constraint -from migrate.changeset import util -from migrate import exceptions - - -LOG = logging.getLogger(__name__) - -IBMDBSchemaGenerator = base.IBM_DBDDLCompiler - - -def get_server_version_info(dialect): - """Returns the DB2 server major and minor version as a list of ints.""" - return [int(ver_token) for ver_token in dialect.dbms_ver.split('.')[0:2]] - - -def is_unique_constraint_with_null_columns_supported(dialect): - """Checks to see if the DB2 version is at least 10.5. - - This is needed for checking if unique constraints with null columns - are supported. 
- """ - return get_server_version_info(dialect) >= [10, 5] - - -class IBMDBColumnGenerator(IBMDBSchemaGenerator, - ansisql.ANSIColumnGenerator): - def visit_column(self, column): - nullable = True - if not column.nullable: - nullable = False - column.nullable = True - - table = self.start_alter_table(column) - self.append("ADD COLUMN ") - self.append(self.get_column_specification(column)) - - for cons in column.constraints: - self.traverse_single(cons) - if column.default is not None: - self.traverse_single(column.default) - self.execute() - - #ALTER TABLE STATEMENTS - if not nullable: - self.start_alter_table(column) - self.append("ALTER COLUMN %s SET NOT NULL" % - self.preparer.format_column(column)) - self.execute() - self.append("CALL SYSPROC.ADMIN_CMD('REORG TABLE %s')" % - self.preparer.format_table(table)) - self.execute() - - # add indexes and unique constraints - if column.index_name: - Index(column.index_name, column).create() - elif column.unique_name: - constraint.UniqueConstraint(column, - name=column.unique_name).create() - - # SA bounds FK constraints to table, add manually - for fk in column.foreign_keys: - self.add_foreignkey(fk.constraint) - - # add primary key constraint if needed - if column.primary_key_name: - pk = constraint.PrimaryKeyConstraint( - column, name=column.primary_key_name) - pk.create() - - self.append("COMMIT") - self.execute() - self.append("CALL SYSPROC.ADMIN_CMD('REORG TABLE %s')" % - self.preparer.format_table(table)) - self.execute() - - -class IBMDBColumnDropper(ansisql.ANSIColumnDropper): - def visit_column(self, column): - """Drop a column from its table. - - :param column: the column object - :type column: :class:`sqlalchemy.Column` - """ - #table = self.start_alter_table(column) - super(IBMDBColumnDropper, self).visit_column(column) - self.append("CALL SYSPROC.ADMIN_CMD('REORG TABLE %s')" % - self.preparer.format_table(column.table)) - self.execute() - - -class IBMDBSchemaChanger(IBMDBSchemaGenerator, ansisql.ANSISchemaChanger): - def visit_table(self, table): - """Rename a table; #38. 
Other ops aren't supported.""" - - self._rename_table(table) - self.append("TO %s" % self.preparer.quote(table.new_name)) - self.execute() - self.append("COMMIT") - self.execute() - - def _rename_table(self, table): - self.append("RENAME TABLE %s " % self.preparer.format_table(table)) - - def visit_index(self, index): - if hasattr(self, '_index_identifier'): - # SA >= 0.6.5, < 0.8 - old_name = self.preparer.quote( - self._index_identifier(index.name)) - new_name = self.preparer.quote( - self._index_identifier(index.new_name)) - else: - # SA >= 0.8 - class NewName(object): - """Map obj.name -> obj.new_name""" - def __init__(self, index): - self.name = index.new_name - self._obj = index - - def __getattr__(self, attr): - if attr == 'name': - return getattr(self, attr) - return getattr(self._obj, attr) - - old_name = self._prepared_index_name(index) - new_name = self._prepared_index_name(NewName(index)) - - self.append("RENAME INDEX %s TO %s" % (old_name, new_name)) - self.execute() - self.append("COMMIT") - self.execute() - - def _run_subvisit(self, delta, func, start_alter=True): - """Runs visit method based on what needs to be changed on column""" - table = delta.table - if start_alter: - self.start_alter_table(table) - ret = func(table, - self.preparer.quote(delta.current_name), - delta) - self.execute() - self._reorg_table(self.preparer.format_table(delta.table)) - - def _reorg_table(self, delta): - self.append("CALL SYSPROC.ADMIN_CMD('REORG TABLE %s')" % delta) - self.execute() - - def visit_column(self, delta): - keys = delta.keys() - tr = self.connection.begin() - column = delta.result_column.copy() - - if 'type' in keys: - try: - self._run_subvisit(delta, self._visit_column_change, False) - except Exception as e: - LOG.warn("Unable to change the column type. Error: %s" % e) - - if column.primary_key and 'primary_key' not in keys: - try: - self._run_subvisit(delta, self._visit_primary_key) - except Exception as e: - LOG.warn("Unable to add primary key. Error: %s" % e) - - if 'nullable' in keys: - self._run_subvisit(delta, self._visit_column_nullable) - - if 'server_default' in keys: - self._run_subvisit(delta, self._visit_column_default) - - if 'primary_key' in keys: - self._run_subvisit(delta, self._visit_primary_key) - self._run_subvisit(delta, self._visit_unique_constraint) - - if 'name' in keys: - try: - self._run_subvisit(delta, self._visit_column_name, False) - except Exception as e: - LOG.warn("Unable to change column %(name)s. 
Error: %(error)s" % - {'name': delta.current_name, 'error': e}) - - self._reorg_table(self.preparer.format_table(delta.table)) - self.append("COMMIT") - self.execute() - tr.commit() - - def _visit_unique_constraint(self, table, col_name, delta): - # Add primary key to the current column - self.append("ADD CONSTRAINT %s " % col_name) - self.append("UNIQUE (%s)" % col_name) - - def _visit_primary_key(self, table, col_name, delta): - # Add primary key to the current column - self.append("ADD PRIMARY KEY (%s)" % col_name) - - def _visit_column_name(self, table, col_name, delta): - column = delta.result_column.copy() - - # Delete the primary key before renaming the column - if column.primary_key: - try: - self.start_alter_table(table) - self.append("DROP PRIMARY KEY") - self.execute() - except Exception: - LOG.debug("Continue since Primary key does not exist.") - - self.start_alter_table(table) - new_name = self.preparer.format_column(delta.result_column) - self.append("RENAME COLUMN %s TO %s" % (col_name, new_name)) - - if column.primary_key: - # execute the rename before adding primary key back - self.execute() - self.start_alter_table(table) - self.append("ADD PRIMARY KEY (%s)" % new_name) - - def _visit_column_nullable(self, table, col_name, delta): - self.append("ALTER COLUMN %s " % col_name) - nullable = delta['nullable'] - if nullable: - self.append("DROP NOT NULL") - else: - self.append("SET NOT NULL") - - def _visit_column_default(self, table, col_name, delta): - default_text = self.get_column_default_string(delta.result_column) - self.append("ALTER COLUMN %s " % col_name) - if default_text is None: - self.append("DROP DEFAULT") - else: - self.append("SET WITH DEFAULT %s" % default_text) - - def _visit_column_change(self, table, col_name, delta): - column = delta.result_column.copy() - - # Delete the primary key before - if column.primary_key: - try: - self.start_alter_table(table) - self.append("DROP PRIMARY KEY") - self.execute() - except Exception: - LOG.debug("Continue since Primary key does not exist.") - # Delete the identity before - try: - self.start_alter_table(table) - self.append("ALTER COLUMN %s DROP IDENTITY" % col_name) - self.execute() - except Exception: - LOG.debug("Continue since identity does not exist.") - - column.default = None - if not column.table: - column.table = delta.table - self.start_alter_table(table) - self.append("ALTER COLUMN %s " % col_name) - self.append("SET DATA TYPE ") - type_text = self.dialect.type_compiler.process( - delta.result_column.type) - self.append(type_text) - - -class IBMDBConstraintGenerator(ansisql.ANSIConstraintGenerator): - def _visit_constraint(self, constraint): - constraint.name = self.get_constraint_name(constraint) - if (isinstance(constraint, UniqueConstraint) and - is_unique_constraint_with_null_columns_supported( - self.dialect)): - for column in constraint: - if column.nullable: - constraint.exclude_nulls = True - break - if getattr(constraint, 'exclude_nulls', None): - index = Index(constraint.name, - *(column for column in constraint), - unique=True) - sql = self.process(CreateIndex(index)) - sql += ' EXCLUDE NULL KEYS' - else: - sql = self.process(AddConstraint(constraint)) - self.append(sql) - self.execute() - - -class IBMDBConstraintDropper(ansisql.ANSIConstraintDropper, - ansisql.ANSIConstraintCommon): - def _visit_constraint(self, constraint): - constraint.name = self.get_constraint_name(constraint) - if (isinstance(constraint, UniqueConstraint) and - is_unique_constraint_with_null_columns_supported( - 
self.dialect)): - for column in constraint: - if column.nullable: - constraint.exclude_nulls = True - break - if getattr(constraint, 'exclude_nulls', None): - if hasattr(self, '_index_identifier'): - # SA >= 0.6.5, < 0.8 - index_name = self.preparer.quote( - self._index_identifier(constraint.name)) - else: - # SA >= 0.8 - index_name = self._prepared_index_name(constraint) - sql = 'DROP INDEX %s ' % index_name - else: - sql = self.process(DropConstraint(constraint, - cascade=constraint.cascade)) - self.append(sql) - self.execute() - - def visit_migrate_primary_key_constraint(self, constraint): - self.start_alter_table(constraint.table) - self.append("DROP PRIMARY KEY") - self.execute() - - -class IBMDBDialect(ansisql.ANSIDialect): - columngenerator = IBMDBColumnGenerator - columndropper = IBMDBColumnDropper - schemachanger = IBMDBSchemaChanger - constraintgenerator = IBMDBConstraintGenerator - constraintdropper = IBMDBConstraintDropper diff --git a/migrate/changeset/databases/mysql.py b/migrate/changeset/databases/mysql.py deleted file mode 100644 index 1c01706..0000000 --- a/migrate/changeset/databases/mysql.py +++ /dev/null @@ -1,68 +0,0 @@ -""" - MySQL database specific implementations of changeset classes. -""" - -import sqlalchemy -from sqlalchemy.databases import mysql as sa_base -from sqlalchemy import types as sqltypes - -from migrate import exceptions -from migrate.changeset import ansisql -from migrate.changeset import util - - - -MySQLSchemaGenerator = sa_base.MySQLDDLCompiler - -class MySQLColumnGenerator(MySQLSchemaGenerator, ansisql.ANSIColumnGenerator): - pass - - -class MySQLColumnDropper(ansisql.ANSIColumnDropper): - pass - - -class MySQLSchemaChanger(MySQLSchemaGenerator, ansisql.ANSISchemaChanger): - - def visit_column(self, delta): - table = delta.table - colspec = self.get_column_specification(delta.result_column) - if delta.result_column.autoincrement: - primary_keys = [c for c in table.primary_key.columns - if (c.autoincrement and - isinstance(c.type, sqltypes.Integer) and - not c.foreign_keys)] - - if primary_keys: - first = primary_keys.pop(0) - if first.name == delta.current_name: - colspec += " AUTO_INCREMENT" - old_col_name = self.preparer.quote(delta.current_name) - - self.start_alter_table(table) - - self.append("CHANGE COLUMN %s " % old_col_name) - self.append(colspec) - self.execute() - - def visit_index(self, param): - # If MySQL can do this, I can't find how - raise exceptions.NotSupportedError("MySQL cannot rename indexes") - - -class MySQLConstraintGenerator(ansisql.ANSIConstraintGenerator): - pass - - -class MySQLConstraintDropper(MySQLSchemaGenerator, ansisql.ANSIConstraintDropper): - def visit_migrate_check_constraint(self, *p, **k): - raise exceptions.NotSupportedError("MySQL does not support CHECK" - " constraints, use triggers instead.") - - -class MySQLDialect(ansisql.ANSIDialect): - columngenerator = MySQLColumnGenerator - columndropper = MySQLColumnDropper - schemachanger = MySQLSchemaChanger - constraintgenerator = MySQLConstraintGenerator - constraintdropper = MySQLConstraintDropper diff --git a/migrate/changeset/databases/oracle.py b/migrate/changeset/databases/oracle.py deleted file mode 100644 index 2f16b5b..0000000 --- a/migrate/changeset/databases/oracle.py +++ /dev/null @@ -1,108 +0,0 @@ -""" - Oracle database specific implementations of changeset classes. 
-""" -import sqlalchemy as sa -from sqlalchemy.databases import oracle as sa_base - -from migrate import exceptions -from migrate.changeset import ansisql - - -OracleSchemaGenerator = sa_base.OracleDDLCompiler - - -class OracleColumnGenerator(OracleSchemaGenerator, ansisql.ANSIColumnGenerator): - pass - - -class OracleColumnDropper(ansisql.ANSIColumnDropper): - pass - - -class OracleSchemaChanger(OracleSchemaGenerator, ansisql.ANSISchemaChanger): - - def get_column_specification(self, column, **kwargs): - # Ignore the NOT NULL generated - override_nullable = kwargs.pop('override_nullable', None) - if override_nullable: - orig = column.nullable - column.nullable = True - ret = super(OracleSchemaChanger, self).get_column_specification( - column, **kwargs) - if override_nullable: - column.nullable = orig - return ret - - def visit_column(self, delta): - keys = delta.keys() - - if 'name' in keys: - self._run_subvisit(delta, - self._visit_column_name, - start_alter=False) - - if len(set(('type', 'nullable', 'server_default')).intersection(keys)): - self._run_subvisit(delta, - self._visit_column_change, - start_alter=False) - - def _visit_column_change(self, table, column, delta): - # Oracle cannot drop a default once created, but it can set it - # to null. We'll do that if default=None - # http://forums.oracle.com/forums/message.jspa?messageID=1273234#1273234 - dropdefault_hack = (column.server_default is None \ - and 'server_default' in delta.keys()) - # Oracle apparently doesn't like it when we say "not null" if - # the column's already not null. Fudge it, so we don't need a - # new function - notnull_hack = ((not column.nullable) \ - and ('nullable' not in delta.keys())) - # We need to specify NULL if we're removing a NOT NULL - # constraint - null_hack = (column.nullable and ('nullable' in delta.keys())) - - if dropdefault_hack: - column.server_default = sa.PassiveDefault(sa.sql.null()) - if notnull_hack: - column.nullable = True - colspec = self.get_column_specification(column, - override_nullable=null_hack) - if null_hack: - colspec += ' NULL' - if notnull_hack: - column.nullable = False - if dropdefault_hack: - column.server_default = None - - self.start_alter_table(table) - self.append("MODIFY (") - self.append(colspec) - self.append(")") - - -class OracleConstraintCommon(object): - - def get_constraint_name(self, cons): - # Oracle constraints can't guess their name like other DBs - if not cons.name: - raise exceptions.NotSupportedError( - "Oracle constraint names must be explicitly stated") - return cons.name - - -class OracleConstraintGenerator(OracleConstraintCommon, - ansisql.ANSIConstraintGenerator): - pass - - -class OracleConstraintDropper(OracleConstraintCommon, - ansisql.ANSIConstraintDropper): - pass - - -class OracleDialect(ansisql.ANSIDialect): - columngenerator = OracleColumnGenerator - columndropper = OracleColumnDropper - schemachanger = OracleSchemaChanger - constraintgenerator = OracleConstraintGenerator - constraintdropper = OracleConstraintDropper diff --git a/migrate/changeset/databases/postgres.py b/migrate/changeset/databases/postgres.py deleted file mode 100644 index 10ea094..0000000 --- a/migrate/changeset/databases/postgres.py +++ /dev/null @@ -1,42 +0,0 @@ -""" - `PostgreSQL`_ database specific implementations of changeset classes. - - .. 
_`PostgreSQL`: http://www.postgresql.org/ -""" -from migrate.changeset import ansisql - -from sqlalchemy.databases import postgresql as sa_base -PGSchemaGenerator = sa_base.PGDDLCompiler - - -class PGColumnGenerator(PGSchemaGenerator, ansisql.ANSIColumnGenerator): - """PostgreSQL column generator implementation.""" - pass - - -class PGColumnDropper(ansisql.ANSIColumnDropper): - """PostgreSQL column dropper implementation.""" - pass - - -class PGSchemaChanger(ansisql.ANSISchemaChanger): - """PostgreSQL schema changer implementation.""" - pass - - -class PGConstraintGenerator(ansisql.ANSIConstraintGenerator): - """PostgreSQL constraint generator implementation.""" - pass - - -class PGConstraintDropper(ansisql.ANSIConstraintDropper): - """PostgreSQL constaint dropper implementation.""" - pass - - -class PGDialect(ansisql.ANSIDialect): - columngenerator = PGColumnGenerator - columndropper = PGColumnDropper - schemachanger = PGSchemaChanger - constraintgenerator = PGConstraintGenerator - constraintdropper = PGConstraintDropper diff --git a/migrate/changeset/databases/sqlite.py b/migrate/changeset/databases/sqlite.py deleted file mode 100644 index 908c800..0000000 --- a/migrate/changeset/databases/sqlite.py +++ /dev/null @@ -1,229 +0,0 @@ -""" - `SQLite`_ database specific implementations of changeset classes. - - .. _`SQLite`: http://www.sqlite.org/ -""" -try: # Python 3 - from collections.abc import MutableMapping as DictMixin -except ImportError: # Python 2 - from UserDict import DictMixin -from copy import copy -import re - -from sqlalchemy.databases import sqlite as sa_base -from sqlalchemy.schema import ForeignKeyConstraint -from sqlalchemy.schema import UniqueConstraint - -from migrate import exceptions -from migrate.changeset import ansisql - - -SQLiteSchemaGenerator = sa_base.SQLiteDDLCompiler - - -class SQLiteCommon(object): - - def _not_supported(self, op): - raise exceptions.NotSupportedError("SQLite does not support " - "%s; see http://www.sqlite.org/lang_altertable.html" % op) - - -class SQLiteHelper(SQLiteCommon): - - def _filter_columns(self, cols, table): - """Splits the string of columns and returns those only in the table. - - :param cols: comma-delimited string of table columns - :param table: the table to check - :return: list of columns in the table - """ - columns = [] - for c in cols.split(","): - if c in table.columns: - # There was a bug in reflection of SQLite columns with - # reserved identifiers as names (SQLite can return them - # wrapped with double quotes), so strip double quotes. - columns.extend(c.strip(' "')) - return columns - - def _get_constraints(self, table): - """Retrieve information about existing constraints of the table - - This feature is needed for recreate_table() to work properly. - """ - - data = table.metadata.bind.execute( - """SELECT sql - FROM sqlite_master - WHERE - type='table' AND - name=:table_name""", - table_name=table.name - ).fetchone()[0] - - UNIQUE_PATTERN = "CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)" - constraints = [] - for name, cols in re.findall(UNIQUE_PATTERN, data): - # Filter out any columns that were dropped from the table. - columns = self._filter_columns(cols, table) - if columns: - constraints.extend(UniqueConstraint(*columns, name=name)) - - FKEY_PATTERN = "CONSTRAINT (\w+) FOREIGN KEY \(([^\)]+)\)" - for name, cols in re.findall(FKEY_PATTERN, data): - # Filter out any columns that were dropped from the table. 
- columns = self._filter_columns(cols, table) - if columns: - constraints.extend(ForeignKeyConstraint(*columns, name=name)) - - return constraints - - def recreate_table(self, table, column=None, delta=None, - omit_constraints=None): - table_name = self.preparer.format_table(table) - - # we remove all indexes so as not to have - # problems during copy and re-create - for index in table.indexes: - index.drop() - - # reflect existing constraints - for constraint in self._get_constraints(table): - table.append_constraint(constraint) - # omit given constraints when creating a new table if required - table.constraints = set([ - cons for cons in table.constraints - if omit_constraints is None or cons.name not in omit_constraints - ]) - - # Use "PRAGMA legacy_alter_table = ON" with sqlite >= 3.26 when - # using "ALTER TABLE RENAME TO migration_tmp" to maintain legacy - # behavior. See: https://www.sqlite.org/src/info/ae9638e9c0ad0c36 - if self.connection.engine.dialect.server_version_info >= (3, 26): - self.append('PRAGMA legacy_alter_table = ON') - self.execute() - self.append('ALTER TABLE %s RENAME TO migration_tmp' % table_name) - self.execute() - if self.connection.engine.dialect.server_version_info >= (3, 26): - self.append('PRAGMA legacy_alter_table = OFF') - self.execute() - - insertion_string = self._modify_table(table, column, delta) - - table.create(bind=self.connection) - self.append(insertion_string % {'table_name': table_name}) - self.execute() - self.append('DROP TABLE migration_tmp') - self.execute() - - def visit_column(self, delta): - if isinstance(delta, DictMixin): - column = delta.result_column - table = self._to_table(delta.table) - else: - column = delta - table = self._to_table(column.table) - self.recreate_table(table,column,delta) - -class SQLiteColumnGenerator(SQLiteSchemaGenerator, - ansisql.ANSIColumnGenerator, - # at the end so we get the normal - # visit_column by default - SQLiteHelper, - SQLiteCommon - ): - """SQLite ColumnGenerator""" - - def _modify_table(self, table, column, delta): - columns = ' ,'.join(map( - self.preparer.format_column, - [c for c in table.columns if c.name!=column.name])) - return ('INSERT INTO %%(table_name)s (%(cols)s) ' - 'SELECT %(cols)s from migration_tmp')%{'cols':columns} - - def visit_column(self,column): - if column.foreign_keys: - SQLiteHelper.visit_column(self,column) - else: - super(SQLiteColumnGenerator,self).visit_column(column) - -class SQLiteColumnDropper(SQLiteHelper, ansisql.ANSIColumnDropper): - """SQLite ColumnDropper""" - - def _modify_table(self, table, column, delta): - - columns = ' ,'.join(map(self.preparer.format_column, table.columns)) - return 'INSERT INTO %(table_name)s SELECT ' + columns + \ - ' from migration_tmp' - - def visit_column(self,column): - # For SQLite, we *have* to remove the column here so the table - # is re-created properly. 
- column.remove_from_table(column.table,unset_table=False) - super(SQLiteColumnDropper,self).visit_column(column) - - -class SQLiteSchemaChanger(SQLiteHelper, ansisql.ANSISchemaChanger): - """SQLite SchemaChanger""" - - def _modify_table(self, table, column, delta): - return 'INSERT INTO %(table_name)s SELECT * from migration_tmp' - - def visit_index(self, index): - """Does not support ALTER INDEX""" - self._not_supported('ALTER INDEX') - - -class SQLiteConstraintGenerator(ansisql.ANSIConstraintGenerator, SQLiteHelper, SQLiteCommon): - - def visit_migrate_primary_key_constraint(self, constraint): - tmpl = "CREATE UNIQUE INDEX %s ON %s ( %s )" - cols = ', '.join(map(self.preparer.format_column, constraint.columns)) - tname = self.preparer.format_table(constraint.table) - name = self.get_constraint_name(constraint) - msg = tmpl % (name, tname, cols) - self.append(msg) - self.execute() - - def _modify_table(self, table, column, delta): - return 'INSERT INTO %(table_name)s SELECT * from migration_tmp' - - def visit_migrate_foreign_key_constraint(self, *p, **k): - self.recreate_table(p[0].table) - - def visit_migrate_unique_constraint(self, *p, **k): - self.recreate_table(p[0].table) - - -class SQLiteConstraintDropper(ansisql.ANSIColumnDropper, - SQLiteHelper, - ansisql.ANSIConstraintCommon): - - def _modify_table(self, table, column, delta): - return 'INSERT INTO %(table_name)s SELECT * from migration_tmp' - - def visit_migrate_primary_key_constraint(self, constraint): - tmpl = "DROP INDEX %s " - name = self.get_constraint_name(constraint) - msg = tmpl % (name) - self.append(msg) - self.execute() - - def visit_migrate_foreign_key_constraint(self, *p, **k): - self.recreate_table(p[0].table, omit_constraints=[p[0].name]) - - def visit_migrate_check_constraint(self, *p, **k): - self._not_supported('ALTER TABLE DROP CONSTRAINT') - - def visit_migrate_unique_constraint(self, *p, **k): - self.recreate_table(p[0].table, omit_constraints=[p[0].name]) - - -# TODO: technically primary key is a NOT NULL + UNIQUE constraint, should add NOT NULL to index - -class SQLiteDialect(ansisql.ANSIDialect): - columngenerator = SQLiteColumnGenerator - columndropper = SQLiteColumnDropper - schemachanger = SQLiteSchemaChanger - constraintgenerator = SQLiteConstraintGenerator - constraintdropper = SQLiteConstraintDropper diff --git a/migrate/changeset/databases/visitor.py b/migrate/changeset/databases/visitor.py deleted file mode 100644 index c70aa6b..0000000 --- a/migrate/changeset/databases/visitor.py +++ /dev/null @@ -1,88 +0,0 @@ -""" - Module for visitor class mapping. -""" -import sqlalchemy as sa - -from migrate.changeset import ansisql -from migrate.changeset.databases import (sqlite, - postgres, - mysql, - oracle, - firebird) - - -# Map SA dialects to the corresponding Migrate extensions -DIALECTS = { - "default": ansisql.ANSIDialect, - "sqlite": sqlite.SQLiteDialect, - "postgres": postgres.PGDialect, - "postgresql": postgres.PGDialect, - "mysql": mysql.MySQLDialect, - "oracle": oracle.OracleDialect, - "firebird": firebird.FBDialect, -} - - -# NOTE(mriedem): We have to conditionally check for DB2 in case ibm_db_sa -# isn't available since ibm_db_sa is not packaged in sqlalchemy like the -# other dialects. -try: - from migrate.changeset.databases import ibmdb2 - DIALECTS["ibm_db_sa"] = ibmdb2.IBMDBDialect -except ImportError: - pass - - -def get_engine_visitor(engine, name): - """ - Get the visitor implementation for the given database engine. 
- - :param engine: SQLAlchemy Engine - :param name: Name of the visitor - :type name: string - :type engine: Engine - :returns: visitor - """ - # TODO: link to supported visitors - return get_dialect_visitor(engine.dialect, name) - - -def get_dialect_visitor(sa_dialect, name): - """ - Get the visitor implementation for the given dialect. - - Finds the visitor implementation based on the dialect class and - returns and instance initialized with the given name. - - Binds dialect specific preparer to visitor. - """ - - # map sa dialect to migrate dialect and return visitor - sa_dialect_name = getattr(sa_dialect, 'name', 'default') - migrate_dialect_cls = DIALECTS[sa_dialect_name] - visitor = getattr(migrate_dialect_cls, name) - - # bind preparer - visitor.preparer = sa_dialect.preparer(sa_dialect) - - return visitor - -def run_single_visitor(engine, visitorcallable, element, - connection=None, **kwargs): - """Taken from :meth:`sqlalchemy.engine.base.Engine._run_single_visitor` - with support for migrate visitors. - """ - if connection is None: - conn = engine.contextual_connect(close_with_result=False) - else: - conn = connection - visitor = visitorcallable(engine.dialect, conn) - try: - if hasattr(element, '__migrate_visit_name__'): - fn = getattr(visitor, 'visit_' + element.__migrate_visit_name__) - else: - fn = getattr(visitor, 'visit_' + element.__visit_name__) - fn(element, **kwargs) - finally: - if connection is None: - conn.close() diff --git a/migrate/changeset/schema.py b/migrate/changeset/schema.py deleted file mode 100644 index a33be4b..0000000 --- a/migrate/changeset/schema.py +++ /dev/null @@ -1,705 +0,0 @@ -""" - Schema module providing common schema operations. -""" -import abc -try: # Python 3 - from collections.abc import MutableMapping as DictMixin -except ImportError: # Python 2 - from UserDict import DictMixin -import warnings - -import six -import sqlalchemy - -from sqlalchemy.schema import ForeignKeyConstraint -from sqlalchemy.schema import UniqueConstraint - -from migrate.exceptions import * -from migrate.changeset import SQLA_07, SQLA_08 -from migrate.changeset import util -from migrate.changeset.databases.visitor import (get_engine_visitor, - run_single_visitor) - - -__all__ = [ - 'create_column', - 'drop_column', - 'alter_column', - 'rename_table', - 'rename_index', - 'ChangesetTable', - 'ChangesetColumn', - 'ChangesetIndex', - 'ChangesetDefaultClause', - 'ColumnDelta', -] - -def create_column(column, table=None, *p, **kw): - """Create a column, given the table. - - API to :meth:`ChangesetColumn.create`. - """ - if table is not None: - return table.create_column(column, *p, **kw) - return column.create(*p, **kw) - - -def drop_column(column, table=None, *p, **kw): - """Drop a column, given the table. - - API to :meth:`ChangesetColumn.drop`. - """ - if table is not None: - return table.drop_column(column, *p, **kw) - return column.drop(*p, **kw) - - -def rename_table(table, name, engine=None, **kw): - """Rename a table. - - If Table instance is given, engine is not used. - - API to :meth:`ChangesetTable.rename`. - - :param table: Table to be renamed. - :param name: New name for Table. - :param engine: Engine instance. - :type table: string or Table instance - :type name: string - :type engine: obj - """ - table = _to_table(table, engine) - table.rename(name, **kw) - - -def rename_index(index, name, table=None, engine=None, **kw): - """Rename an index. - - If Index instance is given, - table and engine are not used. - - API to :meth:`ChangesetIndex.rename`. 
- - :param index: Index to be renamed. - :param name: New name for index. - :param table: Table to which Index is reffered. - :param engine: Engine instance. - :type index: string or Index instance - :type name: string - :type table: string or Table instance - :type engine: obj - """ - index = _to_index(index, table, engine) - index.rename(name, **kw) - - -def alter_column(*p, **k): - """Alter a column. - - This is a helper function that creates a :class:`ColumnDelta` and - runs it. - - :argument column: - The name of the column to be altered or a - :class:`ChangesetColumn` column representing it. - - :param table: - A :class:`~sqlalchemy.schema.Table` or table name to - for the table where the column will be changed. - - :param engine: - The :class:`~sqlalchemy.engine.base.Engine` to use for table - reflection and schema alterations. - - :returns: A :class:`ColumnDelta` instance representing the change. - - - """ - - if 'table' not in k and isinstance(p[0], sqlalchemy.Column): - k['table'] = p[0].table - if 'engine' not in k: - k['engine'] = k['table'].bind - - # deprecation - if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column): - warnings.warn( - "Passing a Column object to alter_column is deprecated." - " Just pass in keyword parameters instead.", - MigrateDeprecationWarning - ) - engine = k['engine'] - - # enough tests seem to break when metadata is always altered - # that this crutch has to be left in until they can be sorted - # out - k['alter_metadata']=True - - delta = ColumnDelta(*p, **k) - - visitorcallable = get_engine_visitor(engine, 'schemachanger') - engine._run_visitor(visitorcallable, delta) - - return delta - - -def _to_table(table, engine=None): - """Return if instance of Table, else construct new with metadata""" - if isinstance(table, sqlalchemy.Table): - return table - - # Given: table name, maybe an engine - meta = sqlalchemy.MetaData() - if engine is not None: - meta.bind = engine - return sqlalchemy.Table(table, meta) - - -def _to_index(index, table=None, engine=None): - """Return if instance of Index, else construct new with metadata""" - if isinstance(index, sqlalchemy.Index): - return index - - # Given: index name; table name required - table = _to_table(table, engine) - ret = sqlalchemy.Index(index) - ret.table = table - return ret - - - -# Python3: if we just use: -# -# class ColumnDelta(DictMixin, sqlalchemy.schema.SchemaItem): -# ... -# -# We get the following error: -# TypeError: metaclass conflict: the metaclass of a derived class must be a -# (non-strict) subclass of the metaclasses of all its bases. -# -# The complete inheritance/metaclass relationship list of ColumnDelta can be -# summarized by this following dot file: -# -# digraph test123 { -# ColumnDelta -> MutableMapping; -# MutableMapping -> Mapping; -# Mapping -> {Sized Iterable Container}; -# {Sized Iterable Container} -> ABCMeta[style=dashed]; -# -# ColumnDelta -> SchemaItem; -# SchemaItem -> {SchemaEventTarget Visitable}; -# SchemaEventTarget -> object; -# Visitable -> {VisitableType object} [style=dashed]; -# VisitableType -> type; -# } -# -# We need to use a metaclass that inherits from all the metaclasses of -# DictMixin and sqlalchemy.schema.SchemaItem. Let's call it "MyMeta". 
-class MyMeta(sqlalchemy.sql.visitors.VisitableType, abc.ABCMeta, object): - pass - - -class ColumnDelta(six.with_metaclass(MyMeta, DictMixin, sqlalchemy.schema.SchemaItem)): - """Extracts the differences between two columns/column-parameters - - May receive parameters arranged in several different ways: - - * **current_column, new_column, \*p, \*\*kw** - Additional parameters can be specified to override column - differences. - - * **current_column, \*p, \*\*kw** - Additional parameters alter current_column. Table name is extracted - from current_column object. - Name is changed to current_column.name from current_name, - if current_name is specified. - - * **current_col_name, \*p, \*\*kw** - Table kw must specified. - - :param table: Table at which current Column should be bound to.\ - If table name is given, reflection will be used. - :type table: string or Table instance - - :param metadata: A :class:`MetaData` instance to store - reflected table names - - :param engine: When reflecting tables, either engine or metadata must \ - be specified to acquire engine object. - :type engine: :class:`Engine` instance - :returns: :class:`ColumnDelta` instance provides interface for altered attributes to \ - `result_column` through :func:`dict` alike object. - - * :class:`ColumnDelta`.result_column is altered column with new attributes - - * :class:`ColumnDelta`.current_name is current name of column in db - - - """ - - # Column attributes that can be altered - diff_keys = ('name', 'type', 'primary_key', 'nullable', - 'server_onupdate', 'server_default', 'autoincrement') - diffs = dict() - __visit_name__ = 'column' - - def __init__(self, *p, **kw): - # 'alter_metadata' is not a public api. It exists purely - # as a crutch until the tests that fail when 'alter_metadata' - # behaviour always happens can be sorted out - self.alter_metadata = kw.pop("alter_metadata", False) - - self.meta = kw.pop("metadata", None) - self.engine = kw.pop("engine", None) - - # Things are initialized differently depending on how many column - # parameters are given. Figure out how many and call the appropriate - # method. 
-        if len(p) >= 1 and isinstance(p[0], sqlalchemy.Column):
-            # At least one column specified
-            if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column):
-                # Two columns specified
-                diffs = self.compare_2_columns(*p, **kw)
-            else:
-                # Exactly one column specified
-                diffs = self.compare_1_column(*p, **kw)
-        else:
-            # Zero columns specified
-            if not len(p) or not isinstance(p[0], six.string_types):
-                raise ValueError("First argument must be column name")
-            diffs = self.compare_parameters(*p, **kw)
-
-        self.apply_diffs(diffs)
-
-    def __repr__(self):
-        return '<ColumnDelta altermetadata=%r, %s>' % (
-            self.alter_metadata,
-            super(ColumnDelta, self).__repr__()
-        )
-
-    def __getitem__(self, key):
-        if key not in self.keys():
-            raise KeyError("No such diff key, available: %s" % self.diffs)
-        return getattr(self.result_column, key)
-
-    def __setitem__(self, key, value):
-        if key not in self.keys():
-            raise KeyError("No such diff key, available: %s" % self.diffs)
-        setattr(self.result_column, key, value)
-
-    def __delitem__(self, key):
-        raise NotImplementedError
-
-    def __len__(self):
-        raise NotImplementedError
-
-    def __iter__(self):
-        raise NotImplementedError
-
-    def keys(self):
-        return self.diffs.keys()
-
-    def compare_parameters(self, current_name, *p, **k):
-        """Compares Column objects with reflection"""
-        self.table = k.pop('table')
-        self.result_column = self._table.c.get(current_name)
-        if len(p):
-            k = self._extract_parameters(p, k, self.result_column)
-        return k
-
-    def compare_1_column(self, col, *p, **k):
-        """Compares one Column object"""
-        self.table = k.pop('table', None)
-        if self.table is None:
-            self.table = col.table
-        self.result_column = col
-        if len(p):
-            k = self._extract_parameters(p, k, self.result_column)
-        return k
-
-    def compare_2_columns(self, old_col, new_col, *p, **k):
-        """Compares two Column objects"""
-        self.process_column(new_col)
-        self.table = k.pop('table', None)
-        # we cannot use bool() on table in SA06
-        if self.table is None:
-            self.table = old_col.table
-        if self.table is None:
-            self.table = new_col.table
-        self.result_column = old_col
-
-        # set differences
-        # leave out some stuff for later comp
-        for key in (set(self.diff_keys) - set(('type',))):
-            val = getattr(new_col, key, None)
-            if getattr(self.result_column, key, None) != val:
-                k.setdefault(key, val)
-
-        # inspect types
-        if not self.are_column_types_eq(self.result_column.type, new_col.type):
-            k.setdefault('type', new_col.type)
-
-        if len(p):
-            k = self._extract_parameters(p, k, self.result_column)
-        return k
-
-    def apply_diffs(self, diffs):
-        """Populate dict and column object with new values"""
-        self.diffs = diffs
-        for key in self.diff_keys:
-            if key in diffs:
-                setattr(self.result_column, key, diffs[key])
-
-        self.process_column(self.result_column)
-
-        # instantiate the type if a type class (rather than an instance) was given
-        if 'type' in diffs:
-            if callable(self.result_column.type):
-                self.result_column.type = self.result_column.type()
-            if self.result_column.autoincrement and \
-                    not issubclass(
-                        self.result_column.type._type_affinity,
-                        sqlalchemy.Integer):
-                self.result_column.autoincrement = False
-
-        # add column to the table
-        if self.table is not None and self.alter_metadata:
-            self.result_column.add_to_table(self.table)
-
-    def are_column_types_eq(self, old_type, new_type):
-        """Compares two types for equality"""
-        ret = old_type.__class__ == new_type.__class__
-
-        # String length is a special case
-        if ret and isinstance(new_type, sqlalchemy.types.String):
-            ret = (getattr(old_type, 'length', None) ==
-                   getattr(new_type, 'length', None))
-        return ret
-
-    def _extract_parameters(self, p, k, column):
-        """Extracts data from p and modifies diffs"""
-        p = list(p)
-        while len(p):
-            if isinstance(p[0], six.string_types):
-                k.setdefault('name', p.pop(0))
-            elif isinstance(p[0], sqlalchemy.types.TypeEngine):
-                k.setdefault('type', p.pop(0))
-            elif callable(p[0]):
-                p[0] = p[0]()
-            else:
-                break
-
-        if len(p):
-            new_col = column.copy_fixed()
-            new_col._init_items(*p)
-            k = self.compare_2_columns(column, new_col, **k)
-        return k
-
-    def process_column(self, column):
-        """Processes default values for column"""
-        # XXX: this is a snippet from SA processing of positional parameters
-        toinit = list()
-
-        if column.server_default is not None:
-            if isinstance(column.server_default, sqlalchemy.FetchedValue):
-                toinit.append(column.server_default)
-            else:
-                toinit.append(sqlalchemy.DefaultClause(column.server_default))
-        if column.server_onupdate is not None:
-            if isinstance(column.server_onupdate, sqlalchemy.FetchedValue):
-                toinit.append(column.server_onupdate)
-            else:
-                toinit.append(sqlalchemy.DefaultClause(column.server_onupdate,
-                                                       for_update=True))
-        if toinit:
-            column._init_items(*toinit)
-
-    def _get_table(self):
-        return getattr(self, '_table', None)
-
-    def _set_table(self, table):
-        if isinstance(table, six.string_types):
-            if self.alter_metadata:
-                if not self.meta:
-                    raise ValueError("metadata must be specified for table"
-                        " reflection when using alter_metadata")
-                meta = self.meta
-                if self.engine:
-                    meta.bind = self.engine
-            else:
-                if not self.engine and not self.meta:
-                    raise ValueError("engine or metadata must be specified"
-                        " to reflect tables")
-                if not self.engine:
-                    self.engine = self.meta.bind
-                meta = sqlalchemy.MetaData(bind=self.engine)
-            self._table = sqlalchemy.Table(table, meta, autoload=True)
-        elif isinstance(table, sqlalchemy.Table):
-            self._table = table
-            if not self.alter_metadata:
-                self._table.meta = sqlalchemy.MetaData(bind=self._table.bind)
-
-    def _get_result_column(self):
-        return getattr(self, '_result_column', None)
-
-    def _set_result_column(self, column):
-        """Set Column to Table based on alter_metadata evaluation."""
-        self.process_column(column)
-        if not hasattr(self, 'current_name'):
-            self.current_name = column.name
-        if self.alter_metadata:
-            self._result_column = column
-        else:
-            self._result_column = column.copy_fixed()
-
-    table = property(_get_table, _set_table)
-    result_column = property(_get_result_column, _set_result_column)
-
-
-class ChangesetTable(object):
-    """Changeset extensions to SQLAlchemy tables."""
-
-    def create_column(self, column, *p, **kw):
-        """Creates a column.
-
-        The column parameter may be a column definition or the name of
-        a column in this table.
-
-        API to :meth:`ChangesetColumn.create`
-
-        :param column: Column to be created
-        :type column: Column instance or string
-        """
-        if not isinstance(column, sqlalchemy.Column):
-            # It's a column name
-            column = getattr(self.c, str(column))
-        column.create(table=self, *p, **kw)
-
-    def drop_column(self, column, *p, **kw):
-        """Drop a column, given its name or definition.
-
-        API to :meth:`ChangesetColumn.drop`
-
-        :param column: Column to be dropped
-        :type column: Column instance or string
-        """
-        if not isinstance(column, sqlalchemy.Column):
-            # It's a column name
-            try:
-                column = getattr(self.c, str(column))
-            except AttributeError:
-                # That column isn't part of the table. We don't need
-                # its entire definition to drop the column, just its
-                # name, so create a dummy column with the same name.
-                column = sqlalchemy.Column(str(column), sqlalchemy.Integer())
-        column.drop(table=self, *p, **kw)
-
-    def rename(self, name, connection=None, **kwargs):
-        """Rename this table.
-
-        :param name: New name of the table.
-        :type name: string
-        :param connection: reuse connection instead of creating a new one.
-        :type connection: :class:`sqlalchemy.engine.base.Connection` instance
-        """
-        engine = self.bind
-        self.new_name = name
-        visitorcallable = get_engine_visitor(engine, 'schemachanger')
-        run_single_visitor(engine, visitorcallable, self, connection, **kwargs)
-
-        # Fix metadata registration
-        self.name = name
-        self.deregister()
-        self._set_parent(self.metadata)
-
-    def _meta_key(self):
-        """Get the meta key for this table."""
-        return sqlalchemy.schema._get_table_key(self.name, self.schema)
-
-    def deregister(self):
-        """Remove this table from its metadata"""
-        if SQLA_07:
-            self.metadata._remove_table(self.name, self.schema)
-        else:
-            key = self._meta_key()
-            meta = self.metadata
-            if key in meta.tables:
-                del meta.tables[key]
-
-
-class ChangesetColumn(object):
-    """Changeset extensions to SQLAlchemy columns."""
-
-    def alter(self, *p, **k):
-        """Makes a call to :func:`alter_column` for the column this
-        method is called on.
-        """
-        if 'table' not in k:
-            k['table'] = self.table
-        if 'engine' not in k:
-            k['engine'] = k['table'].bind
-        return alter_column(self, *p, **k)
-
-    def create(self, table=None, index_name=None, unique_name=None,
-               primary_key_name=None, populate_default=True, connection=None, **kwargs):
-        """Create this column in the database.
-
-        Assumes the given table exists. ``ALTER TABLE ADD COLUMN``,
-        for most databases.
-
-        :param table: Table instance to create on.
-        :param index_name: Creates :class:`ChangesetIndex` on this column.
-        :param unique_name: Creates :class:\
-`~migrate.changeset.constraint.UniqueConstraint` on this column.
-        :param primary_key_name: Creates :class:\
-`~migrate.changeset.constraint.PrimaryKeyConstraint` on this column.
-        :param populate_default: If True, the created column will be \
-populated with its default value.
-        :param connection: reuse connection instead of creating a new one.
-        :type table: Table instance
-        :type index_name: string
-        :type unique_name: string
-        :type primary_key_name: string
-        :type populate_default: bool
-        :type connection: :class:`sqlalchemy.engine.base.Connection` instance
-
-        :returns: self
-        """
-        self.populate_default = populate_default
-        self.index_name = index_name
-        self.unique_name = unique_name
-        self.primary_key_name = primary_key_name
-        for cons in ('index_name', 'unique_name', 'primary_key_name'):
-            self._check_sanity_constraints(cons)
-
-        self.add_to_table(table)
-        engine = self.table.bind
-        visitorcallable = get_engine_visitor(engine, 'columngenerator')
-        engine._run_visitor(visitorcallable, self, connection, **kwargs)
-
-        # TODO: reuse existing connection
-        if self.populate_default and self.default is not None:
-            stmt = table.update().values({self: engine._execute_default(self.default)})
-            engine.execute(stmt)
-
-        return self
-
-    def drop(self, table=None, connection=None, **kwargs):
-        """Drop this column from the database, leaving its table intact.
-
-        ``ALTER TABLE DROP COLUMN``, for most databases.
-
-        :param connection: reuse connection instead of creating a new one.
-        :type connection: :class:`sqlalchemy.engine.base.Connection` instance
-        """
-        if table is not None:
-            self.table = table
-        engine = self.table.bind
-        visitorcallable = get_engine_visitor(engine, 'columndropper')
-        engine._run_visitor(visitorcallable, self, connection, **kwargs)
-        self.remove_from_table(self.table, unset_table=False)
-        self.table = None
-        return self
-
-    def add_to_table(self, table):
-        if table is not None and self.table is None:
-            if SQLA_07:
-                table.append_column(self)
-            else:
-                self._set_parent(table)
-
-    def _col_name_in_constraint(self, cons, name):
-        return False
-
-    def remove_from_table(self, table, unset_table=True):
-        # TODO: remove primary keys, constraints, etc
-        if unset_table:
-            self.table = None
-
-        to_drop = set()
-        for index in table.indexes:
-            columns = []
-            for col in index.columns:
-                if col.name != self.name:
-                    columns.append(col)
-            if columns:
-                index.columns = columns
-                if SQLA_08:
-                    index.expressions = columns
-            else:
-                to_drop.add(index)
-        table.indexes = table.indexes - to_drop
-
-        to_drop = set()
-        for cons in table.constraints:
-            # TODO: deal with other types of constraint
-            if isinstance(cons, (ForeignKeyConstraint,
-                                 UniqueConstraint)):
-                for col_name in cons.columns:
-                    if not isinstance(col_name, six.string_types):
-                        col_name = col_name.name
-                    if self.name == col_name:
-                        to_drop.add(cons)
-        table.constraints = table.constraints - to_drop
-
-        if table.c.contains_column(self):
-            if SQLA_07:
-                table._columns.remove(self)
-            else:
-                table.c.remove(self)
-
-    # TODO: this is fixed in 0.6
-    def copy_fixed(self, **kw):
-        """Create a copy of this ``Column``, with all attributes."""
-        return sqlalchemy.Column(self.name, self.type, self.default,
-                                 key=self.key,
-                                 primary_key=self.primary_key,
-                                 nullable=self.nullable,
-                                 index=self.index,
-                                 unique=self.unique,
-                                 onupdate=self.onupdate,
-                                 autoincrement=self.autoincrement,
-                                 server_default=self.server_default,
-                                 server_onupdate=self.server_onupdate,
-                                 *[c.copy(**kw) for c in self.constraints])
-
-    def _check_sanity_constraints(self, name):
-        """Check that constraint names are correct"""
-        obj = getattr(self, name)
-        if (getattr(self, name[:-5]) and not obj):
-            raise InvalidConstraintError("Column.create() accepts index_name,"
-                " primary_key_name and unique_name to generate constraints")
-        if not isinstance(obj, six.string_types) and obj is not None:
-            raise InvalidConstraintError(
-                "%s argument for column must be constraint name" % name)
-
-
-class ChangesetIndex(object):
-    """Changeset extensions to SQLAlchemy Indexes."""
-
-    __visit_name__ = 'index'
-
-    def rename(self, name, connection=None, **kwargs):
-        """Change the name of an index.
-
-        :param name: New name of the Index.
-        :type name: string
-        :param connection: reuse connection instead of creating a new one.
- :type connection: :class:`sqlalchemy.engine.base.Connection` instance - """ - engine = self.table.bind - self.new_name = name - visitorcallable = get_engine_visitor(engine, 'schemachanger') - engine._run_visitor(visitorcallable, self, connection, **kwargs) - self.name = name - - -class ChangesetDefaultClause(object): - """Implements comparison between :class:`DefaultClause` instances""" - - def __eq__(self, other): - if isinstance(other, self.__class__): - if self.arg == other.arg: - return True - - def __ne__(self, other): - return not self.__eq__(other) diff --git a/migrate/changeset/util.py b/migrate/changeset/util.py deleted file mode 100644 index 68b7609..0000000 --- a/migrate/changeset/util.py +++ /dev/null @@ -1,10 +0,0 @@ -from migrate.changeset import SQLA_10 - - -def fk_column_names(constraint): - if SQLA_10: - return [ - constraint.columns[key].name for key in constraint.column_keys] - else: - return [ - element.parent.name for element in constraint.elements] |
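For readers tracing this removal, alter_column() and the ColumnDelta it returns were the main entry points of the changeset API deleted above. A minimal usage sketch, assuming one of the older SQLAlchemy releases this module supported, a throwaway SQLite database, and hypothetical table and column names (account, login, username):

    import sqlalchemy as sa
    from migrate.changeset.schema import alter_column

    engine = sa.create_engine('sqlite:///demo.db')      # hypothetical database
    meta = sa.MetaData(bind=engine)
    account = sa.Table('account', meta, autoload=True)  # reflect the existing table

    # Rename the 'login' column to 'username' and widen its type in one call.
    # The returned ColumnDelta records which attributes were changed.
    delta = alter_column('login', name='username', type=sa.String(80),
                         table=account)

    print(list(delta.keys()))      # dict-like view of the altered attributes
    print(delta.result_column)     # Column carrying the new name and type

The same change can also be spelled column-first, e.g. account.c.login.alter(name='username'), which is what the ChangesetColumn.alter() wrapper above forwards to.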
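Column.create() and Column.drop(), documented in the docstrings above, issue ALTER TABLE ADD/DROP COLUMN. A sketch under the same hypothetical names; importing migrate.changeset is what grafts these methods onto SQLAlchemy's Column, Table and Index classes:

    import sqlalchemy as sa
    import migrate.changeset  # noqa: the import installs the changeset extensions

    engine = sa.create_engine('sqlite:///demo.db')
    meta = sa.MetaData(bind=engine)
    account = sa.Table('account', meta, autoload=True)

    # ALTER TABLE account ADD COLUMN notes ..., then backfill existing rows
    # from the Python-side default because populate_default=True.
    notes = sa.Column('notes', sa.Text, default='')
    notes.create(account, populate_default=True)

    # ALTER TABLE account DROP COLUMN notes
    notes.drop()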
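The table- and index-level counterparts (create_column, drop_column, Table.rename, Index.rename) follow the same pattern; the exact SQL emitted comes from the per-dialect visitors in migrate.changeset.databases. Hypothetical names again, and the sketch assumes the reflected table already has a username column:

    import sqlalchemy as sa
    import migrate.changeset  # noqa

    engine = sa.create_engine('sqlite:///demo.db')
    meta = sa.MetaData(bind=engine)
    account = sa.Table('account', meta, autoload=True)

    account.create_column(sa.Column('flags', sa.Integer, default=0))
    account.drop_column('flags')

    idx = sa.Index('ix_account_username', account.c.username)
    idx.create(engine)                 # plain SQLAlchemy DDL
    idx.rename('ix_account_login')     # changeset extension shown above

    account.rename('account_archive')  # ALTER TABLE ... RENAME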
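Finally, the small util.fk_column_names() helper at the bottom of this diff only papers over a change in how ForeignKeyConstraint exposes its columns across SQLAlchemy versions. A self-contained sketch with a hypothetical parent/child schema:

    import sqlalchemy as sa
    from migrate.changeset.util import fk_column_names

    meta = sa.MetaData()
    parent = sa.Table('parent', meta,
                      sa.Column('id', sa.Integer, primary_key=True))
    child = sa.Table('child', meta,
                     sa.Column('id', sa.Integer, primary_key=True),
                     sa.Column('parent_id', sa.Integer),
                     sa.ForeignKeyConstraint(['parent_id'], ['parent.id'],
                                             name='fk_child_parent'))

    fk = next(c for c in child.constraints
              if isinstance(c, sa.ForeignKeyConstraint))
    print(fk_column_names(fk))   # ['parent_id'] on both old and new SQLAlchemy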