diff options
author | Mike Bayer <mike_mp@zzzcomputing.com> | 2009-08-06 21:11:27 +0000 |
---|---|---|
committer | Mike Bayer <mike_mp@zzzcomputing.com> | 2009-08-06 21:11:27 +0000 |
commit | 8fc5005dfe3eb66a46470ad8a8c7b95fc4d6bdca (patch) | |
tree | ae9e27d12c9fbf8297bb90469509e1cb6a206242 /lib/sqlalchemy/engine/ddl.py | |
parent | 7638aa7f242c6ea3d743aa9100e32be2052546a6 (diff) | |
download | sqlalchemy-8fc5005dfe3eb66a46470ad8a8c7b95fc4d6bdca.tar.gz |
merge 0.6 series to trunk.
Diffstat (limited to 'lib/sqlalchemy/engine/ddl.py')
-rw-r--r-- | lib/sqlalchemy/engine/ddl.py | 128 |
1 file changed, 128 insertions, 0 deletions
# engine/ddl.py
# Copyright (C) 2009 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Routines to handle CREATE/DROP workflow.

``SchemaGenerator`` and ``SchemaDropper`` are schema visitors that walk a
``MetaData`` (or an explicit subset of its tables) and emit the
corresponding CREATE / DROP statements on a connection, firing any
registered ddl listeners around each step.
"""

from sqlalchemy import engine, schema
from sqlalchemy.sql import util as sql_util


class DDLBase(schema.SchemaVisitor):
    """Common base for DDL-emitting visitors; holds the target connection."""

    def __init__(self, connection):
        self.connection = connection

class SchemaGenerator(DDLBase):
    """Visitor that emits CREATE statements for tables, sequences and
    indexes.

    :param dialect: dialect used for identifier validation, existence
      checks, and capability flags (e.g. ``supports_sequences``).
    :param connection: connection the DDL is executed on.
    :param checkfirst: when True, skip objects that already exist.
    :param tables: optional collection restricting creation to these
      tables; None (or an empty collection) means all tables in the
      MetaData.
    """

    def __init__(self, dialect, connection, checkfirst=False, tables=None, **kwargs):
        super(SchemaGenerator, self).__init__(connection, **kwargs)
        self.checkfirst = checkfirst
        # Normalize to a set, or None for "all tables".  Replaces the
        # fragile ``tables and set(tables) or None`` and/or-conditional
        # idiom with an explicit, equivalent branch.
        if tables:
            self.tables = set(tables)
        else:
            self.tables = None
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect

    def _can_create(self, table):
        """Return True if ``table`` should be created.

        Always validates the table (and schema) identifiers; when
        ``checkfirst`` is set, additionally skips tables the dialect
        reports as already present.
        """
        self.dialect.validate_identifier(table.name)
        if table.schema:
            self.dialect.validate_identifier(table.schema)
        return not self.checkfirst or \
            not self.dialect.has_table(self.connection, table.name,
                                       schema=table.schema)

    def visit_metadata(self, metadata):
        """Create all eligible tables of ``metadata`` in dependency order,
        firing the metadata-level ddl listeners around the whole batch.
        """
        if self.tables:
            tables = self.tables
        else:
            tables = metadata.tables.values()
        # sort_tables orders by foreign-key dependency so referenced
        # tables are created before their referrers.
        collection = [t for t in sql_util.sort_tables(tables)
                      if self._can_create(t)]

        for listener in metadata.ddl_listeners['before-create']:
            listener('before-create', metadata, self.connection,
                     tables=collection)

        for table in collection:
            self.traverse_single(table)

        for listener in metadata.ddl_listeners['after-create']:
            listener('after-create', metadata, self.connection,
                     tables=collection)

    def visit_table(self, table):
        """Emit CREATE TABLE for ``table``.

        Column defaults (e.g. sequences) are visited first so any
        supporting objects exist before the table; indexes are created
        afterwards.  Table-level ddl listeners fire around the whole
        operation.
        """
        for listener in table.ddl_listeners['before-create']:
            listener('before-create', table, self.connection)

        for column in table.columns:
            if column.default is not None:
                self.traverse_single(column.default)

        self.connection.execute(schema.CreateTable(table))

        if hasattr(table, 'indexes'):
            for index in table.indexes:
                self.traverse_single(index)

        for listener in table.ddl_listeners['after-create']:
            listener('after-create', table, self.connection)

    def visit_sequence(self, sequence):
        """Emit CREATE SEQUENCE when the dialect supports sequences, the
        sequence is actually required (not optional on a dialect where
        sequences are optional), and — under ``checkfirst`` — it does not
        already exist.
        """
        if self.dialect.supports_sequences:
            if ((not self.dialect.sequences_optional or
                 not sequence.optional) and
                (not self.checkfirst or
                 not self.dialect.has_sequence(self.connection, sequence.name))):
                self.connection.execute(schema.CreateSequence(sequence))

    def visit_index(self, index):
        """Emit CREATE INDEX for ``index``."""
        self.connection.execute(schema.CreateIndex(index))


class SchemaDropper(DDLBase):
    """Visitor that emits DROP statements; mirror image of
    ``SchemaGenerator``.

    Parameters are as for ``SchemaGenerator``; ``checkfirst`` here means
    "only drop objects that exist".
    """

    def __init__(self, dialect, connection, checkfirst=False, tables=None, **kwargs):
        super(SchemaDropper, self).__init__(connection, **kwargs)
        self.checkfirst = checkfirst
        # Normalize exactly as SchemaGenerator does (the original stored
        # the raw argument here, inconsistently with the generator).
        if tables:
            self.tables = set(tables)
        else:
            self.tables = None
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect

    def visit_metadata(self, metadata):
        """Drop all eligible tables of ``metadata`` in reverse dependency
        order, firing the metadata-level ddl listeners around the batch.
        """
        if self.tables:
            tables = self.tables
        else:
            tables = metadata.tables.values()
        # Reverse of creation order: referrers are dropped before the
        # tables they reference.
        collection = [t for t in reversed(sql_util.sort_tables(tables))
                      if self._can_drop(t)]

        for listener in metadata.ddl_listeners['before-drop']:
            listener('before-drop', metadata, self.connection,
                     tables=collection)

        for table in collection:
            self.traverse_single(table)

        for listener in metadata.ddl_listeners['after-drop']:
            listener('after-drop', metadata, self.connection,
                     tables=collection)

    def _can_drop(self, table):
        """Return True if ``table`` should be dropped.

        Always validates the identifiers; under ``checkfirst``, only
        tables that actually exist are dropped.
        """
        self.dialect.validate_identifier(table.name)
        if table.schema:
            self.dialect.validate_identifier(table.schema)
        return not self.checkfirst or \
            self.dialect.has_table(self.connection, table.name,
                                   schema=table.schema)

    def visit_index(self, index):
        """Emit DROP INDEX for ``index``."""
        self.connection.execute(schema.DropIndex(index))

    def visit_table(self, table):
        """Emit DROP TABLE for ``table``, visiting column defaults so any
        supporting objects (e.g. sequences) are dropped as well.
        Table-level ddl listeners fire around the operation.
        """
        for listener in table.ddl_listeners['before-drop']:
            listener('before-drop', table, self.connection)

        for column in table.columns:
            if column.default is not None:
                self.traverse_single(column.default)

        self.connection.execute(schema.DropTable(table))

        for listener in table.ddl_listeners['after-drop']:
            listener('after-drop', table, self.connection)

    def visit_sequence(self, sequence):
        """Emit DROP SEQUENCE under the same capability/optionality rules
        as creation; under ``checkfirst``, only if the sequence exists.
        """
        if self.dialect.supports_sequences:
            if ((not self.dialect.sequences_optional or
                 not sequence.optional) and
                (not self.checkfirst or
                 self.dialect.has_sequence(self.connection, sequence.name))):
                self.connection.execute(schema.DropSequence(sequence))