summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2015-07-03 13:10:41 -0400
committerMike Bayer <mike_mp@zzzcomputing.com>2015-07-03 13:10:41 -0400
commit0e43247da4cfd2d829ee4b350e336364cb8a7ec1 (patch)
treeb98e97333832e8db5f3c983f42771a2c15936ccc
parent5ccc81701c08a482efa4b7080ca88608fd237130 (diff)
downloadalembic-0e43247da4cfd2d829ee4b350e336364cb8a7ec1.tar.gz
- squash merge of ticket_302 branch
- The internal system for Alembic operations has been reworked to now build upon an extensible system of operation objects. New operations can be added to the ``op.`` namespace, including that they are available in custom autogenerate schemes. fixes #302 - The internal system for autogenerate has been reworked to build upon the extensible system of operation objects present in #302. A new customization hook process_revision_directives is added to allow manipulation of the autogen stream. Fixes #301
-rw-r--r--.gitignore1
-rw-r--r--alembic/__init__.py8
-rw-r--r--alembic/autogenerate/__init__.py9
-rw-r--r--alembic/autogenerate/api.py327
-rw-r--r--alembic/autogenerate/compare.py54
-rw-r--r--alembic/autogenerate/compose.py144
-rw-r--r--alembic/autogenerate/generate.py92
-rw-r--r--alembic/autogenerate/render.py493
-rw-r--r--alembic/command.py47
-rw-r--r--alembic/config.py11
-rw-r--r--alembic/context.py5
-rw-r--r--alembic/ddl/base.py68
-rw-r--r--alembic/ddl/impl.py75
-rw-r--r--alembic/ddl/mssql.py6
-rw-r--r--alembic/ddl/mysql.py9
-rw-r--r--alembic/ddl/postgresql.py2
-rw-r--r--alembic/op.py6
-rw-r--r--alembic/operations/__init__.py6
-rw-r--r--alembic/operations/base.py442
-rw-r--r--alembic/operations/batch.py (renamed from alembic/batch.py)4
-rw-r--r--alembic/operations/ops.py (renamed from alembic/operations.py)1962
-rw-r--r--alembic/operations/schemaobj.py157
-rw-r--r--alembic/operations/toimpl.py162
-rw-r--r--alembic/runtime/__init__.py0
-rw-r--r--alembic/runtime/environment.py (renamed from alembic/environment.py)53
-rw-r--r--alembic/runtime/migration.py (renamed from alembic/migration.py)4
-rw-r--r--alembic/script/__init__.py3
-rw-r--r--alembic/script/base.py (renamed from alembic/script.py)6
-rw-r--r--alembic/script/revision.py (renamed from alembic/revision.py)5
-rw-r--r--alembic/testing/assertions.py4
-rw-r--r--alembic/testing/env.py6
-rw-r--r--alembic/testing/exclusions.py3
-rw-r--r--alembic/testing/fixtures.py13
-rw-r--r--alembic/testing/mock.py2
-rw-r--r--alembic/testing/provision.py6
-rw-r--r--alembic/util.py405
-rw-r--r--alembic/util/__init__.py20
-rw-r--r--alembic/util/compat.py (renamed from alembic/compat.py)0
-rw-r--r--alembic/util/langhelpers.py275
-rw-r--r--alembic/util/messaging.py94
-rw-r--r--alembic/util/pyfiles.py80
-rw-r--r--alembic/util/sqla_compat.py160
-rw-r--r--docs/build/api.rst217
-rw-r--r--docs/build/api/api_overview.pngbin0 -> 123965 bytes
-rw-r--r--docs/build/api/autogenerate.rst235
-rw-r--r--docs/build/api/commands.rst38
-rw-r--r--docs/build/api/config.rst26
-rw-r--r--docs/build/api/ddl.rst56
-rw-r--r--docs/build/api/environment.rst19
-rw-r--r--docs/build/api/index.rst33
-rw-r--r--docs/build/api/migration.rst8
-rw-r--r--docs/build/api/operations.rst123
-rw-r--r--docs/build/api/overview.rst47
-rw-r--r--docs/build/api/script.rst20
-rw-r--r--docs/build/api_overview.pngbin64697 -> 0 bytes
-rw-r--r--docs/build/assets/api_overview.graffle2252
-rw-r--r--docs/build/changelog.rst38
-rw-r--r--docs/build/cookbook.rst2
-rw-r--r--docs/build/front.rst19
-rw-r--r--docs/build/index.rst4
-rw-r--r--docs/build/ops.rst18
-rw-r--r--tests/_autogen_fixtures.py251
-rw-r--r--tests/test_autogen_composition.py328
-rw-r--r--tests/test_autogen_diffs.py (renamed from tests/test_autogenerate.py)669
-rw-r--r--tests/test_autogen_fks.py4
-rw-r--r--tests/test_autogen_indexes.py2
-rw-r--r--tests/test_autogen_render.py277
-rw-r--r--tests/test_batch.py24
-rw-r--r--tests/test_config.py5
-rw-r--r--tests/test_op.py39
-rw-r--r--tests/test_revision.py2
-rw-r--r--tests/test_script_consumption.py3
-rw-r--r--tests/test_script_production.py177
73 files changed, 6865 insertions, 3300 deletions
diff --git a/.gitignore b/.gitignore
index 0875618..5a97f5e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,3 +10,4 @@ alembic.ini
.coverage
coverage.xml
.tox
+*.patch
diff --git a/alembic/__init__.py b/alembic/__init__.py
index f429441..345bf26 100644
--- a/alembic/__init__.py
+++ b/alembic/__init__.py
@@ -1,9 +1,15 @@
from os import path
-__version__ = '0.7.7'
+__version__ = '0.8.0'
package_dir = path.abspath(path.dirname(__file__))
from . import op # noqa
from . import context # noqa
+
+import sys
+from .runtime import environment
+from .runtime import migration
+sys.modules['alembic.migration'] = migration
+sys.modules['alembic.environment'] = environment
diff --git a/alembic/autogenerate/__init__.py b/alembic/autogenerate/__init__.py
index 2d75912..4272a7e 100644
--- a/alembic/autogenerate/__init__.py
+++ b/alembic/autogenerate/__init__.py
@@ -1,2 +1,7 @@
-from .api import compare_metadata, _produce_migration_diffs, \
- _produce_net_changes
+from .api import ( # noqa
+ compare_metadata, _render_migration_diffs,
+ produce_migrations, render_python_code
+ )
+from .compare import _produce_net_changes # noqa
+from .generate import RevisionContext # noqa
+from .render import render_op_text, renderers # noqa \ No newline at end of file
diff --git a/alembic/autogenerate/api.py b/alembic/autogenerate/api.py
index 6281a6c..cff977b 100644
--- a/alembic/autogenerate/api.py
+++ b/alembic/autogenerate/api.py
@@ -1,26 +1,12 @@
"""Provide the 'autogenerate' feature which can produce migration operations
automatically."""
-import logging
-import itertools
-import re
-
-from ..compat import StringIO
-
-from mako.pygen import PythonPrinter
-from sqlalchemy.engine.reflection import Inspector
-from sqlalchemy.util import OrderedSet
-from .compare import _compare_tables
-from .render import _drop_table, _drop_column, _drop_index, _drop_constraint, \
- _add_table, _add_column, _add_index, _add_constraint, _modify_col, \
- _add_fk_constraint
+from ..operations import ops
+from . import render
+from . import compare
+from . import compose
from .. import util
-log = logging.getLogger(__name__)
-
-###################################################
-# public
-
def compare_metadata(context, metadata):
"""Compare a database schema to that given in a
@@ -105,9 +91,14 @@ def compare_metadata(context, metadata):
:param metadata: a :class:`~sqlalchemy.schema.MetaData`
instance.
+ .. seealso::
+
+ :func:`.produce_migrations` - produces a :class:`.MigrationScript`
+ structure based on metadata comparison.
+
"""
- autogen_context, connection = _autogen_context(context, None)
+ autogen_context = _autogen_context(context, metadata=metadata)
# as_sql=True is nonsensical here. autogenerate requires a connection
# it can use to run queries against to get the database schema.
@@ -118,76 +109,107 @@ def compare_metadata(context, metadata):
diffs = []
- object_filters = _get_object_filters(context.opts)
- include_schemas = context.opts.get('include_schemas', False)
-
- _produce_net_changes(connection, metadata, diffs, autogen_context,
- object_filters, include_schemas)
+ compare._produce_net_changes(autogen_context, diffs)
return diffs
-###################################################
-# top level
+def produce_migrations(context, metadata):
+ """Produce a :class:`.MigrationScript` structure based on schema
+ comparison.
-def _produce_migration_diffs(context, template_args,
- imports, include_symbol=None,
- include_object=None,
- include_schemas=False):
- opts = context.opts
- metadata = opts['target_metadata']
- include_schemas = opts.get('include_schemas', include_schemas)
+ This function does essentially what :func:`.compare_metadata` does,
+ but then runs the resulting list of diffs to produce the full
+ :class:`.MigrationScript` object. For an example of what this looks like,
+ see the example in :ref:`customizing_revision`.
- object_filters = _get_object_filters(opts, include_symbol, include_object)
+ .. versionadded:: 0.8.0
- if metadata is None:
- raise util.CommandError(
- "Can't proceed with --autogenerate option; environment "
- "script %s does not provide "
- "a MetaData object to the context." % (
- context.script.env_py_location
- ))
- autogen_context, connection = _autogen_context(context, imports)
+ .. seealso::
+
+ :func:`.compare_metadata` - returns more fundamental "diff"
+ data from comparing a schema.
+
+ """
+ autogen_context = _autogen_context(context, metadata=metadata)
diffs = []
- _produce_net_changes(connection, metadata, diffs,
- autogen_context, object_filters, include_schemas)
- template_args[opts['upgrade_token']] = _indent(_render_cmd_body(
- _produce_upgrade_commands, diffs, autogen_context))
- template_args[opts['downgrade_token']] = _indent(_render_cmd_body(
- _produce_downgrade_commands, diffs, autogen_context))
- template_args['imports'] = "\n".join(sorted(imports))
+ compare._produce_net_changes(autogen_context, diffs)
+
+ migration_script = ops.MigrationScript(
+ rev_id=None,
+ upgrade_ops=ops.UpgradeOps([]),
+ downgrade_ops=ops.DowngradeOps([]),
+ )
+
+ compose._to_migration_script(autogen_context, migration_script, diffs)
+
+ return migration_script
+
+
+def render_python_code(
+ up_or_down_op,
+ sqlalchemy_module_prefix='sa.',
+ alembic_module_prefix='op.',
+ imports=(),
+ render_item=None,
+):
+ """Render Python code given an :class:`.UpgradeOps` or
+ :class:`.DowngradeOps` object.
+
+ This is a convenience function that can be used to test the
+ autogenerate output of a user-defined :class:`.MigrationScript` structure.
+
+ """
+ autogen_context = {
+ 'opts': {
+ 'sqlalchemy_module_prefix': sqlalchemy_module_prefix,
+ 'alembic_module_prefix': alembic_module_prefix,
+ 'render_item': render_item,
+ },
+ 'imports': set(imports)
+ }
+ return render._indent(render._render_cmd_body(
+ up_or_down_op, autogen_context))
-def _indent(text):
- text = re.compile(r'^', re.M).sub(" ", text).strip()
- text = re.compile(r' +$', re.M).sub("", text)
- return text
-def _render_cmd_body(fn, diffs, autogen_context):
+def _render_migration_diffs(context, template_args, imports):
+ """legacy, used by test_autogen_composition at the moment"""
- buf = StringIO()
- printer = PythonPrinter(buf)
+ migration_script = produce_migrations(context, None)
+
+ autogen_context = _autogen_context(context, imports=imports)
+ diffs = []
- printer.writeline(
- "### commands auto generated by Alembic - "
- "please adjust! ###"
+ compare._produce_net_changes(autogen_context, diffs)
+
+ migration_script = ops.MigrationScript(
+ rev_id=None,
+ imports=imports,
+ upgrade_ops=ops.UpgradeOps([]),
+ downgrade_ops=ops.DowngradeOps([]),
)
- for line in fn(diffs, autogen_context):
- printer.writeline(line)
+ compose._to_migration_script(autogen_context, migration_script, diffs)
- printer.writeline("### end Alembic commands ###")
+ render._render_migration_script(
+ autogen_context, migration_script, template_args
+ )
- return buf.getvalue()
+def _autogen_context(
+ context, imports=None, metadata=None, include_symbol=None,
+ include_object=None, include_schemas=False):
-def _get_object_filters(
- context_opts, include_symbol=None, include_object=None):
- include_symbol = context_opts.get('include_symbol', include_symbol)
- include_object = context_opts.get('include_object', include_object)
+ opts = context.opts
+ metadata = opts['target_metadata'] if metadata is None else metadata
+ include_schemas = opts.get('include_schemas', include_schemas)
+
+ include_symbol = opts.get('include_symbol', include_symbol)
+ include_object = opts.get('include_object', include_object)
object_filters = []
if include_symbol:
@@ -200,171 +222,24 @@ def _get_object_filters(
if include_object:
object_filters.append(include_object)
- return object_filters
-
+ if metadata is None:
+ raise util.CommandError(
+ "Can't proceed with --autogenerate option; environment "
+ "script %s does not provide "
+ "a MetaData object to the context." % (
+ context.script.env_py_location
+ ))
-def _autogen_context(context, imports):
opts = context.opts
connection = context.bind
return {
- 'imports': imports,
+ 'imports': imports if imports is not None else set(),
'connection': connection,
'dialect': connection.dialect,
'context': context,
- 'opts': opts
- }, connection
-
-
-###################################################
-# walk structures
-
-
-def _produce_net_changes(connection, metadata, diffs, autogen_context,
- object_filters=(),
- include_schemas=False):
- inspector = Inspector.from_engine(connection)
- conn_table_names = set()
-
- default_schema = connection.dialect.default_schema_name
- if include_schemas:
- schemas = set(inspector.get_schema_names())
- # replace default schema name with None
- schemas.discard("information_schema")
- # replace the "default" schema with None
- schemas.add(None)
- schemas.discard(default_schema)
- else:
- schemas = [None]
-
- version_table_schema = autogen_context['context'].version_table_schema
- version_table = autogen_context['context'].version_table
-
- for s in schemas:
- tables = set(inspector.get_table_names(schema=s))
- if s == version_table_schema:
- tables = tables.difference(
- [autogen_context['context'].version_table]
- )
- conn_table_names.update(zip([s] * len(tables), tables))
-
- metadata_table_names = OrderedSet(
- [(table.schema, table.name) for table in metadata.sorted_tables]
- ).difference([(version_table_schema, version_table)])
-
- _compare_tables(conn_table_names, metadata_table_names,
- object_filters,
- inspector, metadata, diffs, autogen_context)
-
-
-def _produce_upgrade_commands(diffs, autogen_context):
- return _produce_commands("upgrade", diffs, autogen_context)
-
-
-def _produce_downgrade_commands(diffs, autogen_context):
- return _produce_commands("downgrade", diffs, autogen_context)
-
-
-def _produce_commands(type_, diffs, autogen_context):
- opts = autogen_context['opts']
- render_as_batch = opts.get('render_as_batch', False)
-
- if diffs:
- if type_ == 'downgrade':
- diffs = reversed(diffs)
- for (schema, table), subdiffs in _group_diffs_by_table(diffs):
- if table is not None and render_as_batch:
- yield "with op.batch_alter_table"\
- "(%r, schema=%r) as batch_op:" % (table, schema)
- autogen_context['batch_prefix'] = 'batch_op.'
- for diff in subdiffs:
- yield _invoke_command(type_, diff, autogen_context)
- if table is not None and render_as_batch:
- del autogen_context['batch_prefix']
- yield ""
- else:
- yield "pass"
-
-
-def _invoke_command(updown, args, autogen_context):
- if isinstance(args, tuple):
- return _invoke_adddrop_command(updown, args, autogen_context)
- else:
- return _invoke_modify_command(updown, args, autogen_context)
-
-
-def _invoke_adddrop_command(updown, args, autogen_context):
- cmd_type = args[0]
- adddrop, cmd_type = cmd_type.split("_")
-
- cmd_args = args[1:] + (autogen_context,)
-
- _commands = {
- "table": (_drop_table, _add_table),
- "column": (_drop_column, _add_column),
- "index": (_drop_index, _add_index),
- "constraint": (_drop_constraint, _add_constraint),
- "fk": (_drop_constraint, _add_fk_constraint)
- }
-
- cmd_callables = _commands[cmd_type]
-
- if (
- updown == "upgrade" and adddrop == "add"
- ) or (
- updown == "downgrade" and adddrop == "remove"
- ):
- return cmd_callables[1](*cmd_args)
- else:
- return cmd_callables[0](*cmd_args)
-
-
-def _invoke_modify_command(updown, args, autogen_context):
- sname, tname, cname = args[0][1:4]
- kw = {}
-
- _arg_struct = {
- "modify_type": ("existing_type", "type_"),
- "modify_nullable": ("existing_nullable", "nullable"),
- "modify_default": ("existing_server_default", "server_default"),
- }
- for diff in args:
- diff_kw = diff[4]
- for arg in ("existing_type",
- "existing_nullable",
- "existing_server_default"):
- if arg in diff_kw:
- kw.setdefault(arg, diff_kw[arg])
- old_kw, new_kw = _arg_struct[diff[0]]
- if updown == "upgrade":
- kw[new_kw] = diff[-1]
- kw[old_kw] = diff[-2]
- else:
- kw[new_kw] = diff[-2]
- kw[old_kw] = diff[-1]
-
- if "nullable" in kw:
- kw.pop("existing_nullable", None)
- if "server_default" in kw:
- kw.pop("existing_server_default", None)
- return _modify_col(tname, cname, autogen_context, schema=sname, **kw)
-
-
-def _group_diffs_by_table(diffs):
- _adddrop = {
- "table": lambda diff: (None, None),
- "column": lambda diff: (diff[0], diff[1]),
- "index": lambda diff: (diff[0].table.schema, diff[0].table.name),
- "constraint": lambda diff: (diff[0].table.schema, diff[0].table.name),
- "fk": lambda diff: (diff[0].parent.schema, diff[0].parent.name)
+ 'opts': opts,
+ 'metadata': metadata,
+ 'object_filters': object_filters,
+ 'include_schemas': include_schemas
}
- def _derive_table(diff):
- if isinstance(diff, tuple):
- cmd_type = diff[0]
- adddrop, cmd_type = cmd_type.split("_")
- return _adddrop[cmd_type](diff[1:])
- else:
- sname, tname = diff[0][1:3]
- return sname, tname
-
- return itertools.groupby(diffs, _derive_table)
diff --git a/alembic/autogenerate/compare.py b/alembic/autogenerate/compare.py
index 2aae962..cd6b696 100644
--- a/alembic/autogenerate/compare.py
+++ b/alembic/autogenerate/compare.py
@@ -1,7 +1,9 @@
from sqlalchemy import schema as sa_schema, types as sqltypes
+from sqlalchemy.engine.reflection import Inspector
from sqlalchemy import event
import logging
-from .. import compat
+from ..util import compat
+from ..util import sqla_compat
from sqlalchemy.util import OrderedSet
import re
from .render import _user_defined_render
@@ -11,6 +13,47 @@ from alembic.ddl.base import _fk_spec
log = logging.getLogger(__name__)
+def _produce_net_changes(autogen_context, diffs):
+
+ metadata = autogen_context['metadata']
+ connection = autogen_context['connection']
+ object_filters = autogen_context.get('object_filters', ())
+ include_schemas = autogen_context.get('include_schemas', False)
+
+ inspector = Inspector.from_engine(connection)
+ conn_table_names = set()
+
+ default_schema = connection.dialect.default_schema_name
+ if include_schemas:
+ schemas = set(inspector.get_schema_names())
+ # replace default schema name with None
+ schemas.discard("information_schema")
+ # replace the "default" schema with None
+ schemas.add(None)
+ schemas.discard(default_schema)
+ else:
+ schemas = [None]
+
+ version_table_schema = autogen_context['context'].version_table_schema
+ version_table = autogen_context['context'].version_table
+
+ for s in schemas:
+ tables = set(inspector.get_table_names(schema=s))
+ if s == version_table_schema:
+ tables = tables.difference(
+ [autogen_context['context'].version_table]
+ )
+ conn_table_names.update(zip([s] * len(tables), tables))
+
+ metadata_table_names = OrderedSet(
+ [(table.schema, table.name) for table in metadata.sorted_tables]
+ ).difference([(version_table_schema, version_table)])
+
+ _compare_tables(conn_table_names, metadata_table_names,
+ object_filters,
+ inspector, metadata, diffs, autogen_context)
+
+
def _run_filters(object_, name, type_, reflected, compare_to, object_filters):
for fn in object_filters:
if not fn(object_, name, type_, reflected, compare_to):
@@ -250,7 +293,7 @@ class _ix_constraint_sig(_constraint_sig):
@property
def column_names(self):
- return _get_index_column_names(self.const)
+ return sqla_compat._get_index_column_names(self.const)
class _fk_constraint_sig(_constraint_sig):
@@ -267,13 +310,6 @@ class _fk_constraint_sig(_constraint_sig):
)
-def _get_index_column_names(idx):
- if compat.sqla_08:
- return [getattr(exp, "name", None) for exp in idx.expressions]
- else:
- return [getattr(col, "name", None) for col in idx.columns]
-
-
def _compare_indexes_and_uniques(schema, tname, object_filters, conn_table,
metadata_table, diffs,
autogen_context, inspector):
diff --git a/alembic/autogenerate/compose.py b/alembic/autogenerate/compose.py
new file mode 100644
index 0000000..b42b505
--- /dev/null
+++ b/alembic/autogenerate/compose.py
@@ -0,0 +1,144 @@
+import itertools
+from ..operations import ops
+
+
+def _to_migration_script(autogen_context, migration_script, diffs):
+ _to_upgrade_op(
+ autogen_context,
+ diffs,
+ migration_script.upgrade_ops,
+ )
+
+ _to_downgrade_op(
+ autogen_context,
+ diffs,
+ migration_script.downgrade_ops,
+ )
+
+
+def _to_upgrade_op(autogen_context, diffs, upgrade_ops):
+ return _to_updown_op(autogen_context, diffs, upgrade_ops, "upgrade")
+
+
+def _to_downgrade_op(autogen_context, diffs, downgrade_ops):
+ return _to_updown_op(autogen_context, diffs, downgrade_ops, "downgrade")
+
+
+def _to_updown_op(autogen_context, diffs, op_container, type_):
+ if not diffs:
+ return
+
+ if type_ == 'downgrade':
+ diffs = reversed(diffs)
+
+ dest = [op_container.ops]
+
+ for (schema, tablename), subdiffs in _group_diffs_by_table(diffs):
+ subdiffs = list(subdiffs)
+ if tablename is not None:
+ table_ops = []
+ op = ops.ModifyTableOps(tablename, table_ops, schema=schema)
+ dest[-1].append(op)
+ dest.append(table_ops)
+ for diff in subdiffs:
+ _produce_command(autogen_context, diff, dest[-1], type_)
+ if tablename is not None:
+ dest.pop(-1)
+
+
+def _produce_command(autogen_context, diff, op_list, updown):
+ if isinstance(diff, tuple):
+ _produce_adddrop_command(updown, diff, op_list, autogen_context)
+ else:
+ _produce_modify_command(updown, diff, op_list, autogen_context)
+
+
+def _produce_adddrop_command(updown, diff, op_list, autogen_context):
+ cmd_type = diff[0]
+ adddrop, cmd_type = cmd_type.split("_")
+
+ cmd_args = diff[1:]
+
+ _commands = {
+ "table": (ops.DropTableOp.from_table, ops.CreateTableOp.from_table),
+ "column": (
+ ops.DropColumnOp.from_column_and_tablename,
+ ops.AddColumnOp.from_column_and_tablename),
+ "index": (ops.DropIndexOp.from_index, ops.CreateIndexOp.from_index),
+ "constraint": (
+ ops.DropConstraintOp.from_constraint,
+ ops.AddConstraintOp.from_constraint),
+ "fk": (
+ ops.DropConstraintOp.from_constraint,
+ ops.CreateForeignKeyOp.from_constraint)
+ }
+
+ cmd_callables = _commands[cmd_type]
+
+ if (
+ updown == "upgrade" and adddrop == "add"
+ ) or (
+ updown == "downgrade" and adddrop == "remove"
+ ):
+ op_list.append(cmd_callables[1](*cmd_args))
+ else:
+ op_list.append(cmd_callables[0](*cmd_args))
+
+
+def _produce_modify_command(updown, diffs, op_list, autogen_context):
+ sname, tname, cname = diffs[0][1:4]
+ kw = {}
+
+ _arg_struct = {
+ "modify_type": ("existing_type", "modify_type"),
+ "modify_nullable": ("existing_nullable", "modify_nullable"),
+ "modify_default": ("existing_server_default", "modify_server_default"),
+ }
+ for diff in diffs:
+ diff_kw = diff[4]
+ for arg in ("existing_type",
+ "existing_nullable",
+ "existing_server_default"):
+ if arg in diff_kw:
+ kw.setdefault(arg, diff_kw[arg])
+ old_kw, new_kw = _arg_struct[diff[0]]
+ if updown == "upgrade":
+ kw[new_kw] = diff[-1]
+ kw[old_kw] = diff[-2]
+ else:
+ kw[new_kw] = diff[-2]
+ kw[old_kw] = diff[-1]
+
+ if "modify_nullable" in kw:
+ kw.pop("existing_nullable", None)
+ if "modify_server_default" in kw:
+ kw.pop("existing_server_default", None)
+
+ op_list.append(
+ ops.AlterColumnOp(
+ tname, cname, schema=sname,
+ **kw
+ )
+ )
+
+
+def _group_diffs_by_table(diffs):
+ _adddrop = {
+ "table": lambda diff: (None, None),
+ "column": lambda diff: (diff[0], diff[1]),
+ "index": lambda diff: (diff[0].table.schema, diff[0].table.name),
+ "constraint": lambda diff: (diff[0].table.schema, diff[0].table.name),
+ "fk": lambda diff: (diff[0].parent.schema, diff[0].parent.name)
+ }
+
+ def _derive_table(diff):
+ if isinstance(diff, tuple):
+ cmd_type = diff[0]
+ adddrop, cmd_type = cmd_type.split("_")
+ return _adddrop[cmd_type](diff[1:])
+ else:
+ sname, tname = diff[0][1:3]
+ return sname, tname
+
+ return itertools.groupby(diffs, _derive_table)
+
diff --git a/alembic/autogenerate/generate.py b/alembic/autogenerate/generate.py
new file mode 100644
index 0000000..c686156
--- /dev/null
+++ b/alembic/autogenerate/generate.py
@@ -0,0 +1,92 @@
+from .. import util
+from . import api
+from . import compose
+from . import compare
+from . import render
+from ..operations import ops
+
+
+class RevisionContext(object):
+ def __init__(self, config, script_directory, command_args):
+ self.config = config
+ self.script_directory = script_directory
+ self.command_args = command_args
+ self.template_args = {
+ 'config': config # Let templates use config for
+ # e.g. multiple databases
+ }
+ self.generated_revisions = [
+ self._default_revision()
+ ]
+
+ def _to_script(self, migration_script):
+ template_args = {}
+ for k, v in self.template_args.items():
+ template_args.setdefault(k, v)
+
+ if migration_script._autogen_context is not None:
+ render._render_migration_script(
+ migration_script._autogen_context, migration_script,
+ template_args
+ )
+
+ return self.script_directory.generate_revision(
+ migration_script.rev_id,
+ migration_script.message,
+ refresh=True,
+ head=migration_script.head,
+ splice=migration_script.splice,
+ branch_labels=migration_script.branch_label,
+ version_path=migration_script.version_path,
+ **template_args)
+
+ def run_autogenerate(self, rev, context):
+ if self.command_args['sql']:
+ raise util.CommandError(
+ "Using --sql with --autogenerate does not make any sense")
+ if set(self.script_directory.get_revisions(rev)) != \
+ set(self.script_directory.get_revisions("heads")):
+ raise util.CommandError("Target database is not up to date.")
+
+ autogen_context = api._autogen_context(context)
+
+ diffs = []
+ compare._produce_net_changes(autogen_context, diffs)
+
+ migration_script = self.generated_revisions[0]
+
+ compose._to_migration_script(autogen_context, migration_script, diffs)
+
+ hook = context.opts.get('process_revision_directives', None)
+ if hook:
+ hook(context, rev, self.generated_revisions)
+
+ for migration_script in self.generated_revisions:
+ migration_script._autogen_context = autogen_context
+
+ def run_no_autogenerate(self, rev, context):
+ hook = context.opts.get('process_revision_directives', None)
+ if hook:
+ hook(context, rev, self.generated_revisions)
+
+ for migration_script in self.generated_revisions:
+ migration_script._autogen_context = None
+
+ def _default_revision(self):
+ op = ops.MigrationScript(
+ rev_id=self.command_args['rev_id'] or util.rev_id(),
+ message=self.command_args['message'],
+ imports=set(),
+ upgrade_ops=ops.UpgradeOps([]),
+ downgrade_ops=ops.DowngradeOps([]),
+ head=self.command_args['head'],
+ splice=self.command_args['splice'],
+ branch_label=self.command_args['branch_label'],
+ version_path=self.command_args['version_path']
+ )
+ op._autogen_context = None
+ return op
+
+ def generate_scripts(self):
+ for generated_revision in self.generated_revisions:
+ yield self._to_script(generated_revision)
diff --git a/alembic/autogenerate/render.py b/alembic/autogenerate/render.py
index 5007652..c3f3df1 100644
--- a/alembic/autogenerate/render.py
+++ b/alembic/autogenerate/render.py
@@ -1,11 +1,12 @@
from sqlalchemy import schema as sa_schema, types as sqltypes, sql
-import logging
-from .. import compat
-from ..ddl.base import _table_for_constraint, _fk_spec
+from ..operations import ops
+from ..util import compat
import re
-from ..compat import string_types
+from ..util.compat import string_types
+from .. import util
+from mako.pygen import PythonPrinter
+from ..util.compat import StringIO
-log = logging.getLogger(__name__)
MAX_PYTHON_ARGS = 255
@@ -22,69 +23,91 @@ except ImportError:
return name
-class _f_name(object):
+def _indent(text):
+ text = re.compile(r'^', re.M).sub(" ", text).strip()
+ text = re.compile(r' +$', re.M).sub("", text)
+ return text
- def __init__(self, prefix, name):
- self.prefix = prefix
- self.name = name
- def __repr__(self):
- return "%sf(%r)" % (self.prefix, _ident(self.name))
+def _render_migration_script(autogen_context, migration_script, template_args):
+ opts = autogen_context['opts']
+ imports = autogen_context['imports']
+ template_args[opts['upgrade_token']] = _indent(_render_cmd_body(
+ migration_script.upgrade_ops, autogen_context))
+ template_args[opts['downgrade_token']] = _indent(_render_cmd_body(
+ migration_script.downgrade_ops, autogen_context))
+ template_args['imports'] = "\n".join(sorted(imports))
-def _ident(name):
- """produce a __repr__() object for a string identifier that may
- use quoted_name() in SQLAlchemy 0.9 and greater.
+default_renderers = renderers = util.Dispatcher()
- The issue worked around here is that quoted_name() doesn't have
- very good repr() behavior by itself when unicode is involved.
- """
- if name is None:
- return name
- elif compat.sqla_09 and isinstance(name, sql.elements.quoted_name):
- if compat.py2k:
- # the attempt to encode to ascii here isn't super ideal,
- # however we are trying to cut down on an explosion of
- # u'' literals only when py2k + SQLA 0.9, in particular
- # makes unit tests testing code generation very difficult
- try:
- return name.encode('ascii')
- except UnicodeError:
- return compat.text_type(name)
- else:
- return compat.text_type(name)
- elif isinstance(name, compat.string_types):
- return name
+def _render_cmd_body(op_container, autogen_context):
+ buf = StringIO()
+ printer = PythonPrinter(buf)
-def _render_potential_expr(value, autogen_context, wrap_in_text=True):
- if isinstance(value, sql.ClauseElement):
- if compat.sqla_08:
- compile_kw = dict(compile_kwargs={'literal_binds': True})
- else:
- compile_kw = {}
+ printer.writeline(
+ "### commands auto generated by Alembic - "
+ "please adjust! ###"
+ )
- if wrap_in_text:
- template = "%(prefix)stext(%(sql)r)"
- else:
- template = "%(sql)r"
+ if not op_container.ops:
+ printer.writeline("pass")
+ else:
+ for op in op_container.ops:
+ lines = render_op(autogen_context, op)
- return template % {
- "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
- "sql": compat.text_type(
- value.compile(dialect=autogen_context['dialect'],
- **compile_kw)
- )
- }
+ for line in lines:
+ printer.writeline(line)
+
+ printer.writeline("### end Alembic commands ###")
+
+ return buf.getvalue()
+
+def render_op(autogen_context, op):
+ renderer = renderers.dispatch(op)
+ lines = util.to_list(renderer(autogen_context, op))
+ return lines
+
+
+def render_op_text(autogen_context, op):
+ return "\n".join(render_op(autogen_context, op))
+
+
+@renderers.dispatch_for(ops.ModifyTableOps)
+def _render_modify_table(autogen_context, op):
+ opts = autogen_context['opts']
+ render_as_batch = opts.get('render_as_batch', False)
+
+ if op.ops:
+ lines = []
+ if render_as_batch:
+ lines.append(
+ "with op.batch_alter_table(%r, schema=%r) as batch_op:"
+ % (op.table_name, op.schema)
+ )
+ autogen_context['batch_prefix'] = 'batch_op.'
+ for t_op in op.ops:
+ t_lines = render_op(autogen_context, t_op)
+ lines.extend(t_lines)
+ if render_as_batch:
+ del autogen_context['batch_prefix']
+ lines.append("")
+ return lines
else:
- return repr(value)
+ return [
+ "pass"
+ ]
+
+@renderers.dispatch_for(ops.CreateTableOp)
+def _add_table(autogen_context, op):
+ table = op.to_table()
-def _add_table(table, autogen_context):
args = [col for col in
- [_render_column(col, autogen_context) for col in table.c]
+ [_render_column(col, autogen_context) for col in table.columns]
if col] + \
sorted([rcons for rcons in
[_render_constraint(cons, autogen_context) for cons in
@@ -98,45 +121,33 @@ def _add_table(table, autogen_context):
args = ',\n'.join(args)
text = "%(prefix)screate_table(%(tablename)r,\n%(args)s" % {
- 'tablename': _ident(table.name),
+ 'tablename': _ident(op.table_name),
'prefix': _alembic_autogenerate_prefix(autogen_context),
'args': args,
}
- if table.schema:
- text += ",\nschema=%r" % _ident(table.schema)
- for k in sorted(table.kwargs):
- text += ",\n%s=%r" % (k.replace(" ", "_"), table.kwargs[k])
+ if op.schema:
+ text += ",\nschema=%r" % _ident(op.schema)
+ for k in sorted(op.kw):
+ text += ",\n%s=%r" % (k.replace(" ", "_"), op.kw[k])
text += "\n)"
return text
-def _drop_table(table, autogen_context):
+@renderers.dispatch_for(ops.DropTableOp)
+def _drop_table(autogen_context, op):
text = "%(prefix)sdrop_table(%(tname)r" % {
"prefix": _alembic_autogenerate_prefix(autogen_context),
- "tname": _ident(table.name)
+ "tname": _ident(op.table_name)
}
- if table.schema:
- text += ", schema=%r" % _ident(table.schema)
+ if op.schema:
+ text += ", schema=%r" % _ident(op.schema)
text += ")"
return text
-def _get_index_rendered_expressions(idx, autogen_context):
- if compat.sqla_08:
- return [repr(_ident(getattr(exp, "name", None)))
- if isinstance(exp, sa_schema.Column)
- else _render_potential_expr(exp, autogen_context)
- for exp in idx.expressions]
- else:
- return [
- repr(_ident(getattr(col, "name", None))) for col in idx.columns]
-
-
-def _add_index(index, autogen_context):
- """
- Generate Alembic operations for the CREATE INDEX of an
- :class:`~sqlalchemy.schema.Index` instance.
- """
+@renderers.dispatch_for(ops.CreateIndexOp)
+def _add_index(autogen_context, op):
+ index = op.to_index()
has_batch = 'batch_prefix' in autogen_context
@@ -167,11 +178,8 @@ def _add_index(index, autogen_context):
return text
-def _drop_index(index, autogen_context):
- """
- Generate Alembic operations for the DROP INDEX of an
- :class:`~sqlalchemy.schema.Index` instance.
- """
+@renderers.dispatch_for(ops.DropIndexOp)
+def _drop_index(autogen_context, op):
has_batch = 'batch_prefix' in autogen_context
if has_batch:
@@ -182,90 +190,39 @@ def _drop_index(index, autogen_context):
text = tmpl % {
'prefix': _alembic_autogenerate_prefix(autogen_context),
- 'name': _render_gen_name(autogen_context, index.name),
- 'table_name': _ident(index.table.name),
- 'schema': ((", schema=%r" % _ident(index.table.schema))
- if index.table.schema else '')
+ 'name': _render_gen_name(autogen_context, op.index_name),
+ 'table_name': _ident(op.table_name),
+ 'schema': ((", schema=%r" % _ident(op.schema))
+ if op.schema else '')
}
return text
-def _render_unique_constraint(constraint, autogen_context):
- rendered = _user_defined_render("unique", constraint, autogen_context)
- if rendered is not False:
- return rendered
-
- return _uq_constraint(constraint, autogen_context, False)
-
-
-def _add_unique_constraint(constraint, autogen_context):
- """
- Generate Alembic operations for the ALTER TABLE .. ADD CONSTRAINT ...
- UNIQUE of a :class:`~sqlalchemy.schema.UniqueConstraint` instance.
- """
- return _uq_constraint(constraint, autogen_context, True)
-
-
-def _uq_constraint(constraint, autogen_context, alter):
- opts = []
-
- has_batch = 'batch_prefix' in autogen_context
-
- if constraint.deferrable:
- opts.append(("deferrable", str(constraint.deferrable)))
- if constraint.initially:
- opts.append(("initially", str(constraint.initially)))
- if not has_batch and alter and constraint.table.schema:
- opts.append(("schema", _ident(constraint.table.schema)))
- if not alter and constraint.name:
- opts.append(
- ("name",
- _render_gen_name(autogen_context, constraint.name)))
-
- if alter:
- args = [
- repr(_render_gen_name(autogen_context, constraint.name))]
- if not has_batch:
- args += [repr(_ident(constraint.table.name))]
- args.append(repr([_ident(col.name) for col in constraint.columns]))
- args.extend(["%s=%r" % (k, v) for k, v in opts])
- return "%(prefix)screate_unique_constraint(%(args)s)" % {
- 'prefix': _alembic_autogenerate_prefix(autogen_context),
- 'args': ", ".join(args)
- }
- else:
- args = [repr(_ident(col.name)) for col in constraint.columns]
- args.extend(["%s=%r" % (k, v) for k, v in opts])
- return "%(prefix)sUniqueConstraint(%(args)s)" % {
- "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
- "args": ", ".join(args)
- }
+@renderers.dispatch_for(ops.CreateUniqueConstraintOp)
+def _add_unique_constraint(autogen_context, op):
+ return [_uq_constraint(op.to_constraint(), autogen_context, True)]
-def _add_fk_constraint(constraint, autogen_context):
- source_schema, source_table, \
- source_columns, target_schema, \
- target_table, target_columns = _fk_spec(constraint)
+@renderers.dispatch_for(ops.CreateForeignKeyOp)
+def _add_fk_constraint(autogen_context, op):
args = [
- repr(_render_gen_name(autogen_context, constraint.name)),
- repr(_ident(source_table)),
- repr(_ident(target_table)),
- repr([_ident(col) for col in source_columns]),
- repr([_ident(col) for col in target_columns])
+ repr(
+ _render_gen_name(autogen_context, op.constraint_name)),
+ repr(_ident(op.source_table)),
+ repr(_ident(op.referent_table)),
+ repr([_ident(col) for col in op.local_cols]),
+ repr([_ident(col) for col in op.remote_cols])
]
- if source_schema:
- args.append(
- "%s=%r" % ('source_schema', source_schema),
- )
- if target_schema:
- args.append(
- "%s=%r" % ('referent_schema', target_schema)
- )
- opts = []
- _populate_render_fk_opts(constraint, opts)
- args.extend(("%s=%s" % (k, v) for (k, v) in opts))
+ for k in (
+ 'source_schema', 'referent_schema',
+ 'onupdate', 'ondelete', 'initially', 'deferrable', 'use_alter'
+ ):
+ if k in op.kw:
+ value = op.kw[k]
+ if value is not None:
+ args.append("%s=%r" % (k, value))
return "%(prefix)screate_foreign_key(%(args)s)" % {
'prefix': _alembic_autogenerate_prefix(autogen_context),
@@ -273,41 +230,18 @@ def _add_fk_constraint(constraint, autogen_context):
}
+@renderers.dispatch_for(ops.CreatePrimaryKeyOp)
def _add_pk_constraint(constraint, autogen_context):
raise NotImplementedError()
+@renderers.dispatch_for(ops.CreateCheckConstraintOp)
def _add_check_constraint(constraint, autogen_context):
raise NotImplementedError()
-def _add_constraint(constraint, autogen_context):
- """
- Dispatcher for the different types of constraints.
- """
- funcs = {
- "unique_constraint": _add_unique_constraint,
- "foreign_key_constraint": _add_fk_constraint,
- "primary_key_constraint": _add_pk_constraint,
- "check_constraint": _add_check_constraint,
- "column_check_constraint": _add_check_constraint,
- }
- return funcs[constraint.__visit_name__](constraint, autogen_context)
-
-
-def _drop_constraint(constraint, autogen_context):
- """
- Generate Alembic operations for the ALTER TABLE ... DROP CONSTRAINT
- of a :class:`~sqlalchemy.schema.UniqueConstraint` instance.
- """
-
- types = {
- "unique_constraint": "unique",
- "foreign_key_constraint": "foreignkey",
- "primary_key_constraint": "primary",
- "check_constraint": "check",
- "column_check_constraint": "check",
- }
+@renderers.dispatch_for(ops.DropConstraintOp)
+def _drop_constraint(autogen_context, op):
if 'batch_prefix' in autogen_context:
template = "%(prefix)sdrop_constraint"\
@@ -316,19 +250,22 @@ def _drop_constraint(constraint, autogen_context):
template = "%(prefix)sdrop_constraint"\
"(%(name)r, '%(table_name)s'%(schema)s, type_=%(type)r)"
- constraint_table = _table_for_constraint(constraint)
text = template % {
'prefix': _alembic_autogenerate_prefix(autogen_context),
- 'name': _render_gen_name(autogen_context, constraint.name),
- 'table_name': _ident(constraint_table.name),
- 'type': types[constraint.__visit_name__],
- 'schema': (", schema='%s'" % _ident(constraint_table.schema))
- if constraint_table.schema else '',
+ 'name': _render_gen_name(
+ autogen_context, op.constraint_name),
+ 'table_name': _ident(op.table_name),
+ 'type': op.constraint_type,
+ 'schema': (", schema='%s'" % _ident(op.schema))
+ if op.schema else '',
}
return text
-def _add_column(schema, tname, column, autogen_context):
+@renderers.dispatch_for(ops.AddColumnOp)
+def _add_column(autogen_context, op):
+
+ schema, tname, column = op.schema, op.table_name, op.column
if 'batch_prefix' in autogen_context:
template = "%(prefix)sadd_column(%(column)s)"
else:
@@ -345,7 +282,11 @@ def _add_column(schema, tname, column, autogen_context):
return text
-def _drop_column(schema, tname, column, autogen_context):
+@renderers.dispatch_for(ops.DropColumnOp)
+def _drop_column(autogen_context, op):
+
+ schema, tname, column_name = op.schema, op.table_name, op.column_name
+
if 'batch_prefix' in autogen_context:
template = "%(prefix)sdrop_column(%(cname)r)"
else:
@@ -357,21 +298,25 @@ def _drop_column(schema, tname, column, autogen_context):
text = template % {
"prefix": _alembic_autogenerate_prefix(autogen_context),
"tname": _ident(tname),
- "cname": _ident(column.name),
+ "cname": _ident(column_name),
"schema": _ident(schema)
}
return text
-def _modify_col(tname, cname,
- autogen_context,
- server_default=False,
- type_=None,
- nullable=None,
- existing_type=None,
- existing_nullable=None,
- existing_server_default=False,
- schema=None):
+@renderers.dispatch_for(ops.AlterColumnOp)
+def _alter_column(autogen_context, op):
+
+ tname = op.table_name
+ cname = op.column_name
+ server_default = op.modify_server_default
+ type_ = op.modify_type
+ nullable = op.modify_nullable
+ existing_type = op.existing_type
+ existing_nullable = op.existing_nullable
+ existing_server_default = op.existing_server_default
+ schema = op.schema
+
indent = " " * 11
if 'batch_prefix' in autogen_context:
@@ -413,6 +358,114 @@ def _modify_col(tname, cname,
return text
+class _f_name(object):
+
+ def __init__(self, prefix, name):
+ self.prefix = prefix
+ self.name = name
+
+ def __repr__(self):
+ return "%sf(%r)" % (self.prefix, _ident(self.name))
+
+
+def _ident(name):
+ """produce a __repr__() object for a string identifier that may
+ use quoted_name() in SQLAlchemy 0.9 and greater.
+
+ The issue worked around here is that quoted_name() doesn't have
+ very good repr() behavior by itself when unicode is involved.
+
+ """
+ if name is None:
+ return name
+ elif compat.sqla_09 and isinstance(name, sql.elements.quoted_name):
+ if compat.py2k:
+ # the attempt to encode to ascii here isn't super ideal,
+ # however we are trying to cut down on an explosion of
+ # u'' literals only when py2k + SQLA 0.9, in particular
+ # makes unit tests testing code generation very difficult
+ try:
+ return name.encode('ascii')
+ except UnicodeError:
+ return compat.text_type(name)
+ else:
+ return compat.text_type(name)
+ elif isinstance(name, compat.string_types):
+ return name
+
+
+def _render_potential_expr(value, autogen_context, wrap_in_text=True):
+ if isinstance(value, sql.ClauseElement):
+ if compat.sqla_08:
+ compile_kw = dict(compile_kwargs={'literal_binds': True})
+ else:
+ compile_kw = {}
+
+ if wrap_in_text:
+ template = "%(prefix)stext(%(sql)r)"
+ else:
+ template = "%(sql)r"
+
+ return template % {
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
+ "sql": compat.text_type(
+ value.compile(dialect=autogen_context['dialect'],
+ **compile_kw)
+ )
+ }
+
+ else:
+ return repr(value)
+
+
+def _get_index_rendered_expressions(idx, autogen_context):
+ if compat.sqla_08:
+ return [repr(_ident(getattr(exp, "name", None)))
+ if isinstance(exp, sa_schema.Column)
+ else _render_potential_expr(exp, autogen_context)
+ for exp in idx.expressions]
+ else:
+ return [
+ repr(_ident(getattr(col, "name", None))) for col in idx.columns]
+
+
+def _uq_constraint(constraint, autogen_context, alter):
+ opts = []
+
+ has_batch = 'batch_prefix' in autogen_context
+
+ if constraint.deferrable:
+ opts.append(("deferrable", str(constraint.deferrable)))
+ if constraint.initially:
+ opts.append(("initially", str(constraint.initially)))
+ if not has_batch and alter and constraint.table.schema:
+ opts.append(("schema", _ident(constraint.table.schema)))
+ if not alter and constraint.name:
+ opts.append(
+ ("name",
+ _render_gen_name(autogen_context, constraint.name)))
+
+ if alter:
+ args = [
+ repr(_render_gen_name(
+ autogen_context, constraint.name))]
+ if not has_batch:
+ args += [repr(_ident(constraint.table.name))]
+ args.append(repr([_ident(col.name) for col in constraint.columns]))
+ args.extend(["%s=%r" % (k, v) for k, v in opts])
+ return "%(prefix)screate_unique_constraint(%(args)s)" % {
+ 'prefix': _alembic_autogenerate_prefix(autogen_context),
+ 'args': ", ".join(args)
+ }
+ else:
+ args = [repr(_ident(col.name)) for col in constraint.columns]
+ args.extend(["%s=%r" % (k, v) for k, v in opts])
+ return "%(prefix)sUniqueConstraint(%(args)s)" % {
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
+ "args": ", ".join(args)
+ }
+
+
def _user_autogenerate_prefix(autogen_context, target):
prefix = autogen_context['opts']['user_module_prefix']
if prefix is None:
@@ -508,14 +561,15 @@ def _repr_type(type_, autogen_context):
return "%s%r" % (prefix, type_)
+_constraint_renderers = util.Dispatcher()
+
+
def _render_constraint(constraint, autogen_context):
- renderer = _constraint_renderers.get(type(constraint), None)
- if renderer:
- return renderer(constraint, autogen_context)
- else:
- return None
+ renderer = _constraint_renderers.dispatch(constraint)
+ return renderer(constraint, autogen_context)
+@_constraint_renderers.dispatch_for(sa_schema.PrimaryKeyConstraint)
def _render_primary_key(constraint, autogen_context):
rendered = _user_defined_render("primary_key", constraint, autogen_context)
if rendered is not False:
@@ -555,7 +609,8 @@ def _fk_colspec(fk, metadata_schema):
# try to resolve the remote table and adjust for column.key
parent_metadata = fk.parent.table.metadata
if table_fullname in parent_metadata.tables:
- colname = _ident(parent_metadata.tables[table_fullname].c[colname].name)
+ colname = _ident(
+ parent_metadata.tables[table_fullname].c[colname].name)
colspec = "%s.%s" % (table_fullname, colname)
@@ -576,6 +631,7 @@ def _populate_render_fk_opts(constraint, opts):
opts.append(("use_alter", repr(constraint.use_alter)))
+@_constraint_renderers.dispatch_for(sa_schema.ForeignKeyConstraint)
def _render_foreign_key(constraint, autogen_context):
rendered = _user_defined_render("foreign_key", constraint, autogen_context)
if rendered is not False:
@@ -602,6 +658,16 @@ def _render_foreign_key(constraint, autogen_context):
}
+@_constraint_renderers.dispatch_for(sa_schema.UniqueConstraint)
+def _render_unique_constraint(constraint, autogen_context):
+ rendered = _user_defined_render("unique", constraint, autogen_context)
+ if rendered is not False:
+ return rendered
+
+ return _uq_constraint(constraint, autogen_context, False)
+
+
+@_constraint_renderers.dispatch_for(sa_schema.CheckConstraint)
def _render_check_constraint(constraint, autogen_context):
rendered = _user_defined_render("check", constraint, autogen_context)
if rendered is not False:
@@ -622,7 +688,8 @@ def _render_check_constraint(constraint, autogen_context):
(
"name",
repr(
- _render_gen_name(autogen_context, constraint.name))
+ _render_gen_name(
+ autogen_context, constraint.name))
)
)
return "%(prefix)sCheckConstraint(%(sqltext)s%(opts)s)" % {
@@ -633,9 +700,5 @@ def _render_check_constraint(constraint, autogen_context):
constraint.sqltext, autogen_context, wrap_in_text=False)
}
-_constraint_renderers = {
- sa_schema.PrimaryKeyConstraint: _render_primary_key,
- sa_schema.ForeignKeyConstraint: _render_foreign_key,
- sa_schema.UniqueConstraint: _render_unique_constraint,
- sa_schema.CheckConstraint: _render_check_constraint
-}
+
+renderers = default_renderers.branch()
diff --git a/alembic/command.py b/alembic/command.py
index 5ba6d6a..3ce5131 100644
--- a/alembic/command.py
+++ b/alembic/command.py
@@ -1,8 +1,9 @@
import os
from .script import ScriptDirectory
-from .environment import EnvironmentContext
-from . import util, autogenerate as autogen
+from .runtime.environment import EnvironmentContext
+from . import util
+from . import autogenerate as autogen
def list_templates(config):
@@ -70,12 +71,16 @@ def revision(
version_path=None, rev_id=None):
"""Create a new revision file."""
- script = ScriptDirectory.from_config(config)
- template_args = {
- 'config': config # Let templates use config for
- # e.g. multiple databases
- }
- imports = set()
+ script_directory = ScriptDirectory.from_config(config)
+
+ command_args = dict(
+ message=message,
+ autogenerate=autogenerate,
+ sql=sql, head=head, splice=splice, branch_label=branch_label,
+ version_path=version_path, rev_id=rev_id
+ )
+ revision_context = autogen.RevisionContext(
+ config, script_directory, command_args)
environment = util.asbool(
config.get_main_option("revision_environment")
@@ -89,13 +94,11 @@ def revision(
"Using --sql with --autogenerate does not make any sense")
def retrieve_migrations(rev, context):
- if set(script.get_revisions(rev)) != \
- set(script.get_revisions("heads")):
- raise util.CommandError("Target database is not up to date.")
- autogen._produce_migration_diffs(context, template_args, imports)
+ revision_context.run_autogenerate(rev, context)
return []
elif environment:
def retrieve_migrations(rev, context):
+ revision_context.run_no_autogenerate(rev, context)
return []
elif sql:
raise util.CommandError(
@@ -105,16 +108,22 @@ def revision(
if environment:
with EnvironmentContext(
config,
- script,
+ script_directory,
fn=retrieve_migrations,
as_sql=sql,
- template_args=template_args,
+ template_args=revision_context.template_args,
+ revision_context=revision_context
):
- script.run_env()
- return script.generate_revision(
- rev_id or util.rev_id(), message, refresh=True,
- head=head, splice=splice, branch_labels=branch_label,
- version_path=version_path, **template_args)
+ script_directory.run_env()
+
+ scripts = [
+ script for script in
+ revision_context.generate_scripts()
+ ]
+ if len(scripts) == 1:
+ return scripts[0]
+ else:
+ return scripts
def merge(config, revisions, message=None, branch_label=None, rev_id=None):
diff --git a/alembic/config.py b/alembic/config.py
index 7f813d2..b3fc36f 100644
--- a/alembic/config.py
+++ b/alembic/config.py
@@ -1,10 +1,13 @@
from argparse import ArgumentParser
-from .compat import SafeConfigParser
+from .util.compat import SafeConfigParser
import inspect
import os
import sys
-from . import command, util, package_dir, compat
+from . import command
+from . import util
+from . import package_dir
+from .util import compat
class Config(object):
@@ -127,7 +130,7 @@ class Config(object):
This is a utility dictionary which can include not just strings but
engines, connections, schema objects, or anything else.
Use this to pass objects into an env.py script, such as passing
- a :class:`.Connection` when calling
+ a :class:`sqlalchemy.engine.base.Connection` when calling
commands from :mod:`alembic.command` programmatically.
.. versionadded:: 0.7.5
@@ -152,7 +155,7 @@ class Config(object):
@util.memoized_property
def file_config(self):
- """Return the underlying :class:`ConfigParser` object.
+ """Return the underlying ``ConfigParser`` object.
Direct access to the .ini file is available here,
though the :meth:`.Config.get_section` and
diff --git a/alembic/context.py b/alembic/context.py
index 9c0f676..758fca8 100644
--- a/alembic/context.py
+++ b/alembic/context.py
@@ -1,6 +1,5 @@
-from .environment import EnvironmentContext
-from . import util
+from .runtime.environment import EnvironmentContext
# create proxy functions for
# each method on the EnvironmentContext class.
-util.create_module_class_proxy(EnvironmentContext, globals(), locals())
+EnvironmentContext.create_module_class_proxy(globals(), locals())
diff --git a/alembic/ddl/base.py b/alembic/ddl/base.py
index dbdc991..f4a525f 100644
--- a/alembic/ddl/base.py
+++ b/alembic/ddl/base.py
@@ -1,13 +1,16 @@
import functools
from sqlalchemy.ext.compiler import compiles
-from sqlalchemy.schema import DDLElement, Column, \
- ForeignKeyConstraint, CheckConstraint
+from sqlalchemy.schema import DDLElement, Column
from sqlalchemy import Integer
from sqlalchemy import types as sqltypes
-from sqlalchemy.sql.visitors import traverse
from .. import util
+# backwards compat
+from ..util.sqla_compat import ( # noqa
+ _table_for_constraint,
+ _columns_for_constraint, _fk_spec, _is_type_bound, _find_columns)
+
if util.sqla_09:
from sqlalchemy.sql.elements import quoted_name
@@ -154,65 +157,6 @@ def visit_column_default(element, compiler, **kw):
)
-def _table_for_constraint(constraint):
- if isinstance(constraint, ForeignKeyConstraint):
- return constraint.parent
- else:
- return constraint.table
-
-
-def _columns_for_constraint(constraint):
- if isinstance(constraint, ForeignKeyConstraint):
- return [fk.parent for fk in constraint.elements]
- elif isinstance(constraint, CheckConstraint):
- return _find_columns(constraint.sqltext)
- else:
- return list(constraint.columns)
-
-
-def _fk_spec(constraint):
- if util.sqla_100:
- source_columns = [
- constraint.columns[key].name for key in constraint.column_keys]
- else:
- source_columns = [
- element.parent.name for element in constraint.elements]
-
- source_table = constraint.parent.name
- source_schema = constraint.parent.schema
- target_schema = constraint.elements[0].column.table.schema
- target_table = constraint.elements[0].column.table.name
- target_columns = [element.column.name for element in constraint.elements]
-
- return (
- source_schema, source_table,
- source_columns, target_schema, target_table, target_columns)
-
-
-def _is_type_bound(constraint):
- # this deals with SQLAlchemy #3260, don't copy CHECK constraints
- # that will be generated by the type.
- if util.sqla_100:
- # new feature added for #3260
- return constraint._type_bound
- else:
- # old way, look at what we know Boolean/Enum to use
- return (
- constraint._create_rule is not None and
- isinstance(
- getattr(constraint._create_rule, "target", None),
- sqltypes.SchemaType)
- )
-
-
-def _find_columns(clause):
- """locate Column objects within the given expression."""
-
- cols = set()
- traverse(clause, {}, {'column': cols.add})
- return cols
-
-
def quote_dotted(name, quote):
"""quote the elements of a dotted name"""
diff --git a/alembic/ddl/impl.py b/alembic/ddl/impl.py
index 3cca1ef..debef26 100644
--- a/alembic/ddl/impl.py
+++ b/alembic/ddl/impl.py
@@ -1,17 +1,13 @@
-from sqlalchemy.sql.expression import _BindParamClause
-from sqlalchemy.ext.compiler import compiles
-from sqlalchemy import schema, text, sql
+from sqlalchemy import schema, text
from sqlalchemy import types as sqltypes
-from ..compat import string_types, text_type, with_metaclass
+from ..util.compat import (
+ string_types, text_type, with_metaclass
+)
+from ..util import sqla_compat
from .. import util
from . import base
-if util.sqla_08:
- from sqlalchemy.sql.expression import TextClause
-else:
- from sqlalchemy.sql.expression import _TextClause as TextClause
-
class ImplMeta(type):
@@ -221,8 +217,10 @@ class DefaultImpl(with_metaclass(ImplMeta)):
for row in rows:
self._exec(table.insert(inline=True).values(**dict(
(k,
- _literal_bindparam(k, v, type_=table.c[k].type)
- if not isinstance(v, _literal_bindparam) else v)
+ sqla_compat._literal_bindparam(
+ k, v, type_=table.c[k].type)
+ if not isinstance(
+ v, sqla_compat._literal_bindparam) else v)
for k, v in row.items()
)))
else:
@@ -320,61 +318,6 @@ class DefaultImpl(with_metaclass(ImplMeta)):
self.static_output("COMMIT" + self.command_terminator)
-class _literal_bindparam(_BindParamClause):
- pass
-
-
-@compiles(_literal_bindparam)
-def _render_literal_bindparam(element, compiler, **kw):
- return compiler.render_literal_bindparam(element, **kw)
-
-
-def _textual_index_column(table, text_):
- """a workaround for the Index construct's severe lack of flexibility"""
- if isinstance(text_, string_types):
- c = schema.Column(text_, sqltypes.NULLTYPE)
- table.append_column(c)
- return c
- elif isinstance(text_, TextClause):
- return _textual_index_element(table, text_)
- else:
- raise ValueError("String or text() construct expected")
-
-
-class _textual_index_element(sql.ColumnElement):
- """Wrap around a sqlalchemy text() construct in such a way that
- we appear like a column-oriented SQL expression to an Index
- construct.
-
- The issue here is that currently the Postgresql dialect, the biggest
- recipient of functional indexes, keys all the index expressions to
- the corresponding column expressions when rendering CREATE INDEX,
- so the Index we create here needs to have a .columns collection that
- is the same length as the .expressions collection. Ultimately
- SQLAlchemy should support text() expressions in indexes.
-
- See https://bitbucket.org/zzzeek/sqlalchemy/issue/3174/\
- support-text-sent-to-indexes
-
- """
- __visit_name__ = '_textual_idx_element'
-
- def __init__(self, table, text):
- self.table = table
- self.text = text
- self.key = text.text
- self.fake_column = schema.Column(self.text.text, sqltypes.NULLTYPE)
- table.append_column(self.fake_column)
-
- def get_children(self):
- return [self.fake_column]
-
-
-@compiles(_textual_index_element)
-def _render_textual_index_column(element, compiler, **kw):
- return compiler.process(element.text, **kw)
-
-
def _string_compare(t1, t2):
return \
t1.length is not None and \
diff --git a/alembic/ddl/mssql.py b/alembic/ddl/mssql.py
index f516e9b..f51de33 100644
--- a/alembic/ddl/mssql.py
+++ b/alembic/ddl/mssql.py
@@ -39,11 +39,10 @@ class MSSQLImpl(DefaultImpl):
name=None,
type_=None,
schema=None,
- autoincrement=None,
existing_type=None,
existing_server_default=None,
existing_nullable=None,
- existing_autoincrement=None
+ **kw
):
if nullable is not None and existing_type is None:
@@ -63,10 +62,9 @@ class MSSQLImpl(DefaultImpl):
nullable=nullable,
type_=type_,
schema=schema,
- autoincrement=autoincrement,
existing_type=existing_type,
existing_nullable=existing_nullable,
- existing_autoincrement=existing_autoincrement
+ **kw
)
if server_default is not False:
diff --git a/alembic/ddl/mysql.py b/alembic/ddl/mysql.py
index 7956185..b1cb324 100644
--- a/alembic/ddl/mysql.py
+++ b/alembic/ddl/mysql.py
@@ -2,7 +2,7 @@ from sqlalchemy.ext.compiler import compiles
from sqlalchemy import types as sqltypes
from sqlalchemy import schema
-from ..compat import string_types
+from ..util.compat import string_types
from .. import util
from .impl import DefaultImpl
from .base import ColumnNullable, ColumnName, ColumnDefault, \
@@ -23,11 +23,12 @@ class MySQLImpl(DefaultImpl):
name=None,
type_=None,
schema=None,
- autoincrement=None,
existing_type=None,
existing_server_default=None,
existing_nullable=None,
- existing_autoincrement=None
+ autoincrement=None,
+ existing_autoincrement=None,
+ **kw
):
if name is not None:
self._exec(
@@ -284,3 +285,5 @@ def _mysql_drop_constraint(element, compiler, **kw):
raise NotImplementedError(
"No generic 'DROP CONSTRAINT' in MySQL - "
"please specify constraint type")
+
+
diff --git a/alembic/ddl/postgresql.py b/alembic/ddl/postgresql.py
index 9f97b34..ea423d7 100644
--- a/alembic/ddl/postgresql.py
+++ b/alembic/ddl/postgresql.py
@@ -1,6 +1,6 @@
import re
-from .. import compat
+from ..util import compat
from .. import util
from .base import compiles, alter_table, format_table_name, RenameTable
from .impl import DefaultImpl
diff --git a/alembic/op.py b/alembic/op.py
index 8e5f777..1f367a1 100644
--- a/alembic/op.py
+++ b/alembic/op.py
@@ -1,6 +1,6 @@
-from .operations import Operations
-from . import util
+from .operations.base import Operations
# create proxy functions for
# each method on the Operations class.
-util.create_module_class_proxy(Operations, globals(), locals())
+Operations.create_module_class_proxy(globals(), locals())
+
diff --git a/alembic/operations/__init__.py b/alembic/operations/__init__.py
new file mode 100644
index 0000000..1f6ee5d
--- /dev/null
+++ b/alembic/operations/__init__.py
@@ -0,0 +1,6 @@
+from .base import Operations, BatchOperations
+from .ops import MigrateOperation
+from . import toimpl
+
+
+__all__ = ['Operations', 'BatchOperations', 'MigrateOperation'] \ No newline at end of file
diff --git a/alembic/operations/base.py b/alembic/operations/base.py
new file mode 100644
index 0000000..18710fc
--- /dev/null
+++ b/alembic/operations/base.py
@@ -0,0 +1,442 @@
+from contextlib import contextmanager
+
+from .. import util
+from ..util import sqla_compat
+from . import batch
+from . import schemaobj
+from ..util.compat import exec_
+import textwrap
+import inspect
+
+__all__ = ('Operations', 'BatchOperations')
+
+try:
+ from sqlalchemy.sql.naming import conv
+except:
+ conv = None
+
+
+class Operations(util.ModuleClsProxy):
+
+ """Define high level migration operations.
+
+ Each operation corresponds to some schema migration operation,
+ executed against a particular :class:`.MigrationContext`
+ which in turn represents connectivity to a database,
+ or a file output stream.
+
+ While :class:`.Operations` is normally configured as
+ part of the :meth:`.EnvironmentContext.run_migrations`
+ method called from an ``env.py`` script, a standalone
+ :class:`.Operations` instance can be
+ made for use cases external to regular Alembic
+ migrations by passing in a :class:`.MigrationContext`::
+
+ from alembic.migration import MigrationContext
+ from alembic.operations import Operations
+
+ conn = myengine.connect()
+ ctx = MigrationContext.configure(conn)
+ op = Operations(ctx)
+
+ op.alter_column("t", "c", nullable=True)
+
+ Note that as of 0.8, most of the methods on this class are produced
+ dynamically using the :meth:`.Operations.register_operation`
+ method.
+
+ """
+
+ _to_impl = util.Dispatcher()
+
+ def __init__(self, migration_context, impl=None):
+ """Construct a new :class:`.Operations`
+
+ :param migration_context: a :class:`.MigrationContext`
+ instance.
+
+ """
+ self.migration_context = migration_context
+ if impl is None:
+ self.impl = migration_context.impl
+ else:
+ self.impl = impl
+
+ self.schema_obj = schemaobj.SchemaObjects(migration_context)
+
+ @classmethod
+ def register_operation(cls, name, sourcename=None):
+ """Register a new operation for this class.
+
+ This method is normally used to add new operations
+ to the :class:`.Operations` class, and possibly the
+ :class:`.BatchOperations` class as well. All Alembic migration
+ operations are implemented via this system, however the system
+ is also available as a public API to facilitate adding custom
+ operations.
+
+ .. versionadded:: 0.8.0
+
+        :param name: name of the method to produce on the target class.
+        :param sourcename: optional name of the classmethod on the
+         operation class to invoke; defaults to ``name``.
+
+        .. seealso:: :ref:`operation_plugins`
+        """
+ def register(op_cls):
+ if sourcename is None:
+ fn = getattr(op_cls, name)
+ source_name = fn.__name__
+ else:
+ fn = getattr(op_cls, sourcename)
+ source_name = fn.__name__
+
+ spec = inspect.getargspec(fn)
+
+ name_args = spec[0]
+ assert name_args[0:2] == ['cls', 'operations']
+
+ name_args[0:2] = ['self']
+
+ args = inspect.formatargspec(*spec)
+ num_defaults = len(spec[3]) if spec[3] else 0
+ if num_defaults:
+ defaulted_vals = name_args[0 - num_defaults:]
+ else:
+ defaulted_vals = ()
+
+ apply_kw = inspect.formatargspec(
+ name_args, spec[1], spec[2],
+ defaulted_vals,
+ formatvalue=lambda x: '=' + x)
+
+ func_text = textwrap.dedent("""\
+ def %(name)s%(args)s:
+ %(doc)r
+ return op_cls.%(source_name)s%(apply_kw)s
+ """ % {
+ 'name': name,
+ 'source_name': source_name,
+ 'args': args,
+ 'apply_kw': apply_kw,
+ 'doc': fn.__doc__,
+ 'meth': fn.__name__
+ })
+ globals_ = {'op_cls': op_cls}
+ lcl = {}
+ exec_(func_text, globals_, lcl)
+ setattr(cls, name, lcl[name])
+ fn.__func__.__doc__ = "This method is proxied on "\
+ "the :class:`.%s` class, via the :meth:`.%s.%s` method." % (
+ cls.__name__, cls.__name__, name
+ )
+ return op_cls
+ return register
+
+ @classmethod
+ def implementation_for(cls, op_cls):
+ """Register an implementation for a given :class:`.MigrateOperation`.
+
+ This is part of the operation extensibility API.
+
+ .. seealso::
+
+ :ref:`operation_plugins` - example of use
+
+ """
+
+ def decorate(fn):
+ cls._to_impl.dispatch_for(op_cls)(fn)
+ return fn
+ return decorate
+
+ @classmethod
+ @contextmanager
+ def context(cls, migration_context):
+ op = Operations(migration_context)
+ op._install_proxy()
+ yield op
+ op._remove_proxy()
+
+ @contextmanager
+ def batch_alter_table(
+ self, table_name, schema=None, recreate="auto", copy_from=None,
+ table_args=(), table_kwargs=util.immutabledict(),
+ reflect_args=(), reflect_kwargs=util.immutabledict(),
+ naming_convention=None):
+ """Invoke a series of per-table migrations in batch.
+
+ Batch mode allows a series of operations specific to a table
+ to be syntactically grouped together, and allows for alternate
+ modes of table migration, in particular the "recreate" style of
+ migration required by SQLite.
+
+ "recreate" style is as follows:
+
+ 1. A new table is created with the new specification, based on the
+ migration directives within the batch, using a temporary name.
+
+        2. the data is copied from the existing table to the new table.
+
+ 3. the existing table is dropped.
+
+ 4. the new table is renamed to the existing table name.
+
+ The directive by default will only use "recreate" style on the
+ SQLite backend, and only if directives are present which require
+ this form, e.g. anything other than ``add_column()``. The batch
+ operation on other backends will proceed using standard ALTER TABLE
+ operations.
+
+ The method is used as a context manager, which returns an instance
+ of :class:`.BatchOperations`; this object is the same as
+ :class:`.Operations` except that table names and schema names
+ are omitted. E.g.::
+
+ with op.batch_alter_table("some_table") as batch_op:
+ batch_op.add_column(Column('foo', Integer))
+ batch_op.drop_column('bar')
+
+ The operations within the context manager are invoked at once
+ when the context is ended. When run against SQLite, if the
+ migrations include operations not supported by SQLite's ALTER TABLE,
+ the entire table will be copied to a new one with the new
+ specification, moving all data across as well.
+
+ The copy operation by default uses reflection to retrieve the current
+ structure of the table, and therefore :meth:`.batch_alter_table`
+ in this mode requires that the migration is run in "online" mode.
+ The ``copy_from`` parameter may be passed which refers to an existing
+ :class:`.Table` object, which will bypass this reflection step.
+
+ .. note:: The table copy operation will currently not copy
+ CHECK constraints, and may not copy UNIQUE constraints that are
+ unnamed, as is possible on SQLite. See the section
+ :ref:`sqlite_batch_constraints` for workarounds.
+
+ :param table_name: name of table
+ :param schema: optional schema name.
+ :param recreate: under what circumstances the table should be
+ recreated. At its default of ``"auto"``, the SQLite dialect will
+ recreate the table if any operations other than ``add_column()``,
+ ``create_index()``, or ``drop_index()`` are
+ present. Other options include ``"always"`` and ``"never"``.
+ :param copy_from: optional :class:`~sqlalchemy.schema.Table` object
+ that will act as the structure of the table being copied. If omitted,
+ table reflection is used to retrieve the structure of the table.
+
+ .. versionadded:: 0.7.6 Fully implemented the
+ :paramref:`~.Operations.batch_alter_table.copy_from`
+ parameter.
+
+ .. seealso::
+
+ :ref:`batch_offline_mode`
+
+ :paramref:`~.Operations.batch_alter_table.reflect_args`
+
+ :paramref:`~.Operations.batch_alter_table.reflect_kwargs`
+
+ :param reflect_args: a sequence of additional positional arguments that
+ will be applied to the table structure being reflected / copied;
+ this may be used to pass column and constraint overrides to the
+ table that will be reflected, in lieu of passing the whole
+ :class:`~sqlalchemy.schema.Table` using
+ :paramref:`~.Operations.batch_alter_table.copy_from`.
+
+ .. versionadded:: 0.7.1
+
+ :param reflect_kwargs: a dictionary of additional keyword arguments
+ that will be applied to the table structure being copied; this may be
+ used to pass additional table and reflection options to the table that
+ will be reflected, in lieu of passing the whole
+ :class:`~sqlalchemy.schema.Table` using
+ :paramref:`~.Operations.batch_alter_table.copy_from`.
+
+ .. versionadded:: 0.7.1
+
+ :param table_args: a sequence of additional positional arguments that
+ will be applied to the new :class:`~sqlalchemy.schema.Table` when
+ created, in addition to those copied from the source table.
+ This may be used to provide additional constraints such as CHECK
+ constraints that may not be reflected.
+ :param table_kwargs: a dictionary of additional keyword arguments
+ that will be applied to the new :class:`~sqlalchemy.schema.Table`
+ when created, in addition to those copied from the source table.
+ This may be used to provide for additional table options that may
+ not be reflected.
+
+ .. versionadded:: 0.7.0
+
+ :param naming_convention: a naming convention dictionary of the form
+ described at :ref:`autogen_naming_conventions` which will be applied
+ to the :class:`~sqlalchemy.schema.MetaData` during the reflection
+ process. This is typically required if one wants to drop SQLite
+ constraints, as these constraints will not have names when
+ reflected on this backend. Requires SQLAlchemy **0.9.4** or greater.
+
+ .. seealso::
+
+ :ref:`dropping_sqlite_foreign_keys`
+
+ .. versionadded:: 0.7.1
+
+ .. note:: batch mode requires SQLAlchemy 0.8 or above.
+
+ .. seealso::
+
+ :ref:`batch_migrations`
+
+ """
+ impl = batch.BatchOperationsImpl(
+ self, table_name, schema, recreate,
+ copy_from, table_args, table_kwargs, reflect_args,
+ reflect_kwargs, naming_convention)
+ batch_op = BatchOperations(self.migration_context, impl=impl)
+ yield batch_op
+ impl.flush()
+
+ def get_context(self):
+ """Return the :class:`.MigrationContext` object that's
+ currently in use.
+
+ """
+
+ return self.migration_context
+
+ def invoke(self, operation):
+ """Given a :class:`.MigrateOperation`, invoke it in terms of
+ this :class:`.Operations` instance.
+
+ .. versionadded:: 0.8.0
+
+ """
+ fn = self._to_impl.dispatch(
+ operation, self.migration_context.impl.__dialect__)
+ return fn(self, operation)
+
+ def f(self, name):
+ """Indicate a string name that has already had a naming convention
+ applied to it.
+
+ This feature combines with the SQLAlchemy ``naming_convention`` feature
+ to disambiguate constraint names that have already had naming
+ conventions applied to them, versus those that have not. This is
+ necessary in the case that the ``"%(constraint_name)s"`` token
+ is used within a naming convention, so that it can be identified
+ that this particular name should remain fixed.
+
+ If the :meth:`.Operations.f` is used on a constraint, the naming
+ convention will not take effect::
+
+ op.add_column('t', 'x', Boolean(name=op.f('ck_bool_t_x')))
+
+ Above, the CHECK constraint generated will have the name
+ ``ck_bool_t_x`` regardless of whether or not a naming convention is
+ in use.
+
+ Alternatively, if a naming convention is in use, and 'f' is not used,
+ names will be converted along conventions. If the ``target_metadata``
+ contains the naming convention
+ ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the
+        output of the following::
+
+ op.add_column('t', 'x', Boolean(name='x'))
+
+ will be::
+
+ CONSTRAINT ck_bool_t_x CHECK (x in (1, 0)))
+
+ The function is rendered in the output of autogenerate when
+ a particular constraint name is already converted, for SQLAlchemy
+ version **0.9.4 and greater only**. Even though ``naming_convention``
+ was introduced in 0.9.2, the string disambiguation service is new
+ as of 0.9.4.
+
+ .. versionadded:: 0.6.4
+
+ """
+ if conv:
+ return conv(name)
+ else:
+ raise NotImplementedError(
+ "op.f() feature requires SQLAlchemy 0.9.4 or greater.")
+
+ def inline_literal(self, value, type_=None):
+ """Produce an 'inline literal' expression, suitable for
+ using in an INSERT, UPDATE, or DELETE statement.
+
+ When using Alembic in "offline" mode, CRUD operations
+ aren't compatible with SQLAlchemy's default behavior surrounding
+ literal values,
+ which is that they are converted into bound values and passed
+ separately into the ``execute()`` method of the DBAPI cursor.
+ An offline SQL
+ script needs to have these rendered inline. While it should
+ always be noted that inline literal values are an **enormous**
+ security hole in an application that handles untrusted input,
+ a schema migration is not run in this context, so
+ literals are safe to render inline, with the caveat that
+ advanced types like dates may not be supported directly
+ by SQLAlchemy.
+
+ See :meth:`.execute` for an example usage of
+ :meth:`.inline_literal`.
+
+ The environment can also be configured to attempt to render
+ "literal" values inline automatically, for those simple types
+ that are supported by the dialect; see
+ :paramref:`.EnvironmentContext.configure.literal_binds` for this
+ more recently added feature.
+
+ :param value: The value to render. Strings, integers, and simple
+ numerics should be supported. Other types like boolean,
+ dates, etc. may or may not be supported yet by various
+ backends.
+ :param ``type_``: optional - a :class:`sqlalchemy.types.TypeEngine`
+ subclass stating the type of this value. In SQLAlchemy
+ expressions, this is usually derived automatically
+ from the Python type of the value itself, as well as
+ based on the context in which the value is used.
+
+ .. seealso::
+
+ :paramref:`.EnvironmentContext.configure.literal_binds`
+
+ """
+ return sqla_compat._literal_bindparam(None, value, type_=type_)
+
+ def get_bind(self):
+ """Return the current 'bind'.
+
+ Under normal circumstances, this is the
+ :class:`~sqlalchemy.engine.Connection` currently being used
+ to emit SQL to the database.
+
+        In a SQL script ("offline") context, this value may be ``None``.
+
+ """
+ return self.migration_context.impl.bind
+
+
+class BatchOperations(Operations):
+ """Modifies the interface :class:`.Operations` for batch mode.
+
+ This basically omits the ``table_name`` and ``schema`` parameters
+ from associated methods, as these are a given when running under batch
+ mode.
+
+ .. seealso::
+
+ :meth:`.Operations.batch_alter_table`
+
+ Note that as of 0.8, most of the methods on this class are produced
+ dynamically using the :meth:`.Operations.register_operation`
+ method.
+
+ """
+
+ def _noop(self, operation):
+ raise NotImplementedError(
+ "The %s method does not apply to a batch table alter operation."
+ % operation)
diff --git a/alembic/batch.py b/alembic/operations/batch.py
index 1006739..726df78 100644
--- a/alembic/batch.py
+++ b/alembic/operations/batch.py
@@ -3,8 +3,8 @@ from sqlalchemy import Table, MetaData, Index, select, Column, \
from sqlalchemy import types as sqltypes
from sqlalchemy import schema as sql_schema
from sqlalchemy.util import OrderedDict
-from . import util
-from .ddl.base import _columns_for_constraint, _is_type_bound
+from .. import util
+from ..util.sqla_compat import _columns_for_constraint, _is_type_bound
class BatchOperationsImpl(object):
diff --git a/alembic/operations.py b/alembic/operations/ops.py
index 2bf8060..1a38d07 100644
--- a/alembic/operations.py
+++ b/alembic/operations/ops.py
@@ -1,345 +1,82 @@
-from contextlib import contextmanager
+from .. import util
+from ..util import sqla_compat
+from . import schemaobj
+from sqlalchemy.types import NULLTYPE
+from .base import Operations, BatchOperations
-from sqlalchemy.types import NULLTYPE, Integer
-from sqlalchemy import schema as sa_schema
-from . import util, batch
-from .compat import string_types
-from .ddl import impl
+class MigrateOperation(object):
+ """base class for migration command and organization objects.
-__all__ = ('Operations', 'BatchOperations')
+ This system is part of the operation extensibility API.
-try:
- from sqlalchemy.sql.naming import conv
-except:
- conv = None
+ .. versionadded:: 0.8.0
+ .. seealso::
-class Operations(object):
-
- """Define high level migration operations.
-
- Each operation corresponds to some schema migration operation,
- executed against a particular :class:`.MigrationContext`
- which in turn represents connectivity to a database,
- or a file output stream.
-
- While :class:`.Operations` is normally configured as
- part of the :meth:`.EnvironmentContext.run_migrations`
- method called from an ``env.py`` script, a standalone
- :class:`.Operations` instance can be
- made for use cases external to regular Alembic
- migrations by passing in a :class:`.MigrationContext`::
-
- from alembic.migration import MigrationContext
- from alembic.operations import Operations
+ :ref:`operation_objects`
- conn = myengine.connect()
- ctx = MigrationContext.configure(conn)
- op = Operations(ctx)
+ :ref:`operation_plugins`
- op.alter_column("t", "c", nullable=True)
+ :ref:`customizing_revision`
"""
- def __init__(self, migration_context, impl=None):
- """Construct a new :class:`.Operations`
-
- :param migration_context: a :class:`.MigrationContext`
- instance.
- """
- self.migration_context = migration_context
- if impl is None:
- self.impl = migration_context.impl
- else:
- self.impl = impl
+class AddConstraintOp(MigrateOperation):
+ """Represent an add constraint operation."""
@classmethod
- @contextmanager
- def context(cls, migration_context):
- from .op import _install_proxy, _remove_proxy
- op = Operations(migration_context)
- _install_proxy(op)
- yield op
- _remove_proxy()
-
- def _primary_key_constraint(self, name, table_name, cols, schema=None):
- m = self._metadata()
- columns = [sa_schema.Column(n, NULLTYPE) for n in cols]
- t1 = sa_schema.Table(table_name, m,
- *columns,
- schema=schema)
- p = sa_schema.PrimaryKeyConstraint(*columns, name=name)
- t1.append_constraint(p)
- return p
-
- def _foreign_key_constraint(self, name, source, referent,
- local_cols, remote_cols,
- onupdate=None, ondelete=None,
- deferrable=None, source_schema=None,
- referent_schema=None, initially=None,
- match=None, **dialect_kw):
- m = self._metadata()
- if source == referent:
- t1_cols = local_cols + remote_cols
- else:
- t1_cols = local_cols
- sa_schema.Table(
- referent, m,
- *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
- schema=referent_schema)
-
- t1 = sa_schema.Table(
- source, m,
- *[sa_schema.Column(n, NULLTYPE) for n in t1_cols],
- schema=source_schema)
-
- tname = "%s.%s" % (referent_schema, referent) if referent_schema \
- else referent
-
- if util.sqla_08:
- # "match" kw unsupported in 0.7
- dialect_kw['match'] = match
-
- f = sa_schema.ForeignKeyConstraint(local_cols,
- ["%s.%s" % (tname, n)
- for n in remote_cols],
- name=name,
- onupdate=onupdate,
- ondelete=ondelete,
- deferrable=deferrable,
- initially=initially,
- **dialect_kw
- )
- t1.append_constraint(f)
-
- return f
-
- def _unique_constraint(self, name, source, local_cols, schema=None, **kw):
- t = sa_schema.Table(
- source, self._metadata(),
- *[sa_schema.Column(n, NULLTYPE) for n in local_cols],
- schema=schema)
- kw['name'] = name
- uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw)
- # TODO: need event tests to ensure the event
- # is fired off here
- t.append_constraint(uq)
- return uq
-
- def _check_constraint(self, name, source, condition, schema=None, **kw):
- t = sa_schema.Table(source, self._metadata(),
- sa_schema.Column('x', Integer), schema=schema)
- ck = sa_schema.CheckConstraint(condition, name=name, **kw)
- t.append_constraint(ck)
- return ck
-
- def _metadata(self):
- kw = {}
- if 'target_metadata' in self.migration_context.opts:
- mt = self.migration_context.opts['target_metadata']
- if hasattr(mt, 'naming_convention'):
- kw['naming_convention'] = mt.naming_convention
- return sa_schema.MetaData(**kw)
-
- def _table(self, name, *columns, **kw):
- m = self._metadata()
- t = sa_schema.Table(name, m, *columns, **kw)
- for f in t.foreign_keys:
- self._ensure_table_for_fk(m, f)
- return t
-
- def _column(self, name, type_, **kw):
- return sa_schema.Column(name, type_, **kw)
-
- def _index(self, name, tablename, columns, schema=None, **kw):
- t = sa_schema.Table(
- tablename or 'no_table', self._metadata(),
- schema=schema
- )
- idx = sa_schema.Index(
- name,
- *[impl._textual_index_column(t, n) for n in columns],
- **kw)
- return idx
-
- def _parse_table_key(self, table_key):
- if '.' in table_key:
- tokens = table_key.split('.')
- sname = ".".join(tokens[0:-1])
- tname = tokens[-1]
- else:
- tname = table_key
- sname = None
- return (sname, tname)
-
- def _ensure_table_for_fk(self, metadata, fk):
- """create a placeholder Table object for the referent of a
- ForeignKey.
-
- """
- if isinstance(fk._colspec, string_types):
- table_key, cname = fk._colspec.rsplit('.', 1)
- sname, tname = self._parse_table_key(table_key)
- if table_key not in metadata.tables:
- rel_t = sa_schema.Table(tname, metadata, schema=sname)
- else:
- rel_t = metadata.tables[table_key]
- if cname not in rel_t.c:
- rel_t.append_column(sa_schema.Column(cname, NULLTYPE))
-
- @contextmanager
- def batch_alter_table(
- self, table_name, schema=None, recreate="auto", copy_from=None,
- table_args=(), table_kwargs=util.immutabledict(),
- reflect_args=(), reflect_kwargs=util.immutabledict(),
- naming_convention=None):
- """Invoke a series of per-table migrations in batch.
-
- Batch mode allows a series of operations specific to a table
- to be syntactically grouped together, and allows for alternate
- modes of table migration, in particular the "recreate" style of
- migration required by SQLite.
-
- "recreate" style is as follows:
-
- 1. A new table is created with the new specification, based on the
- migration directives within the batch, using a temporary name.
-
- 2. the data copied from the existing table to the new table.
-
- 3. the existing table is dropped.
-
- 4. the new table is renamed to the existing table name.
-
- The directive by default will only use "recreate" style on the
- SQLite backend, and only if directives are present which require
- this form, e.g. anything other than ``add_column()``. The batch
- operation on other backends will proceed using standard ALTER TABLE
- operations.
-
- The method is used as a context manager, which returns an instance
- of :class:`.BatchOperations`; this object is the same as
- :class:`.Operations` except that table names and schema names
- are omitted. E.g.::
-
- with op.batch_alter_table("some_table") as batch_op:
- batch_op.add_column(Column('foo', Integer))
- batch_op.drop_column('bar')
-
- The operations within the context manager are invoked at once
- when the context is ended. When run against SQLite, if the
- migrations include operations not supported by SQLite's ALTER TABLE,
- the entire table will be copied to a new one with the new
- specification, moving all data across as well.
-
- The copy operation by default uses reflection to retrieve the current
- structure of the table, and therefore :meth:`.batch_alter_table`
- in this mode requires that the migration is run in "online" mode.
- The ``copy_from`` parameter may be passed which refers to an existing
- :class:`.Table` object, which will bypass this reflection step.
-
- .. note:: The table copy operation will currently not copy
- CHECK constraints, and may not copy UNIQUE constraints that are
- unnamed, as is possible on SQLite. See the section
- :ref:`sqlite_batch_constraints` for workarounds.
-
- :param table_name: name of table
- :param schema: optional schema name.
- :param recreate: under what circumstances the table should be
- recreated. At its default of ``"auto"``, the SQLite dialect will
- recreate the table if any operations other than ``add_column()``,
- ``create_index()``, or ``drop_index()`` are
- present. Other options include ``"always"`` and ``"never"``.
- :param copy_from: optional :class:`~sqlalchemy.schema.Table` object
- that will act as the structure of the table being copied. If omitted,
- table reflection is used to retrieve the structure of the table.
-
- .. versionadded:: 0.7.6 Fully implemented the
- :paramref:`~.Operations.batch_alter_table.copy_from`
- parameter.
-
- .. seealso::
-
- :ref:`batch_offline_mode`
-
- :paramref:`~.Operations.batch_alter_table.reflect_args`
-
- :paramref:`~.Operations.batch_alter_table.reflect_kwargs`
-
- :param reflect_args: a sequence of additional positional arguments that
- will be applied to the table structure being reflected / copied;
- this may be used to pass column and constraint overrides to the
- table that will be reflected, in lieu of passing the whole
- :class:`~sqlalchemy.schema.Table` using
- :paramref:`~.Operations.batch_alter_table.copy_from`.
-
- .. versionadded:: 0.7.1
-
- :param reflect_kwargs: a dictionary of additional keyword arguments
- that will be applied to the table structure being copied; this may be
- used to pass additional table and reflection options to the table that
- will be reflected, in lieu of passing the whole
- :class:`~sqlalchemy.schema.Table` using
- :paramref:`~.Operations.batch_alter_table.copy_from`.
-
- .. versionadded:: 0.7.1
-
- :param table_args: a sequence of additional positional arguments that
- will be applied to the new :class:`~sqlalchemy.schema.Table` when
- created, in addition to those copied from the source table.
- This may be used to provide additional constraints such as CHECK
- constraints that may not be reflected.
- :param table_kwargs: a dictionary of additional keyword arguments
- that will be applied to the new :class:`~sqlalchemy.schema.Table`
- when created, in addition to those copied from the source table.
- This may be used to provide for additional table options that may
- not be reflected.
-
- .. versionadded:: 0.7.0
-
- :param naming_convention: a naming convention dictionary of the form
- described at :ref:`autogen_naming_conventions` which will be applied
- to the :class:`~sqlalchemy.schema.MetaData` during the reflection
- process. This is typically required if one wants to drop SQLite
- constraints, as these constraints will not have names when
- reflected on this backend. Requires SQLAlchemy **0.9.4** or greater.
-
- .. seealso::
-
- :ref:`dropping_sqlite_foreign_keys`
-
- .. versionadded:: 0.7.1
-
- .. note:: batch mode requires SQLAlchemy 0.8 or above.
+ def from_constraint(cls, constraint):
+ funcs = {
+ "unique_constraint": CreateUniqueConstraintOp.from_constraint,
+ "foreign_key_constraint": CreateForeignKeyOp.from_constraint,
+ "primary_key_constraint": CreatePrimaryKeyOp.from_constraint,
+ "check_constraint": CreateCheckConstraintOp.from_constraint,
+ "column_check_constraint": CreateCheckConstraintOp.from_constraint,
+ }
+ return funcs[constraint.__visit_name__](constraint)
- .. seealso::
- :ref:`batch_migrations`
+@Operations.register_operation("drop_constraint")
+@BatchOperations.register_operation("drop_constraint", "batch_drop_constraint")
+class DropConstraintOp(MigrateOperation):
+ """Represent a drop constraint operation."""
- """
- impl = batch.BatchOperationsImpl(
- self, table_name, schema, recreate,
- copy_from, table_args, table_kwargs, reflect_args,
- reflect_kwargs, naming_convention)
- batch_op = BatchOperations(self.migration_context, impl=impl)
- yield batch_op
- impl.flush()
-
- def get_context(self):
- """Return the :class:`.MigrationContext` object that's
- currently in use.
+ def __init__(self, constraint_name, table_name, type_=None, schema=None):
+ self.constraint_name = constraint_name
+ self.table_name = table_name
+ self.constraint_type = type_
+ self.schema = schema
- """
+ @classmethod
+ def from_constraint(cls, constraint):
+ types = {
+ "unique_constraint": "unique",
+ "foreign_key_constraint": "foreignkey",
+ "primary_key_constraint": "primary",
+ "check_constraint": "check",
+ "column_check_constraint": "check",
+ }
- return self.migration_context
+ constraint_table = sqla_compat._table_for_constraint(constraint)
+ return cls(
+ constraint.name,
+ constraint_table.name,
+ schema=constraint_table.schema,
+ type_=types[constraint.__visit_name__]
+ )
- def rename_table(self, old_table_name, new_table_name, schema=None):
- """Emit an ALTER TABLE to rename a table.
+ @classmethod
+ @util._with_legacy_names([("type", "type_")])
+ def drop_constraint(
+ cls, operations, name, table_name, type_=None, schema=None):
+ """Drop a constraint of the given name, typically via DROP CONSTRAINT.
- :param old_table_name: old name.
- :param new_table_name: new name.
+ :param name: name of the constraint.
+ :param table_name: table name.
+ :param ``type_``: optional, required on MySQL. can be
+ 'foreignkey', 'primary', 'unique', or 'check'.
:param schema: Optional schema name to operate within. To control
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
@@ -349,234 +86,96 @@ class Operations(object):
:class:`~sqlalchemy.sql.elements.quoted_name` construct.
"""
- self.impl.rename_table(
- old_table_name,
- new_table_name,
- schema=schema
- )
- @util._with_legacy_names([('name', 'new_column_name')])
- def alter_column(self, table_name, column_name,
- nullable=None,
- server_default=False,
- new_column_name=None,
- type_=None,
- autoincrement=None,
- existing_type=None,
- existing_server_default=False,
- existing_nullable=None,
- existing_autoincrement=None,
- schema=None
- ):
- """Issue an "alter column" instruction using the
- current migration context.
+ op = cls(name, table_name, type_=type_, schema=schema)
+ return operations.invoke(op)
- Generally, only that aspect of the column which
- is being changed, i.e. name, type, nullability,
- default, needs to be specified. Multiple changes
- can also be specified at once and the backend should
- "do the right thing", emitting each change either
- separately or together as the backend allows.
-
- MySQL has special requirements here, since MySQL
- cannot ALTER a column without a full specification.
- When producing MySQL-compatible migration files,
- it is recommended that the ``existing_type``,
- ``existing_server_default``, and ``existing_nullable``
- parameters be present, if not being altered.
+ @classmethod
+ def batch_drop_constraint(cls, operations, name, type_=None):
+ """Issue a "drop constraint" instruction using the
+ current batch migration context.
- Type changes which are against the SQLAlchemy
- "schema" types :class:`~sqlalchemy.types.Boolean`
- and :class:`~sqlalchemy.types.Enum` may also
- add or drop constraints which accompany those
- types on backends that don't support them natively.
- The ``existing_server_default`` argument is
- used in this case as well to remove a previous
- constraint.
+ The batch form of this call omits the ``table_name`` and ``schema``
+ arguments from the call.
- :param table_name: string name of the target table.
- :param column_name: string name of the target column,
- as it exists before the operation begins.
- :param nullable: Optional; specify ``True`` or ``False``
- to alter the column's nullability.
- :param server_default: Optional; specify a string
- SQL expression, :func:`~sqlalchemy.sql.expression.text`,
- or :class:`~sqlalchemy.schema.DefaultClause` to indicate
- an alteration to the column's default value.
- Set to ``None`` to have the default removed.
- :param new_column_name: Optional; specify a string name here to
- indicate the new name within a column rename operation.
- :param ``type_``: Optional; a :class:`~sqlalchemy.types.TypeEngine`
- type object to specify a change to the column's type.
- For SQLAlchemy types that also indicate a constraint (i.e.
- :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
- the constraint is also generated.
- :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column;
- currently understood by the MySQL dialect.
- :param existing_type: Optional; a
- :class:`~sqlalchemy.types.TypeEngine`
- type object to specify the previous type. This
- is required for all MySQL column alter operations that
- don't otherwise specify a new type, as well as for
- when nullability is being changed on a SQL Server
- column. It is also used if the type is a so-called
- SQLlchemy "schema" type which may define a constraint (i.e.
- :class:`~sqlalchemy.types.Boolean`,
- :class:`~sqlalchemy.types.Enum`),
- so that the constraint can be dropped.
- :param existing_server_default: Optional; The existing
- default value of the column. Required on MySQL if
- an existing default is not being changed; else MySQL
- removes the default.
- :param existing_nullable: Optional; the existing nullability
- of the column. Required on MySQL if the existing nullability
- is not being changed; else MySQL sets this to NULL.
- :param existing_autoincrement: Optional; the existing autoincrement
- of the column. Used for MySQL's system of altering a column
- that specifies ``AUTO_INCREMENT``.
- :param schema: Optional schema name to operate within. To control
- quoting of the schema outside of the default behavior, use
- the SQLAlchemy construct
- :class:`~sqlalchemy.sql.elements.quoted_name`.
+ .. seealso::
- .. versionadded:: 0.7.0 'schema' can now accept a
- :class:`~sqlalchemy.sql.elements.quoted_name` construct.
+ :meth:`.Operations.drop_constraint`
"""
-
- compiler = self.impl.dialect.statement_compiler(
- self.impl.dialect,
- None
+ op = cls(
+ name, operations.impl.table_name,
+ type_=type_, schema=operations.impl.schema
)
+ return operations.invoke(op)
- def _count_constraint(constraint):
- return not isinstance(
- constraint,
- sa_schema.PrimaryKeyConstraint) and \
- (not constraint._create_rule or
- constraint._create_rule(compiler))
-
- if existing_type and type_:
- t = self._table(table_name,
- sa_schema.Column(column_name, existing_type),
- schema=schema
- )
- for constraint in t.constraints:
- if _count_constraint(constraint):
- self.impl.drop_constraint(constraint)
-
- self.impl.alter_column(table_name, column_name,
- nullable=nullable,
- server_default=server_default,
- name=new_column_name,
- type_=type_,
- schema=schema,
- autoincrement=autoincrement,
- existing_type=existing_type,
- existing_server_default=existing_server_default,
- existing_nullable=existing_nullable,
- existing_autoincrement=existing_autoincrement
- )
-
- if type_:
- t = self._table(table_name,
- sa_schema.Column(column_name, type_),
- schema=schema
- )
- for constraint in t.constraints:
- if _count_constraint(constraint):
- self.impl.add_constraint(constraint)
-
- def f(self, name):
- """Indicate a string name that has already had a naming convention
- applied to it.
-
- This feature combines with the SQLAlchemy ``naming_convention`` feature
- to disambiguate constraint names that have already had naming
- conventions applied to them, versus those that have not. This is
- necessary in the case that the ``"%(constraint_name)s"`` token
- is used within a naming convention, so that it can be identified
- that this particular name should remain fixed.
-
- If the :meth:`.Operations.f` is used on a constraint, the naming
- convention will not take effect::
-
- op.add_column('t', 'x', Boolean(name=op.f('ck_bool_t_x')))
-
- Above, the CHECK constraint generated will have the name
- ``ck_bool_t_x`` regardless of whether or not a naming convention is
- in use.
-
- Alternatively, if a naming convention is in use, and 'f' is not used,
- names will be converted along conventions. If the ``target_metadata``
- contains the naming convention
- ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the
- output of the following:
-
- op.add_column('t', 'x', Boolean(name='x'))
-
- will be::
-
- CONSTRAINT ck_bool_t_x CHECK (x in (1, 0)))
-
- The function is rendered in the output of autogenerate when
- a particular constraint name is already converted, for SQLAlchemy
- version **0.9.4 and greater only**. Even though ``naming_convention``
- was introduced in 0.9.2, the string disambiguation service is new
- as of 0.9.4.
-
- .. versionadded:: 0.6.4
-
- """
- if conv:
- return conv(name)
- else:
- raise NotImplementedError(
- "op.f() feature requires SQLAlchemy 0.9.4 or greater.")
- def add_column(self, table_name, column, schema=None):
- """Issue an "add column" instruction using the current
- migration context.
-
- e.g.::
+@Operations.register_operation("create_primary_key")
+@BatchOperations.register_operation(
+ "create_primary_key", "batch_create_primary_key")
+class CreatePrimaryKeyOp(AddConstraintOp):
+ """Represent a create primary key operation."""
- from alembic import op
- from sqlalchemy import Column, String
-
- op.add_column('organization',
- Column('name', String())
- )
+ def __init__(
+ self, constraint_name, table_name, columns, schema=None, **kw):
+ self.constraint_name = constraint_name
+ self.table_name = table_name
+ self.columns = columns
+ self.schema = schema
+ self.kw = kw
- The provided :class:`~sqlalchemy.schema.Column` object can also
- specify a :class:`~sqlalchemy.schema.ForeignKey`, referencing
- a remote table name. Alembic will automatically generate a stub
- "referenced" table and emit a second ALTER statement in order
- to add the constraint separately::
+ @classmethod
+ def from_constraint(cls, constraint):
+ constraint_table = sqla_compat._table_for_constraint(constraint)
+
+ return cls(
+ constraint.name,
+ constraint_table.name,
+ schema=constraint_table.schema,
+ *constraint.columns
+ )
- from alembic import op
- from sqlalchemy import Column, INTEGER, ForeignKey
+ def to_constraint(self, migration_context=None):
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+ return schema_obj.primary_key_constraint(
+ self.constraint_name, self.table_name,
+ self.columns, schema=self.schema)
- op.add_column('organization',
- Column('account_id', INTEGER, ForeignKey('accounts.id'))
- )
+ @classmethod
+ @util._with_legacy_names([('name', 'constraint_name')])
+ def create_primary_key(
+ cls, operations,
+ constraint_name, table_name, columns, schema=None):
+ """Issue a "create primary key" instruction using the current
+ migration context.
- Note that this statement uses the :class:`~sqlalchemy.schema.Column`
- construct as is from the SQLAlchemy library. In particular,
- default values to be created on the database side are
- specified using the ``server_default`` parameter, and not
- ``default`` which only specifies Python-side defaults::
+ e.g.::
from alembic import op
- from sqlalchemy import Column, TIMESTAMP, func
+ op.create_primary_key(
+ "pk_my_table", "my_table",
+ ["id", "version"]
+ )
- # specify "DEFAULT NOW" along with the column add
- op.add_column('account',
- Column('timestamp', TIMESTAMP, server_default=func.now())
- )
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
+ object which it then associates with the
+ :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
- :param table_name: String name of the parent table.
- :param column: a :class:`sqlalchemy.schema.Column` object
- representing the new column.
+ :param name: Name of the primary key constraint. The name is necessary
+ so that an ALTER statement can be emitted. For setups that
+ use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param table_name: String name of the target table.
+ :param columns: a list of string column names to be applied to the
+ primary key constraint.
:param schema: Optional schema name to operate within. To control
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
@@ -585,102 +184,103 @@ class Operations(object):
.. versionadded:: 0.7.0 'schema' can now accept a
:class:`~sqlalchemy.sql.elements.quoted_name` construct.
-
"""
+ op = cls(constraint_name, table_name, columns, schema)
+ return operations.invoke(op)
- t = self._table(table_name, column, schema=schema)
- self.impl.add_column(
- table_name,
- column,
- schema=schema
- )
- for constraint in t.constraints:
- if not isinstance(constraint, sa_schema.PrimaryKeyConstraint):
- self.impl.add_constraint(constraint)
- for index in t.indexes:
- self.impl._exec(sa_schema.CreateIndex(index))
+ @classmethod
+ def batch_create_primary_key(cls, operations, constraint_name, columns):
+ """Issue a "create primary key" instruction using the
+ current batch migration context.
- def drop_column(self, table_name, column_name, **kw):
- """Issue a "drop column" instruction using the current
- migration context.
+ The batch form of this call omits the ``table_name`` and ``schema``
+ arguments from the call.
- e.g.::
+ .. seealso::
- drop_column('organization', 'account_id')
+ :meth:`.Operations.create_primary_key`
- :param table_name: name of table
- :param column_name: name of column
- :param schema: Optional schema name to operate within. To control
- quoting of the schema outside of the default behavior, use
- the SQLAlchemy construct
- :class:`~sqlalchemy.sql.elements.quoted_name`.
+ """
+ raise NotImplementedError("not yet implemented")
- .. versionadded:: 0.7.0 'schema' can now accept a
- :class:`~sqlalchemy.sql.elements.quoted_name` construct.
- :param mssql_drop_check: Optional boolean. When ``True``, on
- Microsoft SQL Server only, first
- drop the CHECK constraint on the column using a
- SQL-script-compatible
- block that selects into a @variable from sys.check_constraints,
- then exec's a separate DROP CONSTRAINT for that constraint.
- :param mssql_drop_default: Optional boolean. When ``True``, on
- Microsoft SQL Server only, first
- drop the DEFAULT constraint on the column using a
- SQL-script-compatible
- block that selects into a @variable from sys.default_constraints,
- then exec's a separate DROP CONSTRAINT for that default.
- :param mssql_drop_foreign_key: Optional boolean. When ``True``, on
- Microsoft SQL Server only, first
- drop a single FOREIGN KEY constraint on the column using a
- SQL-script-compatible
- block that selects into a @variable from
- sys.foreign_keys/sys.foreign_key_columns,
- then exec's a separate DROP CONSTRAINT for that default. Only
- works if the column has exactly one FK constraint which refers to
- it, at the moment.
+@Operations.register_operation("create_unique_constraint")
+@BatchOperations.register_operation(
+ "create_unique_constraint", "batch_create_unique_constraint")
+class CreateUniqueConstraintOp(AddConstraintOp):
+ """Represent a create unique constraint operation."""
- .. versionadded:: 0.6.2
+ def __init__(
+ self, constraint_name, table_name, columns, schema=None, **kw):
+ self.constraint_name = constraint_name
+ self.table_name = table_name
+ self.columns = columns
+ self.schema = schema
+ self.kw = kw
- """
+ @classmethod
+ def from_constraint(cls, constraint):
+ constraint_table = sqla_compat._table_for_constraint(constraint)
- self.impl.drop_column(
- table_name,
- self._column(column_name, NULLTYPE),
+ kw = {}
+ if constraint.deferrable:
+ kw['deferrable'] = constraint.deferrable
+ if constraint.initially:
+ kw['initially'] = constraint.initially
+
+ return cls(
+ constraint.name,
+ constraint_table.name,
+ [c.name for c in constraint.columns],
+ schema=constraint_table.schema,
**kw
)
- def create_primary_key(self, name, table_name, cols, schema=None):
- """Issue a "create primary key" instruction using the current
- migration context.
+ def to_constraint(self, migration_context=None):
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+ return schema_obj.unique_constraint(
+ self.constraint_name, self.table_name, self.columns,
+ schema=self.schema, **self.kw)
+
+ @classmethod
+ @util._with_legacy_names([
+ ('name', 'constraint_name'),
+ ('source', 'table_name')
+ ])
+ def create_unique_constraint(
+ cls, operations, constraint_name, table_name, columns,
+ schema=None, **kw):
+ """Issue a "create unique constraint" instruction using the
+ current migration context.
e.g.::
from alembic import op
- op.create_primary_key(
- "pk_my_table", "my_table",
- ["id", "version"]
- )
+ op.create_unique_constraint("uq_user_name", "user", ["name"])
This internally generates a :class:`~sqlalchemy.schema.Table` object
containing the necessary columns, then generates a new
- :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
+ :class:`~sqlalchemy.schema.UniqueConstraint`
object which it then associates with the
:class:`~sqlalchemy.schema.Table`.
Any event listeners associated with this action will be fired
off normally. The :class:`~sqlalchemy.schema.AddConstraint`
construct is ultimately used to generate the ALTER statement.
- :param name: Name of the primary key constraint. The name is necessary
+ :param name: Name of the unique constraint. The name is necessary
so that an ALTER statement can be emitted. For setups that
use an automated naming scheme such as that described at
- :ref:`sqla:constraint_naming_conventions`
+ :ref:`sqla:constraint_naming_conventions`,
``name`` here can be ``None``, as the event listener will
apply the name to the constraint object when it is associated
with the table.
- :param table_name: String name of the target table.
- :param cols: a list of string column names to be applied to the
- primary key constraint.
+ :param table_name: String name of the source table.
+ :param columns: a list of string column names in the
+ source table.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
+ NOT DEFERRABLE when issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY <value>
+ when issuing DDL for this constraint.
:param schema: Optional schema name to operate within. To control
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
@@ -690,12 +290,94 @@ class Operations(object):
:class:`~sqlalchemy.sql.elements.quoted_name` construct.
"""
- self.impl.add_constraint(
- self._primary_key_constraint(name, table_name, cols,
- schema)
+
+ op = cls(
+ constraint_name, table_name, columns,
+ schema=schema, **kw
)
+ return operations.invoke(op)
+
+ @classmethod
+ @util._with_legacy_names([('name', 'constraint_name')])
+ def batch_create_unique_constraint(
+ cls, operations, constraint_name, columns, **kw):
+ """Issue a "create unique constraint" instruction using the
+ current batch migration context.
- def create_foreign_key(self, name, source, referent, local_cols,
+ The batch form of this call omits the ``source`` and ``schema``
+ arguments from the call.
+
+ .. seealso::
+
+ :meth:`.Operations.create_unique_constraint`
+
+ """
+ kw['schema'] = operations.impl.schema
+ op = cls(
+ constraint_name, operations.impl.table_name, columns,
+ **kw
+ )
+ return operations.invoke(op)
+
+
+@Operations.register_operation("create_foreign_key")
+@BatchOperations.register_operation(
+ "create_foreign_key", "batch_create_foreign_key")
+class CreateForeignKeyOp(AddConstraintOp):
+ """Represent a create foreign key constraint operation."""
+
+ def __init__(
+ self, constraint_name, source_table, referent_table, local_cols,
+ remote_cols, **kw):
+ self.constraint_name = constraint_name
+ self.source_table = source_table
+ self.referent_table = referent_table
+ self.local_cols = local_cols
+ self.remote_cols = remote_cols
+ self.kw = kw
+
+ @classmethod
+ def from_constraint(cls, constraint):
+ kw = {}
+ if constraint.onupdate:
+ kw['onupdate'] = constraint.onupdate
+ if constraint.ondelete:
+ kw['ondelete'] = constraint.ondelete
+ if constraint.initially:
+ kw['initially'] = constraint.initially
+ if constraint.deferrable:
+ kw['deferrable'] = constraint.deferrable
+ if constraint.use_alter:
+ kw['use_alter'] = constraint.use_alter
+
+ source_schema, source_table, \
+ source_columns, target_schema, \
+ target_table, target_columns = sqla_compat._fk_spec(constraint)
+
+ kw['source_schema'] = source_schema
+ kw['referent_schema'] = target_schema
+
+ return cls(
+ constraint.name,
+ source_table,
+ target_table,
+ source_columns,
+ target_columns,
+ **kw
+ )
+
+ def to_constraint(self, migration_context=None):
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+ return schema_obj.foreign_key_constraint(
+ self.constraint_name,
+ self.source_table, self.referent_table,
+ self.local_cols, self.remote_cols,
+ **self.kw)
+
+ @classmethod
+ @util._with_legacy_names([('name', 'constraint_name')])
+ def create_foreign_key(cls, operations, constraint_name,
+ source_table, referent_table, local_cols,
remote_cols, onupdate=None, ondelete=None,
deferrable=None, initially=None, match=None,
source_schema=None, referent_schema=None,
@@ -726,8 +408,8 @@ class Operations(object):
``name`` here can be ``None``, as the event listener will
apply the name to the constraint object when it is associated
with the table.
- :param source: String name of the source table.
- :param referent: String name of the destination table.
+ :param source_table: String name of the source table.
+ :param referent_table: String name of the destination table.
:param local_cols: a list of string column names in the
source table.
:param remote_cols: a list of string column names in the
@@ -745,68 +427,100 @@ class Operations(object):
"""
- self.impl.add_constraint(
- self._foreign_key_constraint(name, source, referent,
- local_cols, remote_cols,
- onupdate=onupdate, ondelete=ondelete,
- deferrable=deferrable,
- source_schema=source_schema,
- referent_schema=referent_schema,
- initially=initially, match=match,
- **dialect_kw)
+ op = cls(
+ constraint_name,
+ source_table, referent_table,
+ local_cols, remote_cols,
+ onupdate=onupdate, ondelete=ondelete,
+ deferrable=deferrable,
+ source_schema=source_schema,
+ referent_schema=referent_schema,
+ initially=initially, match=match,
+ **dialect_kw
)
+ return operations.invoke(op)
- def create_unique_constraint(self, name, source, local_cols,
- schema=None, **kw):
- """Issue a "create unique constraint" instruction using the
- current migration context.
+ @classmethod
+ @util._with_legacy_names([('name', 'constraint_name')])
+ def batch_create_foreign_key(
+ cls, operations, constraint_name, referent_table,
+ local_cols, remote_cols,
+ referent_schema=None,
+ onupdate=None, ondelete=None,
+ deferrable=None, initially=None, match=None,
+ **dialect_kw):
+ """Issue a "create foreign key" instruction using the
+ current batch migration context.
- e.g.::
+ The batch form of this call omits the ``source`` and ``source_schema``
+ arguments from the call.
- from alembic import op
- op.create_unique_constraint("uq_user_name", "user", ["name"])
+ e.g.::
- This internally generates a :class:`~sqlalchemy.schema.Table` object
- containing the necessary columns, then generates a new
- :class:`~sqlalchemy.schema.UniqueConstraint`
- object which it then associates with the
- :class:`~sqlalchemy.schema.Table`.
- Any event listeners associated with this action will be fired
- off normally. The :class:`~sqlalchemy.schema.AddConstraint`
- construct is ultimately used to generate the ALTER statement.
+ with batch_alter_table("address") as batch_op:
+ batch_op.create_foreign_key(
+ "fk_user_address",
+ "user", ["user_id"], ["id"])
- :param name: Name of the unique constraint. The name is necessary
- so that an ALTER statement can be emitted. For setups that
- use an automated naming scheme such as that described at
- :ref:`sqla:constraint_naming_conventions`,
- ``name`` here can be ``None``, as the event listener will
- apply the name to the constraint object when it is associated
- with the table.
- :param source: String name of the source table. Dotted schema names are
- supported.
- :param local_cols: a list of string column names in the
- source table.
- :param deferrable: optional bool. If set, emit DEFERRABLE or
- NOT DEFERRABLE when issuing DDL for this constraint.
- :param initially: optional string. If set, emit INITIALLY <value>
- when issuing DDL for this constraint.
- :param schema: Optional schema name to operate within. To control
- quoting of the schema outside of the default behavior, use
- the SQLAlchemy construct
- :class:`~sqlalchemy.sql.elements.quoted_name`.
+ .. seealso::
- .. versionadded:: 0.7.0 'schema' can now accept a
- :class:`~sqlalchemy.sql.elements.quoted_name` construct.
+ :meth:`.Operations.create_foreign_key`
"""
+ op = cls(
+ constraint_name,
+ operations.impl.table_name, referent_table,
+ local_cols, remote_cols,
+ onupdate=onupdate, ondelete=ondelete,
+ deferrable=deferrable,
+ source_schema=operations.impl.schema,
+ referent_schema=referent_schema,
+ initially=initially, match=match,
+ **dialect_kw
+ )
+ return operations.invoke(op)
+
+
+@Operations.register_operation("create_check_constraint")
+@BatchOperations.register_operation(
+ "create_check_constraint", "batch_create_check_constraint")
+class CreateCheckConstraintOp(AddConstraintOp):
+ """Represent a create check constraint operation."""
+
+ def __init__(
+ self, constraint_name, table_name, condition, schema=None, **kw):
+ self.constraint_name = constraint_name
+ self.table_name = table_name
+ self.condition = condition
+ self.schema = schema
+ self.kw = kw
- self.impl.add_constraint(
- self._unique_constraint(name, source, local_cols,
- schema=schema, **kw)
+ @classmethod
+ def from_constraint(cls, constraint):
+ constraint_table = sqla_compat._table_for_constraint(constraint)
+
+ return cls(
+ constraint.name,
+ constraint_table.name,
+ constraint.condition,
+ schema=constraint_table.schema
)
- def create_check_constraint(self, name, source, condition,
- schema=None, **kw):
+ def to_constraint(self, migration_context=None):
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+ return schema_obj.check_constraint(
+ self.constraint_name, self.table_name,
+ self.condition, schema=self.schema, **self.kw)
+
+ @classmethod
+ @util._with_legacy_names([
+ ('name', 'constraint_name'),
+ ('source', 'table_name')
+ ])
+ def create_check_constraint(
+ cls, operations,
+ constraint_name, table_name, condition,
+ schema=None, **kw):
"""Issue a "create check constraint" instruction using the
current migration context.
@@ -833,7 +547,7 @@ class Operations(object):
``name`` here can be ``None``, as the event listener will
apply the name to the constraint object when it is associated
with the table.
- :param source: String name of the source table.
+ :param table_name: String name of the source table.
:param condition: SQL expression that's the condition of the
constraint. Can be a string or SQLAlchemy expression language
structure.
@@ -850,12 +564,250 @@ class Operations(object):
:class:`~sqlalchemy.sql.elements.quoted_name` construct.
"""
- self.impl.add_constraint(
- self._check_constraint(
- name, source, condition, schema=schema, **kw)
+ op = cls(constraint_name, table_name, condition, schema=schema, **kw)
+ return operations.invoke(op)
+
+ @classmethod
+ @util._with_legacy_names([('name', 'constraint_name')])
+ def batch_create_check_constraint(
+ cls, operations, constraint_name, condition, **kw):
+ """Issue a "create check constraint" instruction using the
+ current batch migration context.
+
+ The batch form of this call omits the ``source`` and ``schema``
+ arguments from the call.
+
+ .. seealso::
+
+ :meth:`.Operations.create_check_constraint`
+
+ """
+ raise NotImplementedError("not yet implemented")
+
+
@Operations.register_operation("create_index")
@BatchOperations.register_operation("create_index", "batch_create_index")
class CreateIndexOp(MigrateOperation):
    """Represent a create index operation."""

    def __init__(
            self, index_name, table_name, columns, schema=None,
            unique=False, quote=None, _orig_index=None, **kw):
        """Record the elements of a CREATE INDEX.

        :param index_name: name of the index.
        :param table_name: name of the owning table.
        :param columns: list of string column names and/or SQL expressions.
        :param schema: optional schema name.
        :param unique: if True, a unique index.
        :param quote: optional forced quoting flag for the index name.
        :param _orig_index: internal use; an originating
         :class:`~sqlalchemy.schema.Index` returned as-is by
         :meth:`.to_index`.
        :param kw: additional dialect-specific keyword arguments.
        """
        self.index_name = index_name
        self.table_name = table_name
        self.columns = columns
        self.schema = schema
        self.unique = unique
        self.quote = quote
        self.kw = kw
        self._orig_index = _orig_index

    @classmethod
    def from_index(cls, index):
        """Build a :class:`.CreateIndexOp` from an existing
        :class:`~sqlalchemy.schema.Index`."""
        return cls(
            index.name,
            index.table.name,
            sqla_compat._get_index_expressions(index),
            schema=index.table.schema,
            unique=index.unique,
            quote=index.name.quote,
            _orig_index=index,
            **index.dialect_kwargs
        )

    def to_index(self, migration_context=None):
        """Return an :class:`~sqlalchemy.schema.Index` for this op,
        reusing the original Index object when one was captured."""
        # test against None explicitly, consistent with
        # CreateTableOp.to_table(); schema objects should not be
        # evaluated for truthiness
        if self._orig_index is not None:
            return self._orig_index
        schema_obj = schemaobj.SchemaObjects(migration_context)
        return schema_obj.index(
            self.index_name, self.table_name, self.columns, schema=self.schema,
            unique=self.unique, quote=self.quote, **self.kw)

    @classmethod
    @util._with_legacy_names([('name', 'index_name')])
    def create_index(
            cls, operations,
            index_name, table_name, columns, schema=None,
            unique=False, quote=None, **kw):
        """Issue a "create index" instruction using the current
        migration context.

        e.g.::

            from alembic import op
            op.create_index('ik_test', 't1', ['foo', 'bar'])

        Functional indexes can be produced by using the
        :func:`sqlalchemy.sql.expression.text` construct::

            from alembic import op
            from sqlalchemy import text
            op.create_index('ik_test', 't1', [text('lower(foo)')])

        .. versionadded:: 0.6.7 support for making use of the
           :func:`~sqlalchemy.sql.expression.text` construct in
           conjunction with
           :meth:`.Operations.create_index` in
           order to produce functional expressions within CREATE INDEX.

        :param index_name: name of the index.
        :param table_name: name of the owning table.
        :param columns: a list consisting of string column names and/or
         :func:`~sqlalchemy.sql.expression.text` constructs.
        :param schema: Optional schema name to operate within.  To control
         quoting of the schema outside of the default behavior, use
         the SQLAlchemy construct
         :class:`~sqlalchemy.sql.elements.quoted_name`.

         .. versionadded:: 0.7.0 'schema' can now accept a
            :class:`~sqlalchemy.sql.elements.quoted_name` construct.

        :param unique: If True, create a unique index.

        :param quote:
            Force quoting of this column's name on or off, corresponding
            to ``True`` or ``False``.  When left at its default
            of ``None``, the column identifier will be quoted according to
            whether the name is case sensitive (identifiers with at least one
            upper case character are treated as case sensitive), or if it's a
            reserved word.  This flag is only needed to force quoting of a
            reserved word which is not known by the SQLAlchemy dialect.

        :param \**kw: Additional keyword arguments not mentioned above are
            dialect specific, and passed in the form
            ``<dialectname>_<argname>``.
            See the documentation regarding an individual dialect at
            :ref:`dialect_toplevel` for detail on documented arguments.
        """
        op = cls(
            index_name, table_name, columns, schema=schema,
            unique=unique, quote=quote, **kw
        )
        return operations.invoke(op)

    @classmethod
    def batch_create_index(cls, operations, index_name, columns, **kw):
        """Issue a "create index" instruction using the
        current batch migration context.

        .. seealso::

            :meth:`.Operations.create_index`

        """
        # the batch context supplies the table name and schema
        op = cls(
            index_name, operations.impl.table_name, columns,
            schema=operations.impl.schema, **kw
        )
        return operations.invoke(op)
+
+
@Operations.register_operation("drop_index")
@BatchOperations.register_operation("drop_index", "batch_drop_index")
class DropIndexOp(MigrateOperation):
    """Represent a drop index operation."""

    def __init__(self, index_name, table_name=None, schema=None):
        """Record the elements of a DROP INDEX.

        :param index_name: name of the index to drop.
        :param table_name: optional owning table name; required by some
         backends (e.g. Microsoft SQL Server).
        :param schema: optional schema name.
        """
        self.schema = schema
        self.table_name = table_name
        self.index_name = index_name

    @classmethod
    def from_index(cls, index):
        """Build a :class:`.DropIndexOp` from an existing
        :class:`~sqlalchemy.schema.Index`."""
        owning_table = index.table
        return cls(
            index.name,
            owning_table.name,
            schema=owning_table.schema,
        )

    def to_index(self, migration_context=None):
        """Return an :class:`~sqlalchemy.schema.Index` for this op."""
        builder = schemaobj.SchemaObjects(migration_context)

        # a placeholder column name is required, as SQLAlchemy
        # 0.7.6 and further raises on Index with no columns
        return builder.index(
            self.index_name, self.table_name, ['x'], schema=self.schema)

    @classmethod
    @util._with_legacy_names([
        ('name', 'index_name'), ('tablename', 'table_name')])
    def drop_index(cls, operations, index_name, table_name=None, schema=None):
        """Issue a "drop index" instruction using the current
        migration context.

        e.g.::

            drop_index("accounts")

        :param index_name: name of the index.
        :param table_name: name of the owning table.  Some
         backends such as Microsoft SQL Server require this.
        :param schema: Optional schema name to operate within.  To control
         quoting of the schema outside of the default behavior, use
         the SQLAlchemy construct
         :class:`~sqlalchemy.sql.elements.quoted_name`.

         .. versionadded:: 0.7.0 'schema' can now accept a
            :class:`~sqlalchemy.sql.elements.quoted_name` construct.

        """
        return operations.invoke(
            cls(index_name, table_name=table_name, schema=schema))

    @classmethod
    @util._with_legacy_names([('name', 'index_name')])
    def batch_drop_index(cls, operations, index_name, **kw):
        """Issue a "drop index" instruction using the
        current batch migration context.

        .. seealso::

            :meth:`.Operations.drop_index`

        """
        impl = operations.impl
        # the batch context supplies the table name and schema
        return operations.invoke(
            cls(index_name, table_name=impl.table_name, schema=impl.schema))
+
+
+@Operations.register_operation("create_table")
+class CreateTableOp(MigrateOperation):
+ """Represent a create table operation."""
+
+ def __init__(
+ self, table_name, columns, schema=None, _orig_table=None, **kw):
+ self.table_name = table_name
+ self.columns = columns
+ self.schema = schema
+ self.kw = kw
+ self._orig_table = _orig_table
+
+ @classmethod
+ def from_table(cls, table):
+ return cls(
+ table.name,
+ list(table.c) + list(table.constraints),
+ schema=table.schema,
+ _orig_table=table,
+ **table.kwargs
)
- def create_table(self, name, *columns, **kw):
+ def to_table(self, migration_context=None):
+ if self._orig_table is not None:
+ return self._orig_table
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+
+ return schema_obj.table(
+ self.table_name, *self.columns, schema=self.schema, **self.kw
+ )
+
+ @classmethod
+ @util._with_legacy_names([('name', 'table_name')])
+ def create_table(cls, operations, table_name, *columns, **kw):
"""Issue a "create table" instruction using the current migration
context.
@@ -917,7 +869,7 @@ class Operations(object):
.. versionadded:: 0.7.0
- :param name: Name of the table
+ :param table_name: Name of the table
:param \*columns: collection of :class:`~sqlalchemy.schema.Column`
objects within
the table, as well as optional :class:`~sqlalchemy.schema.Constraint`
@@ -940,11 +892,33 @@ class Operations(object):
object is returned.
"""
- table = self._table(name, *columns, **kw)
- self.impl.create_table(table)
- return table
+ op = cls(table_name, columns, **kw)
+ return operations.invoke(op)
+
+
+@Operations.register_operation("drop_table")
+class DropTableOp(MigrateOperation):
+ """Represent a drop table operation."""
- def drop_table(self, name, **kw):
+ def __init__(self, table_name, schema=None, table_kw=None):
+ self.table_name = table_name
+ self.schema = schema
+ self.table_kw = table_kw or {}
+
+ @classmethod
+ def from_table(cls, table):
+ return cls(table.name, schema=table.schema)
+
+ def to_table(self, migration_context):
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+ return schema_obj.table(
+ self.table_name,
+ schema=self.schema,
+ **self.table_kw)
+
+ @classmethod
+ @util._with_legacy_names([('name', 'table_name')])
+ def drop_table(cls, operations, table_name, schema=None, **kw):
"""Issue a "drop table" instruction using the current
migration context.
@@ -953,7 +927,7 @@ class Operations(object):
drop_table("accounts")
- :param name: Name of the table
+ :param table_name: Name of the table
:param schema: Optional schema name to operate within. To control
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
@@ -966,37 +940,33 @@ class Operations(object):
:class:`sqlalchemy.schema.Table` object created for the command.
"""
- self.impl.drop_table(
- self._table(name, **kw)
- )
+ op = cls(table_name, schema=schema, table_kw=kw)
+ operations.invoke(op)
- def create_index(self, name, table_name, columns, schema=None,
- unique=False, quote=None, **kw):
- """Issue a "create index" instruction using the current
- migration context.
- e.g.::
+class AlterTableOp(MigrateOperation):
+ """Represent an alter table operation."""
- from alembic import op
- op.create_index('ik_test', 't1', ['foo', 'bar'])
+ def __init__(self, table_name, schema=None):
+ self.table_name = table_name
+ self.schema = schema
- Functional indexes can be produced by using the
- :func:`sqlalchemy.sql.expression.text` construct::
- from alembic import op
- from sqlalchemy import text
- op.create_index('ik_test', 't1', [text('lower(foo)')])
+@Operations.register_operation("rename_table")
+class RenameTableOp(AlterTableOp):
+ """Represent a rename table operation."""
- .. versionadded:: 0.6.7 support for making use of the
- :func:`~sqlalchemy.sql.expression.text` construct in
- conjunction with
- :meth:`.Operations.create_index` in
- order to produce functional expressions within CREATE INDEX.
+ def __init__(self, old_table_name, new_table_name, schema=None):
+ super(RenameTableOp, self).__init__(old_table_name, schema=schema)
+ self.new_table_name = new_table_name
- :param name: name of the index.
- :param table_name: name of the owning table.
- :param columns: a list consisting of string column names and/or
- :func:`~sqlalchemy.sql.expression.text` constructs.
+ @classmethod
+ def rename_table(
+ cls, operations, old_table_name, new_table_name, schema=None):
+ """Emit an ALTER TABLE to rename a table.
+
+ :param old_table_name: old name.
+ :param new_table_name: new name.
:param schema: Optional schema name to operate within. To control
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
@@ -1005,40 +975,239 @@ class Operations(object):
.. versionadded:: 0.7.0 'schema' can now accept a
:class:`~sqlalchemy.sql.elements.quoted_name` construct.
- :param unique: If True, create a unique index.
+ """
+ op = cls(old_table_name, new_table_name, schema=schema)
+ return operations.invoke(op)
+
+
+@Operations.register_operation("alter_column")
+@BatchOperations.register_operation("alter_column", "batch_alter_column")
+class AlterColumnOp(AlterTableOp):
+ """Represent an alter column operation."""
+
+ def __init__(
+ self, table_name, column_name, schema=None,
+ existing_type=None,
+ existing_server_default=False,
+ existing_nullable=None,
+ modify_nullable=None,
+ modify_server_default=False,
+ modify_name=None,
+ modify_type=None,
+ **kw
- :param quote:
- Force quoting of this column's name on or off, corresponding
- to ``True`` or ``False``. When left at its default
- of ``None``, the column identifier will be quoted according to
- whether the name is case sensitive (identifiers with at least one
- upper case character are treated as case sensitive), or if it's a
- reserved word. This flag is only needed to force quoting of a
- reserved word which is not known by the SQLAlchemy dialect.
+ ):
+ super(AlterColumnOp, self).__init__(table_name, schema=schema)
+ self.column_name = column_name
+ self.existing_type = existing_type
+ self.existing_server_default = existing_server_default
+ self.existing_nullable = existing_nullable
+ self.modify_nullable = modify_nullable
+ self.modify_server_default = modify_server_default
+ self.modify_name = modify_name
+ self.modify_type = modify_type
+ self.kw = kw
+
+ @classmethod
+ @util._with_legacy_names([('name', 'new_column_name')])
+ def alter_column(
+ cls, operations, table_name, column_name,
+ nullable=None,
+ server_default=False,
+ new_column_name=None,
+ type_=None,
+ existing_type=None,
+ existing_server_default=False,
+ existing_nullable=None,
+ schema=None, **kw
+ ):
+ """Issue an "alter column" instruction using the
+ current migration context.
+
+ Generally, only that aspect of the column which
+ is being changed, i.e. name, type, nullability,
+ default, needs to be specified. Multiple changes
+ can also be specified at once and the backend should
+ "do the right thing", emitting each change either
+ separately or together as the backend allows.
+
+ MySQL has special requirements here, since MySQL
+ cannot ALTER a column without a full specification.
+ When producing MySQL-compatible migration files,
+ it is recommended that the ``existing_type``,
+ ``existing_server_default``, and ``existing_nullable``
+ parameters be present, if not being altered.
+
+ Type changes which are against the SQLAlchemy
+ "schema" types :class:`~sqlalchemy.types.Boolean`
+ and :class:`~sqlalchemy.types.Enum` may also
+ add or drop constraints which accompany those
+ types on backends that don't support them natively.
+ The ``existing_server_default`` argument is
+ used in this case as well to remove a previous
+ constraint.
+
+ :param table_name: string name of the target table.
+ :param column_name: string name of the target column,
+ as it exists before the operation begins.
+ :param nullable: Optional; specify ``True`` or ``False``
+ to alter the column's nullability.
+ :param server_default: Optional; specify a string
+ SQL expression, :func:`~sqlalchemy.sql.expression.text`,
+ or :class:`~sqlalchemy.schema.DefaultClause` to indicate
+ an alteration to the column's default value.
+ Set to ``None`` to have the default removed.
+ :param new_column_name: Optional; specify a string name here to
+ indicate the new name within a column rename operation.
+ :param ``type_``: Optional; a :class:`~sqlalchemy.types.TypeEngine`
+ type object to specify a change to the column's type.
+ For SQLAlchemy types that also indicate a constraint (i.e.
+ :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
+ the constraint is also generated.
+ :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column;
+ currently understood by the MySQL dialect.
+ :param existing_type: Optional; a
+ :class:`~sqlalchemy.types.TypeEngine`
+ type object to specify the previous type. This
+ is required for all MySQL column alter operations that
+ don't otherwise specify a new type, as well as for
+ when nullability is being changed on a SQL Server
+ column. It is also used if the type is a so-called
+ SQLAlchemy "schema" type which may define a constraint (i.e.
+ :class:`~sqlalchemy.types.Boolean`,
+ :class:`~sqlalchemy.types.Enum`),
+ so that the constraint can be dropped.
+ :param existing_server_default: Optional; The existing
+ default value of the column. Required on MySQL if
+ an existing default is not being changed; else MySQL
+ removes the default.
+ :param existing_nullable: Optional; the existing nullability
+ of the column. Required on MySQL if the existing nullability
+ is not being changed; else MySQL sets this to NULL.
+ :param existing_autoincrement: Optional; the existing autoincrement
+ of the column. Used for MySQL's system of altering a column
+ that specifies ``AUTO_INCREMENT``.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ .. versionadded:: 0.7.0 'schema' can now accept a
+ :class:`~sqlalchemy.sql.elements.quoted_name` construct.
- :param \**kw: Additional keyword arguments not mentioned above are
- dialect specific, and passed in the form ``<dialectname>_<argname>``.
- See the documentation regarding an individual dialect at
- :ref:`dialect_toplevel` for detail on documented arguments.
"""
- self.impl.create_index(
- self._index(name, table_name, columns, schema=schema,
- unique=unique, quote=quote, **kw)
+ alt = cls(
+ table_name, column_name, schema=schema,
+ existing_type=existing_type,
+ existing_server_default=existing_server_default,
+ existing_nullable=existing_nullable,
+ modify_name=new_column_name,
+ modify_type=type_,
+ modify_server_default=server_default,
+ modify_nullable=nullable,
+ **kw
)
- @util._with_legacy_names([('tablename', 'table_name')])
- def drop_index(self, name, table_name=None, schema=None):
- """Issue a "drop index" instruction using the current
+ return operations.invoke(alt)
+
+ @classmethod
+ def batch_alter_column(
+ cls, operations, column_name,
+ nullable=None,
+ server_default=False,
+ new_column_name=None,
+ type_=None,
+ existing_type=None,
+ existing_server_default=False,
+ existing_nullable=None,
+ **kw
+ ):
+ """Issue an "alter column" instruction using the current
+ batch migration context.
+
+ .. seealso::
+
+ :meth:`.Operations.alter_column`
+
+ """
+ alt = cls(
+ operations.impl.table_name, column_name,
+ schema=operations.impl.schema,
+ existing_type=existing_type,
+ existing_server_default=existing_server_default,
+ existing_nullable=existing_nullable,
+ modify_name=new_column_name,
+ modify_type=type_,
+ modify_server_default=server_default,
+ modify_nullable=nullable,
+ **kw
+ )
+
+ return operations.invoke(alt)
+
+
+@Operations.register_operation("add_column")
+@BatchOperations.register_operation("add_column", "batch_add_column")
+class AddColumnOp(AlterTableOp):
+ """Represent an add column operation."""
+
+ def __init__(self, table_name, column, schema=None):
+ super(AddColumnOp, self).__init__(table_name, schema=schema)
+ self.column = column
+
+ @classmethod
+ def from_column(cls, col):
+ return cls(col.table.name, col, schema=col.table.schema)
+
+ @classmethod
+ def from_column_and_tablename(cls, schema, tname, col):
+ return cls(tname, col, schema=schema)
+
+ @classmethod
+ def add_column(cls, operations, table_name, column, schema=None):
+ """Issue an "add column" instruction using the current
migration context.
e.g.::
- drop_index("accounts")
+ from alembic import op
+ from sqlalchemy import Column, String
- :param name: name of the index.
- :param table_name: name of the owning table. Some
- backends such as Microsoft SQL Server require this.
+ op.add_column('organization',
+ Column('name', String())
+ )
+
+ The provided :class:`~sqlalchemy.schema.Column` object can also
+ specify a :class:`~sqlalchemy.schema.ForeignKey`, referencing
+ a remote table name. Alembic will automatically generate a stub
+ "referenced" table and emit a second ALTER statement in order
+ to add the constraint separately::
+
+ from alembic import op
+ from sqlalchemy import Column, INTEGER, ForeignKey
+
+ op.add_column('organization',
+ Column('account_id', INTEGER, ForeignKey('accounts.id'))
+ )
+
+ Note that this statement uses the :class:`~sqlalchemy.schema.Column`
+ construct as is from the SQLAlchemy library. In particular,
+ default values to be created on the database side are
+ specified using the ``server_default`` parameter, and not
+ ``default`` which only specifies Python-side defaults::
+
+ from alembic import op
+ from sqlalchemy import Column, TIMESTAMP, func
+
+ # specify "DEFAULT NOW" along with the column add
+ op.add_column('account',
+ Column('timestamp', TIMESTAMP, server_default=func.now())
+ )
+
+ :param table_name: String name of the parent table.
+ :param column: a :class:`sqlalchemy.schema.Column` object
+ representing the new column.
:param schema: Optional schema name to operate within. To control
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
@@ -1047,21 +1216,59 @@ class Operations(object):
.. versionadded:: 0.7.0 'schema' can now accept a
:class:`~sqlalchemy.sql.elements.quoted_name` construct.
+
"""
- # need a dummy column name here since SQLAlchemy
- # 0.7.6 and further raises on Index with no columns
- self.impl.drop_index(
- self._index(name, table_name, ['x'], schema=schema)
+
+ op = cls(table_name, column, schema=schema)
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_add_column(cls, operations, column):
+ """Issue an "add column" instruction using the current
+ batch migration context.
+
+ .. seealso::
+
+ :meth:`.Operations.add_column`
+
+ """
+ op = cls(
+ operations.impl.table_name, column,
+ schema=operations.impl.schema
)
+ return operations.invoke(op)
- @util._with_legacy_names([("type", "type_")])
- def drop_constraint(self, name, table_name, type_=None, schema=None):
- """Drop a constraint of the given name, typically via DROP CONSTRAINT.
- :param name: name of the constraint.
- :param table_name: table name.
- :param ``type_``: optional, required on MySQL. can be
- 'foreignkey', 'primary', 'unique', or 'check'.
+@Operations.register_operation("drop_column")
+@BatchOperations.register_operation("drop_column", "batch_drop_column")
+class DropColumnOp(AlterTableOp):
+ """Represent a drop column operation."""
+
+ def __init__(self, table_name, column_name, schema=None, **kw):
+ super(DropColumnOp, self).__init__(table_name, schema=schema)
+ self.column_name = column_name
+ self.kw = kw
+
+ @classmethod
+ def from_column_and_tablename(cls, schema, tname, col):
+ return cls(tname, col.name, schema=schema)
+
+ def to_column(self, migration_context=None):
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+ return schema_obj.column(self.column_name, NULLTYPE)
+
+ @classmethod
+ def drop_column(
+ cls, operations, table_name, column_name, schema=None, **kw):
+ """Issue a "drop column" instruction using the current
+ migration context.
+
+ e.g.::
+
+ drop_column('organization', 'account_id')
+
+ :param table_name: name of table
+ :param column_name: name of column
:param schema: Optional schema name to operate within. To control
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
@@ -1070,28 +1277,62 @@ class Operations(object):
.. versionadded:: 0.7.0 'schema' can now accept a
:class:`~sqlalchemy.sql.elements.quoted_name` construct.
+ :param mssql_drop_check: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop the CHECK constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from sys.check_constraints,
+ then exec's a separate DROP CONSTRAINT for that constraint.
+ :param mssql_drop_default: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop the DEFAULT constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from sys.default_constraints,
+ then exec's a separate DROP CONSTRAINT for that default.
+ :param mssql_drop_foreign_key: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop a single FOREIGN KEY constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from
+ sys.foreign_keys/sys.foreign_key_columns,
+ then exec's a separate DROP CONSTRAINT for that foreign key. Only
+ works if the column has exactly one FK constraint which refers to
+ it, at the moment.
+
+ .. versionadded:: 0.6.2
+
"""
- t = self._table(table_name, schema=schema)
- types = {
- 'foreignkey': lambda name: sa_schema.ForeignKeyConstraint(
- [], [], name=name),
- 'primary': sa_schema.PrimaryKeyConstraint,
- 'unique': sa_schema.UniqueConstraint,
- 'check': lambda name: sa_schema.CheckConstraint("", name=name),
- None: sa_schema.Constraint
- }
- try:
- const = types[type_]
- except KeyError:
- raise TypeError("'type' can be one of %s" %
- ", ".join(sorted(repr(x) for x in types)))
+ op = cls(table_name, column_name, schema=schema, **kw)
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_drop_column(cls, operations, column_name):
+ """Issue a "drop column" instruction using the current
+ batch migration context.
+
+ .. seealso::
+
+ :meth:`.Operations.drop_column`
+
+ """
+ op = cls(
+ operations.impl.table_name, column_name,
+ schema=operations.impl.schema)
+ return operations.invoke(op)
+
+
+@Operations.register_operation("bulk_insert")
+class BulkInsertOp(MigrateOperation):
+ """Represent a bulk insert operation."""
- const = const(name=name)
- t.append_constraint(const)
- self.impl.drop_constraint(const)
+ def __init__(self, table, rows, multiinsert=True):
+ self.table = table
+ self.rows = rows
+ self.multiinsert = multiinsert
- def bulk_insert(self, table, rows, multiinsert=True):
+ @classmethod
+ def bulk_insert(cls, operations, table, rows, multiinsert=True):
"""Issue a "bulk insert" operation using the current
migration context.
@@ -1174,53 +1415,21 @@ class Operations(object):
.. versionadded:: 0.6.4
"""
- self.impl.bulk_insert(table, rows, multiinsert=multiinsert)
-
- def inline_literal(self, value, type_=None):
- """Produce an 'inline literal' expression, suitable for
- using in an INSERT, UPDATE, or DELETE statement.
-
- When using Alembic in "offline" mode, CRUD operations
- aren't compatible with SQLAlchemy's default behavior surrounding
- literal values,
- which is that they are converted into bound values and passed
- separately into the ``execute()`` method of the DBAPI cursor.
- An offline SQL
- script needs to have these rendered inline. While it should
- always be noted that inline literal values are an **enormous**
- security hole in an application that handles untrusted input,
- a schema migration is not run in this context, so
- literals are safe to render inline, with the caveat that
- advanced types like dates may not be supported directly
- by SQLAlchemy.
-
- See :meth:`.execute` for an example usage of
- :meth:`.inline_literal`.
-
- The environment can also be configured to attempt to render
- "literal" values inline automatically, for those simple types
- that are supported by the dialect; see
- :paramref:`.EnvironmentContext.configure.literal_binds` for this
- more recently added feature.
-
- :param value: The value to render. Strings, integers, and simple
- numerics should be supported. Other types like boolean,
- dates, etc. may or may not be supported yet by various
- backends.
- :param ``type_``: optional - a :class:`sqlalchemy.types.TypeEngine`
- subclass stating the type of this value. In SQLAlchemy
- expressions, this is usually derived automatically
- from the Python type of the value itself, as well as
- based on the context in which the value is used.
- .. seealso::
+ op = cls(table, rows, multiinsert=multiinsert)
+ operations.invoke(op)
- :paramref:`.EnvironmentContext.configure.literal_binds`
- """
- return impl._literal_bindparam(None, value, type_=type_)
+@Operations.register_operation("execute")
+class ExecuteSQLOp(MigrateOperation):
+ """Represent an execute SQL operation."""
+
+ def __init__(self, sqltext, execution_options=None):
+ self.sqltext = sqltext
+ self.execution_options = execution_options
- def execute(self, sql, execution_options=None):
+ @classmethod
+ def execute(cls, operations, sqltext, execution_options=None):
"""Execute the given SQL using the current migration context.
In a SQL script context, the statement is emitted directly to the
@@ -1283,177 +1492,74 @@ class Operations(object):
execution options, will be passed to
:meth:`sqlalchemy.engine.Connection.execution_options`.
"""
- self.migration_context.impl.execute(
- sql,
- execution_options=execution_options)
+ op = cls(sqltext, execution_options=execution_options)
+ return operations.invoke(op)
- def get_bind(self):
- """Return the current 'bind'.
- Under normal circumstances, this is the
- :class:`~sqlalchemy.engine.Connection` currently being used
- to emit SQL to the database.
+class OpContainer(MigrateOperation):
+ """Represent a sequence of operations operation."""
+ def __init__(self, ops=()):
+ self.ops = ops
- In a SQL script context, this value is ``None``. [TODO: verify this]
- """
- return self.migration_context.impl.bind
+class ModifyTableOps(OpContainer):
+ """Contains a sequence of operations that all apply to a single Table."""
+ def __init__(self, table_name, ops, schema=None):
+ super(ModifyTableOps, self).__init__(ops)
+ self.table_name = table_name
+ self.schema = schema
-class BatchOperations(Operations):
- """Modifies the interface :class:`.Operations` for batch mode.
- This basically omits the ``table_name`` and ``schema`` parameters
- from associated methods, as these are a given when running under batch
- mode.
+class UpgradeOps(OpContainer):
+ """contains a sequence of operations that would apply to the
+ 'upgrade' stream of a script.
.. seealso::
- :meth:`.Operations.batch_alter_table`
+ :ref:`customizing_revision`
"""
- def _noop(self, operation):
- raise NotImplementedError(
- "The %s method does not apply to a batch table alter operation."
- % operation)
-
- def add_column(self, column):
- """Issue an "add column" instruction using the current
- batch migration context.
-
- .. seealso::
-
- :meth:`.Operations.add_column`
-
- """
-
- return super(BatchOperations, self).add_column(
- self.impl.table_name, column, schema=self.impl.schema)
-
- def alter_column(self, column_name, **kw):
- """Issue an "alter column" instruction using the current
- batch migration context.
-
- .. seealso::
-
- :meth:`.Operations.add_column`
-
- """
- kw['schema'] = self.impl.schema
- return super(BatchOperations, self).alter_column(
- self.impl.table_name, column_name, **kw)
-
- def drop_column(self, column_name):
- """Issue a "drop column" instruction using the current
- batch migration context.
-
- .. seealso::
-
- :meth:`.Operations.drop_column`
-
- """
- return super(BatchOperations, self).drop_column(
- self.impl.table_name, column_name, schema=self.impl.schema)
-
- def create_primary_key(self, name, cols):
- """Issue a "create primary key" instruction using the
- current batch migration context.
-
- The batch form of this call omits the ``table_name`` and ``schema``
- arguments from the call.
-
- .. seealso::
-
- :meth:`.Operations.create_primary_key`
-
- """
- raise NotImplementedError("not yet implemented")
-
- def create_foreign_key(
- self, name, referent, local_cols, remote_cols, **kw):
- """Issue a "create foreign key" instruction using the
- current batch migration context.
-
- The batch form of this call omits the ``source`` and ``source_schema``
- arguments from the call.
-
- e.g.::
-
- with batch_alter_table("address") as batch_op:
- batch_op.create_foreign_key(
- "fk_user_address",
- "user", ["user_id"], ["id"])
-
- .. seealso::
-
- :meth:`.Operations.create_foreign_key`
- """
- return super(BatchOperations, self).create_foreign_key(
- name, self.impl.table_name, referent, local_cols, remote_cols,
- source_schema=self.impl.schema, **kw)
-
- def create_unique_constraint(self, name, local_cols, **kw):
- """Issue a "create unique constraint" instruction using the
- current batch migration context.
-
- The batch form of this call omits the ``source`` and ``schema``
- arguments from the call.
-
- .. seealso::
-
- :meth:`.Operations.create_unique_constraint`
-
- """
- kw['schema'] = self.impl.schema
- return super(BatchOperations, self).create_unique_constraint(
- name, self.impl.table_name, local_cols, **kw)
+class DowngradeOps(OpContainer):
+ """contains a sequence of operations that would apply to the
+ 'downgrade' stream of a script.
- def create_check_constraint(self, name, condition, **kw):
- """Issue a "create check constraint" instruction using the
- current batch migration context.
-
- The batch form of this call omits the ``source`` and ``schema``
- arguments from the call.
-
- .. seealso::
-
- :meth:`.Operations.create_check_constraint`
+ .. seealso::
- """
- raise NotImplementedError("not yet implemented")
+ :ref:`customizing_revision`
- def drop_constraint(self, name, type_=None):
- """Issue a "drop constraint" instruction using the
- current batch migration context.
-
- The batch form of this call omits the ``table_name`` and ``schema``
- arguments from the call.
+ """
- .. seealso::
- :meth:`.Operations.drop_constraint`
+class MigrationScript(MigrateOperation):
+ """represents a migration script.
- """
- return super(BatchOperations, self).drop_constraint(
- name, self.impl.table_name, type_=type_,
- schema=self.impl.schema)
+ E.g. when autogenerate encounters this object, this corresponds to the
+ production of an actual script file.
- def create_index(self, name, columns, **kw):
- """Issue a "create index" instruction using the
- current batch migration context."""
+ A normal :class:`.MigrationScript` object would contain a single
+ :class:`.UpgradeOps` and a single :class:`.DowngradeOps` directive.
- kw['schema'] = self.impl.schema
+ .. seealso::
- return super(BatchOperations, self).create_index(
- name, self.impl.table_name, columns, **kw)
+ :ref:`customizing_revision`
- def drop_index(self, name, **kw):
- """Issue a "drop index" instruction using the
- current batch migration context."""
+ """
- kw['schema'] = self.impl.schema
+ def __init__(
+ self, rev_id, upgrade_ops, downgrade_ops,
+ message=None,
+ imports=None, head=None, splice=None,
+ branch_label=None, version_path=None):
+ self.rev_id = rev_id
+ self.message = message
+ self.imports = imports
+ self.head = head
+ self.splice = splice
+ self.branch_label = branch_label
+ self.version_path = version_path
+ self.upgrade_ops = upgrade_ops
+ self.downgrade_ops = downgrade_ops
- return super(BatchOperations, self).drop_index(
- name, self.impl.table_name, **kw)
diff --git a/alembic/operations/schemaobj.py b/alembic/operations/schemaobj.py
new file mode 100644
index 0000000..b590aca
--- /dev/null
+++ b/alembic/operations/schemaobj.py
@@ -0,0 +1,157 @@
+from sqlalchemy import schema as sa_schema
+from sqlalchemy.types import NULLTYPE, Integer
+from ..util.compat import string_types
+from .. import util
+
+
+class SchemaObjects(object):
+
+ def __init__(self, migration_context=None):
+ self.migration_context = migration_context
+
+ def primary_key_constraint(self, name, table_name, cols, schema=None):
+ m = self.metadata()
+ columns = [sa_schema.Column(n, NULLTYPE) for n in cols]
+ t1 = sa_schema.Table(table_name, m,
+ *columns,
+ schema=schema)
+ p = sa_schema.PrimaryKeyConstraint(*columns, name=name)
+ t1.append_constraint(p)
+ return p
+
+ def foreign_key_constraint(
+ self, name, source, referent,
+ local_cols, remote_cols,
+ onupdate=None, ondelete=None,
+ deferrable=None, source_schema=None,
+ referent_schema=None, initially=None,
+ match=None, **dialect_kw):
+ m = self.metadata()
+ if source == referent:
+ t1_cols = local_cols + remote_cols
+ else:
+ t1_cols = local_cols
+ sa_schema.Table(
+ referent, m,
+ *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
+ schema=referent_schema)
+
+ t1 = sa_schema.Table(
+ source, m,
+ *[sa_schema.Column(n, NULLTYPE) for n in t1_cols],
+ schema=source_schema)
+
+ tname = "%s.%s" % (referent_schema, referent) if referent_schema \
+ else referent
+
+ if util.sqla_08:
+ # "match" kw unsupported in 0.7
+ dialect_kw['match'] = match
+
+ f = sa_schema.ForeignKeyConstraint(local_cols,
+ ["%s.%s" % (tname, n)
+ for n in remote_cols],
+ name=name,
+ onupdate=onupdate,
+ ondelete=ondelete,
+ deferrable=deferrable,
+ initially=initially,
+ **dialect_kw
+ )
+ t1.append_constraint(f)
+
+ return f
+
+ def unique_constraint(self, name, source, local_cols, schema=None, **kw):
+ t = sa_schema.Table(
+ source, self.metadata(),
+ *[sa_schema.Column(n, NULLTYPE) for n in local_cols],
+ schema=schema)
+ kw['name'] = name
+ uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw)
+ # TODO: need event tests to ensure the event
+ # is fired off here
+ t.append_constraint(uq)
+ return uq
+
+ def check_constraint(self, name, source, condition, schema=None, **kw):
+ t = sa_schema.Table(source, self.metadata(),
+ sa_schema.Column('x', Integer), schema=schema)
+ ck = sa_schema.CheckConstraint(condition, name=name, **kw)
+ t.append_constraint(ck)
+ return ck
+
+ def generic_constraint(self, name, table_name, type_, schema=None, **kw):
+ t = self.table(table_name, schema=schema)
+ types = {
+ 'foreignkey': lambda name: sa_schema.ForeignKeyConstraint(
+ [], [], name=name),
+ 'primary': sa_schema.PrimaryKeyConstraint,
+ 'unique': sa_schema.UniqueConstraint,
+ 'check': lambda name: sa_schema.CheckConstraint("", name=name),
+ None: sa_schema.Constraint
+ }
+ try:
+ const = types[type_]
+ except KeyError:
+ raise TypeError("'type' can be one of %s" %
+ ", ".join(sorted(repr(x) for x in types)))
+ else:
+ const = const(name=name)
+ t.append_constraint(const)
+ return const
+
+ def metadata(self):
+ kw = {}
+ if self.migration_context is not None and \
+ 'target_metadata' in self.migration_context.opts:
+ mt = self.migration_context.opts['target_metadata']
+ if hasattr(mt, 'naming_convention'):
+ kw['naming_convention'] = mt.naming_convention
+ return sa_schema.MetaData(**kw)
+
+ def table(self, name, *columns, **kw):
+ m = self.metadata()
+ t = sa_schema.Table(name, m, *columns, **kw)
+ for f in t.foreign_keys:
+ self._ensure_table_for_fk(m, f)
+ return t
+
+ def column(self, name, type_, **kw):
+ return sa_schema.Column(name, type_, **kw)
+
+ def index(self, name, tablename, columns, schema=None, **kw):
+ t = sa_schema.Table(
+ tablename or 'no_table', self.metadata(),
+ schema=schema
+ )
+ idx = sa_schema.Index(
+ name,
+ *[util.sqla_compat._textual_index_column(t, n) for n in columns],
+ **kw)
+ return idx
+
+ def _parse_table_key(self, table_key):
+ if '.' in table_key:
+ tokens = table_key.split('.')
+ sname = ".".join(tokens[0:-1])
+ tname = tokens[-1]
+ else:
+ tname = table_key
+ sname = None
+ return (sname, tname)
+
+ def _ensure_table_for_fk(self, metadata, fk):
+ """create a placeholder Table object for the referent of a
+ ForeignKey.
+
+ """
+ if isinstance(fk._colspec, string_types):
+ table_key, cname = fk._colspec.rsplit('.', 1)
+ sname, tname = self._parse_table_key(table_key)
+ if table_key not in metadata.tables:
+ rel_t = sa_schema.Table(tname, metadata, schema=sname)
+ else:
+ rel_t = metadata.tables[table_key]
+ if cname not in rel_t.c:
+ rel_t.append_column(sa_schema.Column(cname, NULLTYPE))
diff --git a/alembic/operations/toimpl.py b/alembic/operations/toimpl.py
new file mode 100644
index 0000000..1327367
--- /dev/null
+++ b/alembic/operations/toimpl.py
@@ -0,0 +1,162 @@
+from . import ops
+
+from . import Operations
+from sqlalchemy import schema as sa_schema
+
+
+@Operations.implementation_for(ops.AlterColumnOp)
+def alter_column(operations, operation):
+    """Emit ALTER COLUMN; when the column type changes, drop and
+    re-create any constraints implied by the old/new types (e.g. the
+    CHECK constraint Boolean/Enum generate on some backends).
+    """
+
+    # compiler is only used to evaluate each constraint's _create_rule,
+    # i.e. whether this dialect would have rendered the constraint at all
+    compiler = operations.impl.dialect.statement_compiler(
+        operations.impl.dialect,
+        None
+    )
+
+    existing_type = operation.existing_type
+    existing_nullable = operation.existing_nullable
+    existing_server_default = operation.existing_server_default
+    type_ = operation.modify_type
+    column_name = operation.column_name
+    table_name = operation.table_name
+    schema = operation.schema
+    server_default = operation.modify_server_default
+    new_column_name = operation.modify_name
+    nullable = operation.modify_nullable
+
+    def _count_constraint(constraint):
+        # skip PK constraints and constraints whose _create_rule says the
+        # dialect would not render them
+        return not isinstance(
+            constraint,
+            sa_schema.PrimaryKeyConstraint) and \
+            (not constraint._create_rule or
+                constraint._create_rule(compiler))
+
+    # drop constraints implied by the *existing* type before the ALTER
+    if existing_type and type_:
+        t = operations.schema_obj.table(
+            table_name,
+            sa_schema.Column(column_name, existing_type),
+            schema=schema
+        )
+        for constraint in t.constraints:
+            if _count_constraint(constraint):
+                operations.impl.drop_constraint(constraint)
+
+    operations.impl.alter_column(
+        table_name, column_name,
+        nullable=nullable,
+        server_default=server_default,
+        name=new_column_name,
+        type_=type_,
+        schema=schema,
+        existing_type=existing_type,
+        existing_server_default=existing_server_default,
+        existing_nullable=existing_nullable,
+        **operation.kw
+    )
+
+    # re-create constraints implied by the *new* type after the ALTER
+    if type_:
+        t = operations.schema_obj.table(
+            table_name,
+            operations.schema_obj.column(column_name, type_),
+            schema=schema
+        )
+        for constraint in t.constraints:
+            if _count_constraint(constraint):
+                operations.impl.add_constraint(constraint)
+ operations.impl.add_constraint(constraint)
+
+
+@Operations.implementation_for(ops.DropTableOp)
+def drop_table(operations, operation):
+    # materialize the op into a Table and hand off to the dialect impl
+    operations.impl.drop_table(
+        operation.to_table(operations.migration_context)
+    )
+
+
+@Operations.implementation_for(ops.DropColumnOp)
+def drop_column(operations, operation):
+    column = operation.to_column(operations.migration_context)
+    operations.impl.drop_column(
+        operation.table_name,
+        column,
+        schema=operation.schema,
+        **operation.kw
+    )
+
+
+@Operations.implementation_for(ops.CreateIndexOp)
+def create_index(operations, operation):
+    idx = operation.to_index(operations.migration_context)
+    operations.impl.create_index(idx)
+
+
+@Operations.implementation_for(ops.DropIndexOp)
+def drop_index(operations, operation):
+    operations.impl.drop_index(
+        operation.to_index(operations.migration_context)
+    )
+
+
+@Operations.implementation_for(ops.CreateTableOp)
+def create_table(operations, operation):
+    table = operation.to_table(operations.migration_context)
+    operations.impl.create_table(table)
+    # returned so op.create_table() can hand the Table back to the caller
+    # (e.g. for use with bulk_insert)
+    return table
+
+
+@Operations.implementation_for(ops.RenameTableOp)
+def rename_table(operations, operation):
+    operations.impl.rename_table(
+        operation.table_name,
+        operation.new_table_name,
+        schema=operation.schema)
+
+
+@Operations.implementation_for(ops.AddColumnOp)
+def add_column(operations, operation):
+    # ADD COLUMN, then create any constraints/indexes the Column implies
+    # (foreign keys, CHECKs, indexes) -- except primary key constraints,
+    # which cannot be added via ADD COLUMN.
+    table_name = operation.table_name
+    column = operation.column
+    schema = operation.schema
+
+    t = operations.schema_obj.table(table_name, column, schema=schema)
+    operations.impl.add_column(
+        table_name,
+        column,
+        schema=schema
+    )
+    for constraint in t.constraints:
+        if not isinstance(constraint, sa_schema.PrimaryKeyConstraint):
+            operations.impl.add_constraint(constraint)
+    for index in t.indexes:
+        operations.impl.create_index(index)
+
+
+@Operations.implementation_for(ops.AddConstraintOp)
+def create_constraint(operations, operation):
+    operations.impl.add_constraint(
+        operation.to_constraint(operations.migration_context)
+    )
+
+
+@Operations.implementation_for(ops.DropConstraintOp)
+def drop_constraint(operations, operation):
+    # build a name-only placeholder constraint of the given type; the
+    # backend needs only name/table to emit DROP CONSTRAINT
+    operations.impl.drop_constraint(
+        operations.schema_obj.generic_constraint(
+            operation.constraint_name,
+            operation.table_name,
+            operation.constraint_type,
+            schema=operation.schema,
+        )
+    )
+
+
+@Operations.implementation_for(ops.BulkInsertOp)
+def bulk_insert(operations, operation):
+    operations.impl.bulk_insert(
+        operation.table, operation.rows, multiinsert=operation.multiinsert)
+
+
+@Operations.implementation_for(ops.ExecuteSQLOp)
+def execute_sql(operations, operation):
+    # note: goes through migration_context.impl so offline (--sql) mode
+    # renders the statement rather than executing it
+    operations.migration_context.impl.execute(
+        operation.sqltext,
+        execution_options=operation.execution_options
+    )
diff --git a/alembic/runtime/__init__.py b/alembic/runtime/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/alembic/runtime/__init__.py
diff --git a/alembic/environment.py b/alembic/runtime/environment.py
index 860315b..3b04fea 100644
--- a/alembic/environment.py
+++ b/alembic/runtime/environment.py
@@ -1,9 +1,9 @@
-from .operations import Operations
+from ..operations import Operations
from .migration import MigrationContext
-from . import util
+from .. import util
-class EnvironmentContext(object):
+class EnvironmentContext(util.ModuleClsProxy):
"""Represent the state made available to an ``env.py`` script.
@@ -96,14 +96,11 @@ class EnvironmentContext(object):
be made available as ``from alembic import context``.
"""
- from .context import _install_proxy
- _install_proxy(self)
+ self._install_proxy()
return self
def __exit__(self, *arg, **kw):
- from . import context, op
- context._remove_proxy()
- op._remove_proxy()
+ self._remove_proxy()
def is_offline_mode(self):
"""Return True if the current migrations environment
@@ -293,6 +290,7 @@ class EnvironmentContext(object):
include_symbol=None,
include_object=None,
include_schemas=False,
+ process_revision_directives=None,
compare_type=False,
compare_server_default=False,
render_item=None,
@@ -656,6 +654,43 @@ class EnvironmentContext(object):
:ref:`autogen_module_prefix`
+ :param process_revision_directives: a callable function that will
+ be passed a structure representing the end result of an autogenerate
+ or plain "revision" operation, which can be manipulated to affect
+ how the ``alembic revision`` command ultimately outputs new
+ revision scripts. The structure of the callable is::
+
+ def process_revision_directives(context, revision, directives):
+ pass
+
+ The ``directives`` parameter is a Python list containing
+ a single :class:`.MigrationScript` directive, which represents
+ the revision file to be generated. This list as well as its
+ contents may be freely modified to produce any set of commands.
+ The section :ref:`customizing_revision` shows an example of
+ doing this. The ``context`` parameter is the
+ :class:`.MigrationContext` in use,
+ and ``revision`` is a tuple of revision identifiers representing the
+ current revision of the database.
+
+ The callable is invoked at all times when the ``--autogenerate``
+ option is passed to ``alembic revision``. If ``--autogenerate``
+ is not passed, the callable is invoked only if the
+ ``revision_environment`` variable is set to True in the Alembic
+ configuration, in which case the given ``directives`` collection
+ will contain empty :class:`.UpgradeOps` and :class:`.DowngradeOps`
+ collections for ``.upgrade_ops`` and ``.downgrade_ops``. The
+ ``--autogenerate`` option itself can be inferred by inspecting
+ ``context.config.cmd_opts.autogenerate``.
+
+
+ .. versionadded:: 0.8.0
+
+ .. seealso::
+
+ :ref:`customizing_revision`
+
+
Parameters specific to individual backends:
:param mssql_batch_separator: The "batch separator" which will
@@ -696,6 +731,8 @@ class EnvironmentContext(object):
opts['alembic_module_prefix'] = alembic_module_prefix
opts['user_module_prefix'] = user_module_prefix
opts['literal_binds'] = literal_binds
+ opts['process_revision_directives'] = process_revision_directives
+
if render_item is not None:
opts['render_item'] = render_item
if compare_type is not None:
diff --git a/alembic/migration.py b/alembic/runtime/migration.py
index 9b46052..84a3c7f 100644
--- a/alembic/migration.py
+++ b/alembic/runtime/migration.py
@@ -6,8 +6,8 @@ from sqlalchemy import MetaData, Table, Column, String, literal_column
from sqlalchemy.engine.strategies import MockEngineStrategy
from sqlalchemy.engine import url as sqla_url
-from .compat import callable, EncodedIO
-from . import ddl, util
+from ..util.compat import callable, EncodedIO
+from .. import ddl, util
log = logging.getLogger(__name__)
diff --git a/alembic/script/__init__.py b/alembic/script/__init__.py
new file mode 100644
index 0000000..cae294f
--- /dev/null
+++ b/alembic/script/__init__.py
@@ -0,0 +1,3 @@
+from .base import ScriptDirectory, Script # noqa
+
+__all__ = ['ScriptDirectory', 'Script']
diff --git a/alembic/script.py b/alembic/script/base.py
index 095a04b..e30c8b2 100644
--- a/alembic/script.py
+++ b/alembic/script/base.py
@@ -2,10 +2,10 @@ import datetime
import os
import re
import shutil
-from . import util
-from . import compat
+from .. import util
+from ..util import compat
from . import revision
-from . import migration
+from ..runtime import migration
from contextlib import contextmanager
diff --git a/alembic/revision.py b/alembic/script/revision.py
index 4eea514..e9958b1 100644
--- a/alembic/revision.py
+++ b/alembic/script/revision.py
@@ -1,10 +1,9 @@
import re
import collections
-import itertools
-from . import util
+from .. import util
from sqlalchemy import util as sqlautil
-from . import compat
+from ..util import compat
_relative_destination = re.compile(r'(?:(.+?)@)?(\w+)?((?:\+|-)\d+)')
diff --git a/alembic/testing/assertions.py b/alembic/testing/assertions.py
index b3a5acd..6acca21 100644
--- a/alembic/testing/assertions.py
+++ b/alembic/testing/assertions.py
@@ -2,9 +2,9 @@ from __future__ import absolute_import
import re
-from alembic import util
+from .. import util
from sqlalchemy.engine import default
-from alembic.compat import text_type, py3k
+from ..util.compat import text_type, py3k
import contextlib
from sqlalchemy.util import decorator
from sqlalchemy import exc as sa_exc
diff --git a/alembic/testing/env.py b/alembic/testing/env.py
index 9c53d5d..f8ad447 100644
--- a/alembic/testing/env.py
+++ b/alembic/testing/env.py
@@ -4,9 +4,9 @@ import os
import shutil
import textwrap
-from alembic.compat import u
-from alembic.script import Script, ScriptDirectory
-from alembic import util
+from ..util.compat import u
+from ..script import Script, ScriptDirectory
+from .. import util
from . import engines
from . import provision
diff --git a/alembic/testing/exclusions.py b/alembic/testing/exclusions.py
index 88df9fc..90f8bc6 100644
--- a/alembic/testing/exclusions.py
+++ b/alembic/testing/exclusions.py
@@ -14,11 +14,12 @@ from .plugin.plugin_base import SkipTest
from sqlalchemy.util import decorator
from . import config
from sqlalchemy import util
-from alembic import compat
+from ..util import compat
import inspect
import contextlib
from .compat import get_url_driver_name, get_url_backend_name
+
def skip_if(predicate, reason=None):
rule = compound()
pred = _as_predicate(predicate, reason)
diff --git a/alembic/testing/fixtures.py b/alembic/testing/fixtures.py
index ae25fd2..7e05525 100644
--- a/alembic/testing/fixtures.py
+++ b/alembic/testing/fixtures.py
@@ -5,13 +5,12 @@ import re
from sqlalchemy import create_engine, text, MetaData
import alembic
-from alembic.compat import configparser
-from alembic import util
-from alembic.compat import string_types, text_type
-from alembic.migration import MigrationContext
-from alembic.environment import EnvironmentContext
-from alembic.operations import Operations
-from alembic.ddl.impl import _impls
+from ..util.compat import configparser
+from .. import util
+from ..util.compat import string_types, text_type
+from ..migration import MigrationContext
+from ..environment import EnvironmentContext
+from ..operations import Operations
from contextlib import contextmanager
from .plugin.plugin_base import SkipTest
from .assertions import _get_dialect, eq_
diff --git a/alembic/testing/mock.py b/alembic/testing/mock.py
index cdfcb88..b82a404 100644
--- a/alembic/testing/mock.py
+++ b/alembic/testing/mock.py
@@ -12,7 +12,7 @@
"""
from __future__ import absolute_import
-from alembic.compat import py33
+from ..util.compat import py33
if py33:
from unittest.mock import MagicMock, Mock, call, patch
diff --git a/alembic/testing/provision.py b/alembic/testing/provision.py
index 801d36b..37ae141 100644
--- a/alembic/testing/provision.py
+++ b/alembic/testing/provision.py
@@ -3,9 +3,9 @@
"""
from sqlalchemy.engine import url as sa_url
from sqlalchemy import text
-from alembic import compat
-from alembic.testing import config, engines
-from alembic.testing.compat import get_url_backend_name
+from ..util import compat
+from . import config, engines
+from .compat import get_url_backend_name
FOLLOWER_IDENT = None
diff --git a/alembic/util.py b/alembic/util.py
deleted file mode 100644
index 2e0f731..0000000
--- a/alembic/util.py
+++ /dev/null
@@ -1,405 +0,0 @@
-import sys
-import os
-import textwrap
-import warnings
-import re
-import inspect
-import uuid
-import collections
-
-from mako.template import Template
-from sqlalchemy.engine import url
-from sqlalchemy import __version__
-
-from .compat import callable, exec_, load_module_py, load_module_pyc, \
- binary_type, string_types, py27
-
-
-class CommandError(Exception):
- pass
-
-
-def _safe_int(value):
- try:
- return int(value)
- except:
- return value
-_vers = tuple(
- [_safe_int(x) for x in re.findall(r'(\d+|[abc]\d)', __version__)])
-sqla_07 = _vers > (0, 7, 2)
-sqla_079 = _vers >= (0, 7, 9)
-sqla_08 = _vers >= (0, 8, 0)
-sqla_083 = _vers >= (0, 8, 3)
-sqla_084 = _vers >= (0, 8, 4)
-sqla_09 = _vers >= (0, 9, 0)
-sqla_092 = _vers >= (0, 9, 2)
-sqla_094 = _vers >= (0, 9, 4)
-sqla_094 = _vers >= (0, 9, 4)
-sqla_099 = _vers >= (0, 9, 9)
-sqla_100 = _vers >= (1, 0, 0)
-sqla_105 = _vers >= (1, 0, 5)
-if not sqla_07:
- raise CommandError(
- "SQLAlchemy 0.7.3 or greater is required. ")
-
-from sqlalchemy.util import format_argspec_plus, update_wrapper
-from sqlalchemy.util.compat import inspect_getfullargspec
-
-import logging
-log = logging.getLogger(__name__)
-
-if py27:
- # disable "no handler found" errors
- logging.getLogger('alembic').addHandler(logging.NullHandler())
-
-
-try:
- import fcntl
- import termios
- import struct
- ioctl = fcntl.ioctl(0, termios.TIOCGWINSZ,
- struct.pack('HHHH', 0, 0, 0, 0))
- _h, TERMWIDTH, _hp, _wp = struct.unpack('HHHH', ioctl)
- if TERMWIDTH <= 0: # can occur if running in emacs pseudo-tty
- TERMWIDTH = None
-except (ImportError, IOError):
- TERMWIDTH = None
-
-
-def template_to_file(template_file, dest, output_encoding, **kw):
- with open(dest, 'wb') as f:
- template = Template(filename=template_file)
- f.write(
- template.render_unicode(**kw).encode(output_encoding)
- )
-
-
-def create_module_class_proxy(cls, globals_, locals_):
- """Create module level proxy functions for the
- methods on a given class.
-
- The functions will have a compatible signature
- as the methods. A proxy is established
- using the ``_install_proxy(obj)`` function,
- and removed using ``_remove_proxy()``, both
- installed by calling this function.
-
- """
- attr_names = set()
-
- def _install_proxy(obj):
- globals_['_proxy'] = obj
- for name in attr_names:
- globals_[name] = getattr(obj, name)
-
- def _remove_proxy():
- globals_['_proxy'] = None
- for name in attr_names:
- del globals_[name]
-
- globals_['_install_proxy'] = _install_proxy
- globals_['_remove_proxy'] = _remove_proxy
-
- def _create_op_proxy(name):
- fn = getattr(cls, name)
- spec = inspect.getargspec(fn)
- if spec[0] and spec[0][0] == 'self':
- spec[0].pop(0)
- args = inspect.formatargspec(*spec)
- num_defaults = 0
- if spec[3]:
- num_defaults += len(spec[3])
- name_args = spec[0]
- if num_defaults:
- defaulted_vals = name_args[0 - num_defaults:]
- else:
- defaulted_vals = ()
-
- apply_kw = inspect.formatargspec(
- name_args, spec[1], spec[2],
- defaulted_vals,
- formatvalue=lambda x: '=' + x)
-
- def _name_error(name):
- raise NameError(
- "Can't invoke function '%s', as the proxy object has "
- "not yet been "
- "established for the Alembic '%s' class. "
- "Try placing this code inside a callable." % (
- name, cls.__name__
- ))
- globals_['_name_error'] = _name_error
-
- func_text = textwrap.dedent("""\
- def %(name)s(%(args)s):
- %(doc)r
- try:
- p = _proxy
- except NameError:
- _name_error('%(name)s')
- return _proxy.%(name)s(%(apply_kw)s)
- e
- """ % {
- 'name': name,
- 'args': args[1:-1],
- 'apply_kw': apply_kw[1:-1],
- 'doc': fn.__doc__,
- })
- lcl = {}
- exec_(func_text, globals_, lcl)
- return lcl[name]
-
- for methname in dir(cls):
- if not methname.startswith('_'):
- if callable(getattr(cls, methname)):
- locals_[methname] = _create_op_proxy(methname)
- else:
- attr_names.add(methname)
-
-
-def write_outstream(stream, *text):
- encoding = getattr(stream, 'encoding', 'ascii') or 'ascii'
- for t in text:
- if not isinstance(t, binary_type):
- t = t.encode(encoding, 'replace')
- t = t.decode(encoding)
- try:
- stream.write(t)
- except IOError:
- # suppress "broken pipe" errors.
- # no known way to handle this on Python 3 however
- # as the exception is "ignored" (noisily) in TextIOWrapper.
- break
-
-
-def coerce_resource_to_filename(fname):
- """Interpret a filename as either a filesystem location or as a package
- resource.
-
- Names that are non absolute paths and contain a colon
- are interpreted as resources and coerced to a file location.
-
- """
- if not os.path.isabs(fname) and ":" in fname:
- import pkg_resources
- fname = pkg_resources.resource_filename(*fname.split(':'))
- return fname
-
-
-def status(_statmsg, fn, *arg, **kw):
- msg(_statmsg + " ...", False)
- try:
- ret = fn(*arg, **kw)
- write_outstream(sys.stdout, " done\n")
- return ret
- except:
- write_outstream(sys.stdout, " FAILED\n")
- raise
-
-
-def err(message):
- log.error(message)
- msg("FAILED: %s" % message)
- sys.exit(-1)
-
-
-def obfuscate_url_pw(u):
- u = url.make_url(u)
- if u.password:
- u.password = 'XXXXX'
- return str(u)
-
-
-def asbool(value):
- return value is not None and \
- value.lower() == 'true'
-
-
-def warn(msg):
- warnings.warn(msg)
-
-
-def msg(msg, newline=True):
- if TERMWIDTH is None:
- write_outstream(sys.stdout, msg)
- if newline:
- write_outstream(sys.stdout, "\n")
- else:
- # left indent output lines
- lines = textwrap.wrap(msg, TERMWIDTH)
- if len(lines) > 1:
- for line in lines[0:-1]:
- write_outstream(sys.stdout, " ", line, "\n")
- write_outstream(sys.stdout, " ", lines[-1], ("\n" if newline else ""))
-
-
-def load_python_file(dir_, filename):
- """Load a file from the given path as a Python module."""
-
- module_id = re.sub(r'\W', "_", filename)
- path = os.path.join(dir_, filename)
- _, ext = os.path.splitext(filename)
- if ext == ".py":
- if os.path.exists(path):
- module = load_module_py(module_id, path)
- elif os.path.exists(simple_pyc_file_from_path(path)):
- # look for sourceless load
- module = load_module_pyc(
- module_id, simple_pyc_file_from_path(path))
- else:
- raise ImportError("Can't find Python file %s" % path)
- elif ext in (".pyc", ".pyo"):
- module = load_module_pyc(module_id, path)
- del sys.modules[module_id]
- return module
-
-
-def simple_pyc_file_from_path(path):
- """Given a python source path, return the so-called
- "sourceless" .pyc or .pyo path.
-
- This just a .pyc or .pyo file where the .py file would be.
-
- Even with PEP-3147, which normally puts .pyc/.pyo files in __pycache__,
- this use case remains supported as a so-called "sourceless module import".
-
- """
- if sys.flags.optimize:
- return path + "o" # e.g. .pyo
- else:
- return path + "c" # e.g. .pyc
-
-
-def pyc_file_from_path(path):
- """Given a python source path, locate the .pyc.
-
- See http://www.python.org/dev/peps/pep-3147/
- #detecting-pep-3147-availability
- http://www.python.org/dev/peps/pep-3147/#file-extension-checks
-
- """
- import imp
- has3147 = hasattr(imp, 'get_tag')
- if has3147:
- return imp.cache_from_source(path)
- else:
- return simple_pyc_file_from_path(path)
-
-
-def rev_id():
- val = int(uuid.uuid4()) % 100000000000000
- return hex(val)[2:-1]
-
-
-def to_tuple(x, default=None):
- if x is None:
- return default
- elif isinstance(x, string_types):
- return (x, )
- elif isinstance(x, collections.Iterable):
- return tuple(x)
- else:
- raise ValueError("Don't know how to turn %r into a tuple" % x)
-
-
-def format_as_comma(value):
- if value is None:
- return ""
- elif isinstance(value, string_types):
- return value
- elif isinstance(value, collections.Iterable):
- return ", ".join(value)
- else:
- raise ValueError("Don't know how to comma-format %r" % value)
-
-
-class memoized_property(object):
-
- """A read-only @property that is only evaluated once."""
-
- def __init__(self, fget, doc=None):
- self.fget = fget
- self.__doc__ = doc or fget.__doc__
- self.__name__ = fget.__name__
-
- def __get__(self, obj, cls):
- if obj is None:
- return self
- obj.__dict__[self.__name__] = result = self.fget(obj)
- return result
-
-
-class immutabledict(dict):
-
- def _immutable(self, *arg, **kw):
- raise TypeError("%s object is immutable" % self.__class__.__name__)
-
- __delitem__ = __setitem__ = __setattr__ = \
- clear = pop = popitem = setdefault = \
- update = _immutable
-
- def __new__(cls, *args):
- new = dict.__new__(cls)
- dict.__init__(new, *args)
- return new
-
- def __init__(self, *args):
- pass
-
- def __reduce__(self):
- return immutabledict, (dict(self), )
-
- def union(self, d):
- if not self:
- return immutabledict(d)
- else:
- d2 = immutabledict(self)
- dict.update(d2, d)
- return d2
-
- def __repr__(self):
- return "immutabledict(%s)" % dict.__repr__(self)
-
-
-def _with_legacy_names(translations):
- def decorate(fn):
-
- spec = inspect_getfullargspec(fn)
- metadata = dict(target='target', fn='fn')
- metadata.update(format_argspec_plus(spec, grouped=False))
-
- has_keywords = bool(spec[2])
-
- if not has_keywords:
- metadata['args'] += ", **kw"
- metadata['apply_kw'] += ", **kw"
-
- def go(*arg, **kw):
- names = set(kw).difference(spec[0])
- for oldname, newname in translations:
- if oldname in kw:
- kw[newname] = kw.pop(oldname)
- names.discard(oldname)
-
- warnings.warn(
- "Argument '%s' is now named '%s' for function '%s'" %
- (oldname, newname, fn.__name__))
- if not has_keywords and names:
- raise TypeError("Unknown arguments: %s" % ", ".join(names))
- return fn(*arg, **kw)
-
- code = 'lambda %(args)s: %(target)s(%(apply_kw)s)' % (
- metadata)
- decorated = eval(code, {"target": go})
- decorated.__defaults__ = getattr(fn, '__func__', fn).__defaults__
- update_wrapper(decorated, fn)
- if hasattr(decorated, '__wrapped__'):
- # update_wrapper in py3k applies __wrapped__, which causes
- # inspect.getargspec() to ignore the extra arguments on our
- # wrapper as of Python 3.4. We need this for the
- # "module class proxy" thing though, so just del the __wrapped__
- # for now. See #175 as well as bugs.python.org/issue17482
- del decorated.__wrapped__
- return decorated
-
- return decorate
diff --git a/alembic/util/__init__.py b/alembic/util/__init__.py
new file mode 100644
index 0000000..bd7196c
--- /dev/null
+++ b/alembic/util/__init__.py
@@ -0,0 +1,20 @@
+from .langhelpers import (  # noqa
+    asbool, rev_id, to_tuple, to_list, memoized_property,
+    immutabledict, _with_legacy_names, Dispatcher, ModuleClsProxy)
+from .messaging import (  # noqa
+    write_outstream, status, err, obfuscate_url_pw, warn, msg, format_as_comma)
+from .pyfiles import (  # noqa
+    template_to_file, coerce_resource_to_filename, simple_pyc_file_from_path,
+    pyc_file_from_path, load_python_file)
+from .sqla_compat import (  # noqa
+    sqla_07, sqla_079, sqla_08, sqla_083, sqla_084, sqla_09, sqla_092,
+    sqla_094, sqla_099, sqla_100, sqla_105)
+
+
+class CommandError(Exception):
+    # Base error raised for invalid Alembic command usage/configuration.
+    pass
+
+
+# hard minimum-version gate, evaluated at import time
+if not sqla_07:
+    raise CommandError(
+        "SQLAlchemy 0.7.3 or greater is required. ")
diff --git a/alembic/compat.py b/alembic/util/compat.py
index a9e35f0..a9e35f0 100644
--- a/alembic/compat.py
+++ b/alembic/util/compat.py
diff --git a/alembic/util/langhelpers.py b/alembic/util/langhelpers.py
new file mode 100644
index 0000000..904848c
--- /dev/null
+++ b/alembic/util/langhelpers.py
@@ -0,0 +1,275 @@
+import textwrap
+import warnings
+import inspect
+import uuid
+import collections
+
+from .compat import callable, exec_, string_types, with_metaclass
+
+from sqlalchemy.util import format_argspec_plus, update_wrapper
+from sqlalchemy.util.compat import inspect_getfullargspec
+
+
+class _ModuleClsMeta(type):
+    # Metaclass hook: whenever an attribute is (re)assigned on a
+    # ModuleClsProxy subclass, refresh the module-level proxies so
+    # already-installed namespaces pick up the new member.
+    def __setattr__(cls, key, value):
+        super(_ModuleClsMeta, cls).__setattr__(key, value)
+        cls._update_module_proxies(key)
+
+
+class ModuleClsProxy(with_metaclass(_ModuleClsMeta)):
+    """Create module level proxy functions for the
+    methods on a given class.
+
+    The functions will have a compatible signature
+    as the methods.
+
+    """
+
+    # class -> (set of proxied non-callable attribute names,
+    #           list of (globals, locals) namespaces proxied into)
+    _setups = collections.defaultdict(lambda: (set(), []))
+
+    @classmethod
+    def _update_module_proxies(cls, name):
+        # re-generate the proxy for ``name`` in every registered namespace
+        attr_names, modules = cls._setups[cls]
+        for globals_, locals_ in modules:
+            cls._add_proxied_attribute(name, globals_, locals_, attr_names)
+
+    def _install_proxy(self):
+        # point all registered module namespaces at this instance
+        attr_names, modules = self._setups[self.__class__]
+        for globals_, locals_ in modules:
+            globals_['_proxy'] = self
+            for attr_name in attr_names:
+                globals_[attr_name] = getattr(self, attr_name)
+
+    def _remove_proxy(self):
+        # detach: subsequent proxy calls raise via _name_error
+        attr_names, modules = self._setups[self.__class__]
+        for globals_, locals_ in modules:
+            globals_['_proxy'] = None
+            for attr_name in attr_names:
+                del globals_[attr_name]
+
+    @classmethod
+    def create_module_class_proxy(cls, globals_, locals_):
+        # register a module namespace and populate it with proxies for
+        # every public method/attribute of this class
+        attr_names, modules = cls._setups[cls]
+        modules.append(
+            (globals_, locals_)
+        )
+        cls._setup_proxy(globals_, locals_, attr_names)
+
+    @classmethod
+    def _setup_proxy(cls, globals_, locals_, attr_names):
+        for methname in dir(cls):
+            cls._add_proxied_attribute(methname, globals_, locals_, attr_names)
+
+    @classmethod
+    def _add_proxied_attribute(cls, methname, globals_, locals_, attr_names):
+        if not methname.startswith('_'):
+            meth = getattr(cls, methname)
+            if callable(meth):
+                locals_[methname] = cls._create_method_proxy(
+                    methname, globals_, locals_)
+            else:
+                attr_names.add(methname)
+
+    @classmethod
+    def _create_method_proxy(cls, name, globals_, locals_):
+        # Generate a module-level function with the same signature as the
+        # class method, dispatching to the currently installed _proxy.
+        fn = getattr(cls, name)
+        spec = inspect.getargspec(fn)
+        if spec[0] and spec[0][0] == 'self':
+            spec[0].pop(0)
+        args = inspect.formatargspec(*spec)
+        num_defaults = 0
+        if spec[3]:
+            num_defaults += len(spec[3])
+        name_args = spec[0]
+        if num_defaults:
+            defaulted_vals = name_args[0 - num_defaults:]
+        else:
+            defaulted_vals = ()
+
+        apply_kw = inspect.formatargspec(
+            name_args, spec[1], spec[2],
+            defaulted_vals,
+            formatvalue=lambda x: '=' + x)
+
+        def _name_error(name):
+            raise NameError(
+                "Can't invoke function '%s', as the proxy object has "
+                "not yet been "
+                "established for the Alembic '%s' class. "
+                "Try placing this code inside a callable." % (
+                    name, cls.__name__
+                ))
+        globals_['_name_error'] = _name_error
+
+        # BUG FIX: the original template contained a stray bare "e"
+        # statement after the return; exec'ing that generated source
+        # would raise NameError the moment any proxy was created.
+        func_text = textwrap.dedent("""\
+        def %(name)s(%(args)s):
+            %(doc)r
+            try:
+                p = _proxy
+            except NameError:
+                _name_error('%(name)s')
+            return _proxy.%(name)s(%(apply_kw)s)
+        """ % {
+            'name': name,
+            'args': args[1:-1],
+            'apply_kw': apply_kw[1:-1],
+            'doc': fn.__doc__,
+        })
+        lcl = {}
+        exec_(func_text, globals_, lcl)
+        return lcl[name]
+
+
+def asbool(value):
+    # True only for the (case-insensitive) string 'true'; None and any
+    # other value yield False.  Assumes a str when not None -- TODO
+    # confirm callers never pass non-strings.
+    return value is not None and \
+        value.lower() == 'true'
+
+
+def rev_id():
+    # Random revision identifier: UUID reduced mod 10**14, hex-encoded.
+    # NOTE(review): hex(val)[2:-1] strips the trailing 'L' of a py2 long,
+    # but on py3 it drops the final hex digit -- harmless for uniqueness,
+    # yet worth confirming this is intended.
+    val = int(uuid.uuid4()) % 100000000000000
+    return hex(val)[2:-1]
+
+
+def to_list(x, default=None):
+    # None -> default; str -> one-element list; other iterables -> list.
+    if x is None:
+        return default
+    elif isinstance(x, string_types):
+        return [x]
+    elif isinstance(x, collections.Iterable):
+        return list(x)
+    else:
+        raise ValueError("Don't know how to turn %r into a list" % x)
+
+
+def to_tuple(x, default=None):
+    # same coercion rules as to_list(), producing a tuple
+    if x is None:
+        return default
+    elif isinstance(x, string_types):
+        return (x, )
+    elif isinstance(x, collections.Iterable):
+        return tuple(x)
+    else:
+        raise ValueError("Don't know how to turn %r into a tuple" % x)
+
+
+class memoized_property(object):
+
+    """A read-only @property that is only evaluated once."""
+
+    def __init__(self, fget, doc=None):
+        self.fget = fget
+        self.__doc__ = doc or fget.__doc__
+        self.__name__ = fget.__name__
+
+    def __get__(self, obj, cls):
+        if obj is None:
+            return self
+        # cache the computed value in the instance __dict__; the instance
+        # attribute then shadows this descriptor on later accesses
+        obj.__dict__[self.__name__] = result = self.fget(obj)
+        return result
+
+
+class immutabledict(dict):
+    # dict subclass whose mutating methods all raise TypeError; used for
+    # safely shared default option mappings
+
+    def _immutable(self, *arg, **kw):
+        raise TypeError("%s object is immutable" % self.__class__.__name__)
+
+    __delitem__ = __setitem__ = __setattr__ = \
+        clear = pop = popitem = setdefault = \
+        update = _immutable
+
+    def __new__(cls, *args):
+        # populate via dict.__init__ here, since __init__ below is a no-op
+        new = dict.__new__(cls)
+        dict.__init__(new, *args)
+        return new
+
+    def __init__(self, *args):
+        pass
+
+    def __reduce__(self):
+        return immutabledict, (dict(self), )
+
+    def union(self, d):
+        # non-mutating merge returning a new immutabledict
+        if not self:
+            return immutabledict(d)
+        else:
+            d2 = immutabledict(self)
+            dict.update(d2, d)
+            return d2
+
+    def __repr__(self):
+        return "immutabledict(%s)" % dict.__repr__(self)
+
+
+def _with_legacy_names(translations):
+    # Decorator factory: accept keyword arguments under old names, emit a
+    # deprecation warning, and forward them under the new names.
+    # ``translations`` is a sequence of (oldname, newname) pairs.
+    def decorate(fn):
+
+        spec = inspect_getfullargspec(fn)
+        metadata = dict(target='target', fn='fn')
+        metadata.update(format_argspec_plus(spec, grouped=False))
+
+        has_keywords = bool(spec[2])
+
+        if not has_keywords:
+            # add **kw so legacy names can be received and renamed
+            metadata['args'] += ", **kw"
+            metadata['apply_kw'] += ", **kw"
+
+        def go(*arg, **kw):
+            names = set(kw).difference(spec[0])
+            for oldname, newname in translations:
+                if oldname in kw:
+                    kw[newname] = kw.pop(oldname)
+                    names.discard(oldname)
+
+                    warnings.warn(
+                        "Argument '%s' is now named '%s' for function '%s'" %
+                        (oldname, newname, fn.__name__))
+            if not has_keywords and names:
+                raise TypeError("Unknown arguments: %s" % ", ".join(names))
+            return fn(*arg, **kw)
+
+        # rebuild a wrapper exposing the original signature via eval, so
+        # introspection (and the module proxy generator) sees real args
+        code = 'lambda %(args)s: %(target)s(%(apply_kw)s)' % (
+            metadata)
+        decorated = eval(code, {"target": go})
+        decorated.__defaults__ = getattr(fn, '__func__', fn).__defaults__
+        update_wrapper(decorated, fn)
+        if hasattr(decorated, '__wrapped__'):
+            # update_wrapper in py3k applies __wrapped__, which causes
+            # inspect.getargspec() to ignore the extra arguments on our
+            # wrapper as of Python 3.4.  We need this for the
+            # "module class proxy" thing though, so just del the
+            # __wrapped__ for now.  See #175 as well as
+            # bugs.python.org/issue17482
+            del decorated.__wrapped__
+        return decorated
+
+    return decorate
+
+
+class Dispatcher(object):
+    """Registry mapping (class, qualifier) keys to handler callables,
+    resolved against a target object's MRO."""
+
+    def __init__(self):
+        self._registry = {}
+
+    def dispatch_for(self, target, qualifier='default'):
+        # decorator registering ``fn`` for the given target class
+        def decorate(fn):
+            assert isinstance(target, type)
+            # BUG FIX: registry keys are (target, qualifier) tuples; the
+            # original asserted ``target not in self._registry``, which
+            # could never fail and silently allowed double registration.
+            assert (target, qualifier) not in self._registry
+            self._registry[(target, qualifier)] = fn
+            return fn
+        return decorate
+
+    def dispatch(self, obj, qualifier='default'):
+        # walk the MRO, preferring an exact-qualifier entry and falling
+        # back to 'default' at each class before moving up the hierarchy;
+        # the for/else raises only when no class matched at all
+        for spcls in type(obj).__mro__:
+            if qualifier != 'default' and (spcls, qualifier) in self._registry:
+                return self._registry[(spcls, qualifier)]
+            elif (spcls, 'default') in self._registry:
+                return self._registry[(spcls, 'default')]
+        else:
+            raise ValueError("no dispatch function for object: %s" % obj)
+
+    def branch(self):
+        """Return a copy of this dispatcher that is independently
+        writable."""
+
+        d = Dispatcher()
+        d._registry.update(self._registry)
+        return d
diff --git a/alembic/util/messaging.py b/alembic/util/messaging.py
new file mode 100644
index 0000000..c202e96
--- /dev/null
+++ b/alembic/util/messaging.py
@@ -0,0 +1,94 @@
+from .compat import py27, binary_type, string_types
+import sys
+from sqlalchemy.engine import url
+import warnings
+import textwrap
+import collections
+import logging
+
+log = logging.getLogger(__name__)
+
+if py27:
+ # disable "no handler found" errors
+ logging.getLogger('alembic').addHandler(logging.NullHandler())
+
+
+try:
+ import fcntl
+ import termios
+ import struct
+ ioctl = fcntl.ioctl(0, termios.TIOCGWINSZ,
+ struct.pack('HHHH', 0, 0, 0, 0))
+ _h, TERMWIDTH, _hp, _wp = struct.unpack('HHHH', ioctl)
+ if TERMWIDTH <= 0: # can occur if running in emacs pseudo-tty
+ TERMWIDTH = None
+except (ImportError, IOError):
+ TERMWIDTH = None
+
+
def write_outstream(stream, *text):
    """Write each piece of *text* to *stream*, coercing through the
    stream's encoding (falling back to ascii) so unencodable
    characters are replaced instead of raising."""
    encoding = getattr(stream, 'encoding', 'ascii') or 'ascii'
    for piece in text:
        if not isinstance(piece, binary_type):
            piece = piece.encode(encoding, 'replace')
        piece = piece.decode(encoding)
        try:
            stream.write(piece)
        except IOError:
            # suppress "broken pipe" errors.
            # no known way to handle this on Python 3 however
            # as the exception is "ignored" (noisily) in TextIOWrapper.
            break
+
+
def status(_statmsg, fn, *arg, **kw):
    """Print *_statmsg*, invoke ``fn(*arg, **kw)``, then report
    " done" on success or " FAILED" (re-raising) on error."""
    msg(_statmsg + " ...", False)
    try:
        ret = fn(*arg, **kw)
    except BaseException:
        write_outstream(sys.stdout, " FAILED\n")
        raise
    else:
        write_outstream(sys.stdout, " done\n")
        return ret
+
+
def err(message):
    """Log *message* as an error, echo it to stdout, and exit with
    status -1.  Does not return."""
    log.error(message)
    msg("FAILED: %s" % message)
    sys.exit(-1)
+
+
def obfuscate_url_pw(u):
    """Return the database URL string *u* with any password replaced
    by 'XXXXX', suitable for logging."""
    parsed = url.make_url(u)
    if parsed.password:
        parsed.password = 'XXXXX'
    return str(parsed)
+
+
def warn(msg):
    """Issue a standard Python warning with the given message."""
    warnings.warn(msg)
+
+
def msg(msg, newline=True):
    """Write *msg* to stdout, word-wrapping to the detected terminal
    width when one is known."""
    if TERMWIDTH is None:
        write_outstream(sys.stdout, msg)
        if newline:
            write_outstream(sys.stdout, "\n")
        return
    # left indent output lines
    # NOTE(review): textwrap.wrap("") yields [], so lines[-1] would
    # raise on an empty message — presumably callers never pass one
    lines = textwrap.wrap(msg, TERMWIDTH)
    for line in lines[0:-1]:
        write_outstream(sys.stdout, " ", line, "\n")
    write_outstream(sys.stdout, " ", lines[-1], ("\n" if newline else ""))
+
+
def format_as_comma(value):
    """Render *value* as a comma-separated string: '' for None,
    strings pass through, other iterables are joined."""
    if value is None:
        return ""
    if isinstance(value, string_types):
        return value
    if isinstance(value, collections.Iterable):
        return ", ".join(value)
    raise ValueError("Don't know how to comma-format %r" % value)
diff --git a/alembic/util/pyfiles.py b/alembic/util/pyfiles.py
new file mode 100644
index 0000000..c51e187
--- /dev/null
+++ b/alembic/util/pyfiles.py
@@ -0,0 +1,80 @@
+import sys
+import os
+import re
+from .compat import load_module_py, load_module_pyc
+from mako.template import Template
+
+
def template_to_file(template_file, dest, output_encoding, **kw):
    """Render the Mako template at *template_file* into the file
    *dest* using *output_encoding*; ``**kw`` is the template context.

    The template is rendered *before* the destination is opened, so a
    template error no longer truncates an existing ``dest`` file.
    """
    rendered = Template(filename=template_file).render_unicode(**kw)
    with open(dest, 'wb') as f:
        f.write(rendered.encode(output_encoding))
+
+
def coerce_resource_to_filename(fname):
    """Interpret a filename as either a filesystem location or as a package
    resource.

    Names that are non absolute paths and contain a colon
    are interpreted as resources and coerced to a file location.

    """
    is_resource = ":" in fname and not os.path.isabs(fname)
    if not is_resource:
        return fname
    import pkg_resources
    return pkg_resources.resource_filename(*fname.split(':'))
+
+
def simple_pyc_file_from_path(path):
    """Given a python source path, return the so-called
    "sourceless" .pyc or .pyo path.

    This is just a .pyc or .pyo file where the .py file would be.

    Even with PEP-3147, which normally puts .pyc/.pyo files in
    __pycache__, this use case remains supported as a so-called
    "sourceless module import".

    """
    # .pyo under -O, .pyc otherwise
    suffix = "o" if sys.flags.optimize else "c"
    return path + suffix
+
+
def pyc_file_from_path(path):
    """Given a python source path, locate the .pyc.

    See http://www.python.org/dev/peps/pep-3147/
    #detecting-pep-3147-availability
    http://www.python.org/dev/peps/pep-3147/#file-extension-checks

    """
    import imp
    if hasattr(imp, 'get_tag'):
        # PEP 3147 available: cached file lives under __pycache__
        return imp.cache_from_source(path)
    # pre-PEP-3147 interpreter: .pyc/.pyo sits beside the source
    return simple_pyc_file_from_path(path)
+
+
def load_python_file(dir_, filename):
    """Load a file from the given path as a Python module.

    ``.py`` files load from source, falling back to an adjacent
    "sourceless" ``.pyc``/``.pyo``; ``.pyc``/``.pyo`` files load
    directly.

    :raises ImportError: if the file is missing or its extension is
     not a loadable Python file type.
    """
    module_id = re.sub(r'\W', "_", filename)
    path = os.path.join(dir_, filename)
    _, ext = os.path.splitext(filename)
    if ext == ".py":
        if os.path.exists(path):
            module = load_module_py(module_id, path)
        elif os.path.exists(simple_pyc_file_from_path(path)):
            # look for sourceless load
            module = load_module_pyc(
                module_id, simple_pyc_file_from_path(path))
        else:
            raise ImportError("Can't find Python file %s" % path)
    elif ext in (".pyc", ".pyo"):
        module = load_module_pyc(module_id, path)
    else:
        # previously fell through with ``module`` unbound, producing
        # an opaque NameError below
        raise ImportError("Don't know how to load Python file %s" % path)
    # drop the module from sys.modules; caller owns the only reference
    del sys.modules[module_id]
    return module
diff --git a/alembic/util/sqla_compat.py b/alembic/util/sqla_compat.py
new file mode 100644
index 0000000..871dcb8
--- /dev/null
+++ b/alembic/util/sqla_compat.py
@@ -0,0 +1,160 @@
+import re
+from sqlalchemy import __version__
+from sqlalchemy.schema import ForeignKeyConstraint, CheckConstraint, Column
+from sqlalchemy import types as sqltypes
+from sqlalchemy import schema, sql
+from sqlalchemy.sql.visitors import traverse
+from sqlalchemy.ext.compiler import compiles
+from sqlalchemy.sql.expression import _BindParamClause
+from . import compat
+
+
def _safe_int(value):
    """Coerce *value* to int, passing it through unchanged when it is
    not an integer string (e.g. the 'b1' of a beta release)."""
    try:
        return int(value)
    except (TypeError, ValueError):
        # non-numeric version component; keep as-is for tuple compare
        return value
_vers = tuple(
    [_safe_int(x) for x in re.findall(r'(\d+|[abc]\d)', __version__)])
sqla_07 = _vers > (0, 7, 2)
sqla_079 = _vers >= (0, 7, 9)
sqla_08 = _vers >= (0, 8, 0)
sqla_083 = _vers >= (0, 8, 3)
sqla_084 = _vers >= (0, 8, 4)
sqla_09 = _vers >= (0, 9, 0)
sqla_092 = _vers >= (0, 9, 2)
sqla_094 = _vers >= (0, 9, 4)
sqla_099 = _vers >= (0, 9, 9)
sqla_100 = _vers >= (1, 0, 0)
sqla_105 = _vers >= (1, 0, 5)

if sqla_08:
    from sqlalchemy.sql.expression import TextClause
else:
    from sqlalchemy.sql.expression import _TextClause as TextClause
+
+
def _table_for_constraint(constraint):
    """Return the Table a constraint belongs to; ForeignKeyConstraint
    keeps it on ``.parent`` rather than ``.table``."""
    if isinstance(constraint, ForeignKeyConstraint):
        return constraint.parent
    return constraint.table
+
+
def _columns_for_constraint(constraint):
    """Return the Column objects a constraint applies to."""
    if isinstance(constraint, ForeignKeyConstraint):
        # FK: the referring (parent) column of each element
        return [element.parent for element in constraint.elements]
    if isinstance(constraint, CheckConstraint):
        # CHECK: scan the SQL expression for column references
        return _find_columns(constraint.sqltext)
    return list(constraint.columns)
+
+
def _fk_spec(constraint):
    """Decompose a ForeignKeyConstraint into a 6-tuple:
    (source_schema, source_table, source_columns,
     target_schema, target_table, target_columns)."""
    if sqla_100:
        source_columns = [
            constraint.columns[key].name for key in constraint.column_keys]
    else:
        source_columns = [
            element.parent.name for element in constraint.elements]

    referent = constraint.elements[0].column.table
    target_columns = [element.column.name for element in constraint.elements]

    return (
        constraint.parent.schema,
        constraint.parent.name,
        source_columns,
        referent.schema,
        referent.name,
        target_columns,
    )
+
+
def _is_type_bound(constraint):
    """Return True if the CHECK constraint is generated by a type
    (Boolean/Enum).

    This deals with SQLAlchemy #3260; such constraints should not be
    copied since the type regenerates them.
    """
    if sqla_100:
        # new feature added for #3260
        return constraint._type_bound
    # old way, look at what we know Boolean/Enum to use
    rule = constraint._create_rule
    return rule is not None and isinstance(
        getattr(rule, "target", None), sqltypes.SchemaType)
+
+
def _find_columns(clause):
    """locate Column objects within the given expression."""

    found = set()
    traverse(clause, {}, {'column': found.add})
    return found
+
+
def _textual_index_column(table, text_):
    """a workaround for the Index construct's severe lack of flexibility"""
    if isinstance(text_, compat.string_types):
        col = Column(text_, sqltypes.NULLTYPE)
        table.append_column(col)
        return col
    if isinstance(text_, TextClause):
        return _textual_index_element(table, text_)
    raise ValueError("String or text() construct expected")
+
+
class _textual_index_element(sql.ColumnElement):
    """Wrap around a sqlalchemy text() construct in such a way that
    we appear like a column-oriented SQL expression to an Index
    construct.

    The issue here is that currently the Postgresql dialect, the biggest
    recipient of functional indexes, keys all the index expressions to
    the corresponding column expressions when rendering CREATE INDEX,
    so the Index we create here needs to have a .columns collection that
    is the same length as the .expressions collection.  Ultimately
    SQLAlchemy should support text() expressions in indexes.

    See https://bitbucket.org/zzzeek/sqlalchemy/issue/3174/\
    support-text-sent-to-indexes

    """
    __visit_name__ = '_textual_idx_element'

    def __init__(self, table, text):
        self.table = table
        self.text = text
        self.key = text.text
        # placeholder Column so Index.columns stays the same length
        # as Index.expressions
        self.fake_column = schema.Column(self.text.text, sqltypes.NULLTYPE)
        table.append_column(self.fake_column)

    def get_children(self):
        return [self.fake_column]
+
+
@compiles(_textual_index_element)
def _render_textual_index_column(element, compiler, **kw):
    """Compile the wrapped text() construct directly."""
    return compiler.process(element.text, **kw)
+
+
class _literal_bindparam(_BindParamClause):
    """Bind parameter marker that is always rendered inline as a
    literal (see the ``@compiles`` hook below)."""
    pass
+
+
@compiles(_literal_bindparam)
def _render_literal_bindparam(element, compiler, **kw):
    """Render the bound value inline rather than as a placeholder."""
    return compiler.render_literal_bindparam(element, **kw)
+
+
def _get_index_expressions(idx):
    """Return the index's expression list (``.columns`` before
    SQLAlchemy 0.8, which had no ``.expressions``)."""
    source = idx.expressions if sqla_08 else idx.columns
    return list(source)
+
+
def _get_index_column_names(idx):
    """Names of the indexed expressions; None for expressions that
    are not plain columns."""
    return [
        getattr(expression, "name", None)
        for expression in _get_index_expressions(idx)
    ]
diff --git a/docs/build/api.rst b/docs/build/api.rst
deleted file mode 100644
index fea4e14..0000000
--- a/docs/build/api.rst
+++ /dev/null
@@ -1,217 +0,0 @@
-.. _api:
-
-===========
-API Details
-===========
-
-This section describes some key functions used within the migration process, particularly those referenced within
-a migration environment's ``env.py`` file.
-
-Overview
-========
-
-The three main objects in use are the :class:`.EnvironmentContext`, :class:`.MigrationContext`,
-and :class:`.Operations` classes, pictured below.
-
-.. image:: api_overview.png
-
-An Alembic command begins by instantiating an :class:`.EnvironmentContext` object, then
-making it available via the ``alembic.context`` proxy module. The ``env.py``
-script, representing a user-configurable migration environment, is then
-invoked. The ``env.py`` script is then responsible for calling upon the
-:meth:`.EnvironmentContext.configure`, whose job it is to create
-a :class:`.MigrationContext` object.
-
-Before this method is called, there's not
-yet any database connection or dialect-specific state set up. While
-many methods on :class:`.EnvironmentContext` are usable at this stage,
-those which require database access, or at least access to the kind
-of database dialect in use, are not. Once the
-:meth:`.EnvironmentContext.configure` method is called, the :class:`.EnvironmentContext`
-is said to be *configured* with database connectivity, available via
-a new :class:`.MigrationContext` object. The :class:`.MigrationContext`
-is associated with the :class:`.EnvironmentContext` object
-via the :meth:`.EnvironmentContext.get_context` method.
-
-Finally, ``env.py`` calls upon the :meth:`.EnvironmentContext.run_migrations`
-method. Within this method, a new :class:`.Operations` object, which
-provides an API for individual database migration operations, is established
-within the ``alembic.op`` proxy module. The :class:`.Operations` object
-uses the :class:`.MigrationContext` object ultimately as a source of
-database connectivity, though in such a way that it does not care if the
-:class:`.MigrationContext` is talking to a real database or just writing
-out SQL to a file.
-
-The Environment Context
-=======================
-
-The :class:`.EnvironmentContext` class provides most of the
-API used within an ``env.py`` script. Within ``env.py``,
-the instantated :class:`.EnvironmentContext` is made available
-via a special *proxy module* called ``alembic.context``. That is,
-you can import ``alembic.context`` like a regular Python module,
-and each name you call upon it is ultimately routed towards the
-current :class:`.EnvironmentContext` in use.
-
-In particular, the key method used within ``env.py`` is :meth:`.EnvironmentContext.configure`,
-which establishes all the details about how the database will be accessed.
-
-.. automodule:: alembic.environment
- :members:
-
-The Migration Context
-=====================
-
-.. automodule:: alembic.migration
- :members:
-
-The Operations Object
-=====================
-
-Within migration scripts, actual database migration operations are handled
-via an instance of :class:`.Operations`. See :ref:`ops` for an overview
-of this object.
-
-Commands
-=========
-
-Alembic commands are all represented by functions in the :mod:`alembic.command`
-package. They all accept the same style of usage, being sent
-the :class:`~.alembic.config.Config` object as the first argument.
-
-Commands can be run programmatically, by first constructing a :class:`.Config`
-object, as in::
-
- from alembic.config import Config
- from alembic import command
- alembic_cfg = Config("/path/to/yourapp/alembic.ini")
- command.upgrade(alembic_cfg, "head")
-
-In many cases, and perhaps more often than not, an application will wish
-to call upon a series of Alembic commands and/or other features. It is
-usually a good idea to link multiple commands along a single connection
-and transaction, if feasible. This can be achieved using the
-:attr:`.Config.attributes` dictionary in order to share a connection::
-
- with engine.begin() as connection:
- alembic_cfg.attributes['connection'] = connection
- command.upgrade(alembic_cfg, "head")
-
-This recipe requires that ``env.py`` consumes this connection argument;
-see the example in :ref:`connection_sharing` for details.
-
-To write small API functions that make direct use of database and script directory
-information, rather than just running one of the built-in commands,
-use the :class:`.ScriptDirectory` and :class:`.MigrationContext`
-classes directly.
-
-.. currentmodule:: alembic.command
-
-.. automodule:: alembic.command
- :members:
-
-Configuration
-==============
-
-The :class:`.Config` object represents the configuration
-passed to the Alembic environment. From an API usage perspective,
-it is needed for the following use cases:
-
-* to create a :class:`.ScriptDirectory`, which allows you to work
- with the actual script files in a migration environment
-* to create an :class:`.EnvironmentContext`, which allows you to
- actually run the ``env.py`` module within the migration environment
-* to programatically run any of the commands in the :mod:`alembic.command`
- module.
-
-The :class:`.Config` is *not* needed for these cases:
-
-* to instantiate a :class:`.MigrationContext` directly - this object
- only needs a SQLAlchemy connection or dialect name.
-* to instantiate a :class:`.Operations` object - this object only
- needs a :class:`.MigrationContext`.
-
-.. currentmodule:: alembic.config
-
-.. automodule:: alembic.config
- :members:
-
-Script Directory
-================
-
-The :class:`.ScriptDirectory` object provides programmatic access
-to the Alembic version files present in the filesystem.
-
-.. automodule:: alembic.script
- :members:
-
-Revision
-========
-
-The :class:`.RevisionMap` object serves as the basis for revision
-management, used exclusively by :class:`.ScriptDirectory`.
-
-.. automodule:: alembic.revision
- :members:
-
-Autogeneration
-==============
-
-Alembic 0.3 introduces a small portion of the autogeneration system
-as a public API.
-
-.. autofunction:: alembic.autogenerate.compare_metadata
-
-DDL Internals
-=============
-
-These are some of the constructs used to generate migration
-instructions. The APIs here build off of the :class:`sqlalchemy.schema.DDLElement`
-and :mod:`sqlalchemy.ext.compiler` systems.
-
-For programmatic usage of Alembic's migration directives, the easiest
-route is to use the higher level functions given by :mod:`alembic.operations`.
-
-.. automodule:: alembic.ddl
- :members:
- :undoc-members:
-
-.. automodule:: alembic.ddl.base
- :members:
- :undoc-members:
-
-.. automodule:: alembic.ddl.impl
- :members:
- :undoc-members:
-
-MySQL
------
-
-.. automodule:: alembic.ddl.mysql
- :members:
- :undoc-members:
- :show-inheritance:
-
-MS-SQL
-------
-
-.. automodule:: alembic.ddl.mssql
- :members:
- :undoc-members:
- :show-inheritance:
-
-Postgresql
-----------
-
-.. automodule:: alembic.ddl.postgresql
- :members:
- :undoc-members:
- :show-inheritance:
-
-SQLite
-------
-
-.. automodule:: alembic.ddl.sqlite
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/build/api/api_overview.png b/docs/build/api/api_overview.png
new file mode 100644
index 0000000..37e7312
--- /dev/null
+++ b/docs/build/api/api_overview.png
Binary files differ
diff --git a/docs/build/api/autogenerate.rst b/docs/build/api/autogenerate.rst
new file mode 100644
index 0000000..b024ab1
--- /dev/null
+++ b/docs/build/api/autogenerate.rst
@@ -0,0 +1,235 @@
+.. _alembic.autogenerate.toplevel:
+
+==============
+Autogeneration
+==============
+
+The autogenerate system has two areas of API that are public:
+
+1. The ability to do a "diff" of a :class:`~sqlalchemy.schema.MetaData` object against
+ a database, and receive a data structure back. This structure
+ is available either as a rudimentary list of changes, or as
+ a :class:`.MigrateOperation` structure.
+
+2. The ability to alter how the ``alembic revision`` command generates
+ revision scripts, including support for multiple revision scripts
+ generated in one pass.
+
+Getting Diffs
+==============
+
+.. autofunction:: alembic.autogenerate.compare_metadata
+
+.. autofunction:: alembic.autogenerate.produce_migrations
+
+.. _customizing_revision:
+
+Customizing Revision Generation
+==========================================
+
+.. versionadded:: 0.8.0 - the ``alembic revision`` system is now customizable.
+
+The ``alembic revision`` command, also available programmatically
+via :func:`.command.revision`, essentially produces a single migration
+script after being run. Whether or not the ``--autogenerate`` option
+was specified basically determines if this script is a blank revision
+script with empty ``upgrade()`` and ``downgrade()`` functions, or was
+produced with alembic operation directives as the result of autogenerate.
+
+In either case, the system creates a full plan of what is to be done
+in the form of a :class:`.MigrateOperation` structure, which is then
+used to produce the script.
+
+For example, suppose we ran ``alembic revision --autogenerate``, and the
+end result was that it produced a new revision ``'eced083f5df'``
+with the following contents::
+
+ """create the organization table."""
+
+ # revision identifiers, used by Alembic.
+ revision = 'eced083f5df'
+ down_revision = 'beafc7d709f'
+
+ from alembic import op
+ import sqlalchemy as sa
+
+
+ def upgrade():
+ op.create_table(
+ 'organization',
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('name', sa.String(50), nullable=False)
+ )
+ op.add_column(
+ 'user',
+ sa.Column('organization_id', sa.Integer())
+ )
+ op.create_foreign_key(
+ 'org_fk', 'user', 'organization', ['organization_id'], ['id']
+ )
+
+ def downgrade():
+ op.drop_constraint('org_fk', 'user')
+ op.drop_column('user', 'organization_id')
+ op.drop_table('organization')
+
+The above script is generated by a :class:`.MigrateOperation` structure
+that looks like this::
+
+ from alembic.operations import ops
+ import sqlalchemy as sa
+
+ migration_script = ops.MigrationScript(
+ 'eced083f5df',
+ ops.UpgradeOps(
+ ops=[
+ ops.CreateTableOp(
+ 'organization',
+ [
+ sa.Column('id', sa.Integer(), primary_key=True),
+ sa.Column('name', sa.String(50), nullable=False)
+ ]
+ ),
+ ops.ModifyTableOps(
+ 'user',
+ ops=[
+ ops.AddColumnOp(
+ 'user',
+ sa.Column('organization_id', sa.Integer())
+ ),
+ ops.CreateForeignKeyOp(
+ 'org_fk', 'user', 'organization',
+ ['organization_id'], ['id']
+ )
+ ]
+ )
+ ]
+ ),
+ ops.DowngradeOps(
+ ops=[
+ ops.ModifyTableOps(
+ 'user',
+ ops=[
+ ops.DropConstraintOp('org_fk', 'user'),
+ ops.DropColumnOp('user', 'organization_id')
+ ]
+ ),
+ ops.DropTableOp('organization')
+ ]
+ ),
+ message='create the organization table.'
+ )
+
+When we deal with a :class:`.MigrationScript` structure, we can render
+the upgrade/downgrade sections into strings for debugging purposes
+using the :func:`.render_python_code` helper function::
+
+ from alembic.autogenerate import render_python_code
+ print(render_python_code(migration_script.upgrade_ops))
+
+Renders::
+
+ ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('organization',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('name', sa.String(length=50), nullable=False),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.add_column('user', sa.Column('organization_id', sa.Integer(), nullable=True))
+ op.create_foreign_key('org_fk', 'user', 'organization', ['organization_id'], ['id'])
+ ### end Alembic commands ###
+
+Given that structures like the above are used to generate new revision
+files, and that we'd like to be able to alter these as they are created,
+we then need a system to access this structure when the
+:func:`.command.revision` command is used. The
+:paramref:`.EnvironmentContext.configure.process_revision_directives`
+parameter gives us a way to alter this. This is a function that
+is passed the above structure as generated by Alembic, giving us a chance
+to alter it.
+For example, if we wanted to put all the "upgrade" operations into
+a certain branch, and we wanted our script to not have any "downgrade"
+operations at all, we could build an extension as follows, illustrated
+within an ``env.py`` script::
+
+ def process_revision_directives(context, revision, directives):
+ script = directives[0]
+
+ # set specific branch
+ script.head = "mybranch@head"
+
+ # erase downgrade operations
+ script.downgrade_ops.ops[:] = []
+
+ # ...
+
+ def run_migrations_online():
+
+ # ...
+ with engine.connect() as connection:
+
+ context.configure(
+ connection=connection,
+ target_metadata=target_metadata,
+ process_revision_directives=process_revision_directives)
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+Above, the ``directives`` argument is a Python list. We may alter the
+given structure within this list in-place, or replace it with a new
+structure consisting of zero or more :class:`.MigrationScript` directives.
+The :func:`.command.revision` command will then produce scripts corresponding
+to whatever is in this list.
+
+.. autofunction:: alembic.autogenerate.render_python_code
+
+Autogenerating Custom Operation Directives
+==========================================
+
+In the section :ref:`operation_plugins`, we talked about adding new
+subclasses of :class:`.MigrateOperation` in order to add new ``op.``
+directives. In the preceding section :ref:`customizing_revision`, we
+also learned that these same :class:`.MigrateOperation` structures are at
+the base of how the autogenerate system knows what Python code to render.
+How to connect these two systems, so that our own custom operation
+directives can be used? First off, we'd probably be implementing
+a :paramref:`.EnvironmentContext.configure.process_revision_directives`
+plugin as described previously, so that we can add our own directives
+to the autogenerate stream. What if we wanted to add our ``CreateSequenceOp``
+to the autogenerate structure? We basically need to define an autogenerate
+renderer for it, as follows::
+
+ # note: this is a continuation of the example from the
+ # "Operation Plugins" section
+
+ from alembic.autogenerate import renderers
+
+ @renderers.dispatch_for(CreateSequenceOp)
+ def render_create_sequence(autogen_context, op):
+ return "op.create_sequence(%r, **%r)" % (
+ op.sequence_name,
+ op.kw
+ )
+
+With our render function established, we can have our ``CreateSequenceOp``
+generated in an autogenerate context using the :func:`.render_python_code`
+debugging function in conjunction with an :class:`.UpgradeOps` structure::
+
+ from alembic.operations import ops
+ from alembic.autogenerate import render_python_code
+
+ upgrade_ops = ops.UpgradeOps(
+ ops=[
+ CreateSequenceOp("my_seq")
+ ]
+ )
+
+ print(render_python_code(upgrade_ops))
+
+Which produces::
+
+ ### commands auto generated by Alembic - please adjust! ###
+ op.create_sequence('my_seq', **{})
+ ### end Alembic commands ###
+
diff --git a/docs/build/api/commands.rst b/docs/build/api/commands.rst
new file mode 100644
index 0000000..65dcc09
--- /dev/null
+++ b/docs/build/api/commands.rst
@@ -0,0 +1,38 @@
+.. _alembic.command.toplevel:
+
+=========
+Commands
+=========
+
+Alembic commands are all represented by functions in the :ref:`alembic.command.toplevel`
+package. They all accept the same style of usage, being sent
+the :class:`.Config` object as the first argument.
+
+Commands can be run programmatically, by first constructing a :class:`.Config`
+object, as in::
+
+ from alembic.config import Config
+ from alembic import command
+ alembic_cfg = Config("/path/to/yourapp/alembic.ini")
+ command.upgrade(alembic_cfg, "head")
+
+In many cases, and perhaps more often than not, an application will wish
+to call upon a series of Alembic commands and/or other features. It is
+usually a good idea to link multiple commands along a single connection
+and transaction, if feasible. This can be achieved using the
+:attr:`.Config.attributes` dictionary in order to share a connection::
+
+ with engine.begin() as connection:
+ alembic_cfg.attributes['connection'] = connection
+ command.upgrade(alembic_cfg, "head")
+
+This recipe requires that ``env.py`` consumes this connection argument;
+see the example in :ref:`connection_sharing` for details.
+
+To write small API functions that make direct use of database and script directory
+information, rather than just running one of the built-in commands,
+use the :class:`.ScriptDirectory` and :class:`.MigrationContext`
+classes directly.
+
+.. automodule:: alembic.command
+ :members:
diff --git a/docs/build/api/config.rst b/docs/build/api/config.rst
new file mode 100644
index 0000000..25d934f
--- /dev/null
+++ b/docs/build/api/config.rst
@@ -0,0 +1,26 @@
+.. _alembic.config.toplevel:
+
+==============
+Configuration
+==============
+
+The :class:`.Config` object represents the configuration
+passed to the Alembic environment. From an API usage perspective,
+it is needed for the following use cases:
+
+* to create a :class:`.ScriptDirectory`, which allows you to work
+ with the actual script files in a migration environment
+* to create an :class:`.EnvironmentContext`, which allows you to
+ actually run the ``env.py`` module within the migration environment
+* to programmatically run any of the commands in the :ref:`alembic.command.toplevel`
+ module.
+
+The :class:`.Config` is *not* needed for these cases:
+
+* to instantiate a :class:`.MigrationContext` directly - this object
+ only needs a SQLAlchemy connection or dialect name.
+* to instantiate a :class:`.Operations` object - this object only
+ needs a :class:`.MigrationContext`.
+
+.. automodule:: alembic.config
+ :members:
diff --git a/docs/build/api/ddl.rst b/docs/build/api/ddl.rst
new file mode 100644
index 0000000..2d114c8
--- /dev/null
+++ b/docs/build/api/ddl.rst
@@ -0,0 +1,56 @@
+.. _alembic.ddl.toplevel:
+
+=============
+DDL Internals
+=============
+
+These are some of the constructs used to generate migration
+instructions. The APIs here build off of the :class:`sqlalchemy.schema.DDLElement`
+and :ref:`sqlalchemy.ext.compiler_toplevel` systems.
+
+For programmatic usage of Alembic's migration directives, the easiest
+route is to use the higher level functions given by :ref:`alembic.operations.toplevel`.
+
+.. automodule:: alembic.ddl
+ :members:
+ :undoc-members:
+
+.. automodule:: alembic.ddl.base
+ :members:
+ :undoc-members:
+
+.. automodule:: alembic.ddl.impl
+ :members:
+ :undoc-members:
+
+MySQL
+=============
+
+.. automodule:: alembic.ddl.mysql
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+MS-SQL
+=============
+
+.. automodule:: alembic.ddl.mssql
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Postgresql
+=============
+
+.. automodule:: alembic.ddl.postgresql
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+SQLite
+=============
+
+.. automodule:: alembic.ddl.sqlite
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/docs/build/api/environment.rst b/docs/build/api/environment.rst
new file mode 100644
index 0000000..5a22773
--- /dev/null
+++ b/docs/build/api/environment.rst
@@ -0,0 +1,19 @@
+.. _alembic.runtime.environment.toplevel:
+
+=======================
+The Environment Context
+=======================
+
+The :class:`.EnvironmentContext` class provides most of the
+API used within an ``env.py`` script. Within ``env.py``,
+the instantiated :class:`.EnvironmentContext` is made available
+via a special *proxy module* called ``alembic.context``. That is,
+you can import ``alembic.context`` like a regular Python module,
+and each name you call upon it is ultimately routed towards the
+current :class:`.EnvironmentContext` in use.
+
+In particular, the key method used within ``env.py`` is :meth:`.EnvironmentContext.configure`,
+which establishes all the details about how the database will be accessed.
+
+.. automodule:: alembic.runtime.environment
+ :members: EnvironmentContext
diff --git a/docs/build/api/index.rst b/docs/build/api/index.rst
new file mode 100644
index 0000000..aa7c1a9
--- /dev/null
+++ b/docs/build/api/index.rst
@@ -0,0 +1,33 @@
+.. _api:
+
+===========
+API Details
+===========
+
+Alembic's internal API has many public integration points that can be used
+to extend Alembic's functionality as well as to re-use its functionality
+in new ways. As the project has grown, more APIs are created and exposed
+for this purpose.
+
+Direct use of the vast majority of API details discussed here is not needed
+for rudimentary use of Alembic; the only API that is used normally by end users is
+the methods provided by the :class:`.Operations` class, which is discussed
+outside of this subsection, and the parameters that can be passed to
+the :meth:`.EnvironmentContext.configure` method, used when configuring
+one's ``env.py`` environment. However, real-world applications will
+usually end up using more of the internal API, in particular being able
+to run commands programmatically, as discussed in the section :doc:`/api/commands`.
+
+.. toctree::
+ :maxdepth: 2
+
+ overview
+ environment
+ migration
+ config
+ commands
+ operations
+ autogenerate
+ script
+ ddl
+
diff --git a/docs/build/api/migration.rst b/docs/build/api/migration.rst
new file mode 100644
index 0000000..ae74818
--- /dev/null
+++ b/docs/build/api/migration.rst
@@ -0,0 +1,8 @@
+.. _alembic.runtime.migration.toplevel:
+
+=====================
+The Migration Context
+=====================
+
+.. automodule:: alembic.runtime.migration
+ :members: MigrationContext
diff --git a/docs/build/api/operations.rst b/docs/build/api/operations.rst
new file mode 100644
index 0000000..d9ff238
--- /dev/null
+++ b/docs/build/api/operations.rst
@@ -0,0 +1,123 @@
+.. _alembic.operations.toplevel:
+
+=====================
+The Operations Object
+=====================
+
+Within migration scripts, actual database migration operations are handled
+via an instance of :class:`.Operations`. The :class:`.Operations` class
+lists out available migration operations that are linked to a
+:class:`.MigrationContext`, which communicates instructions originated
+by the :class:`.Operations` object into SQL that is sent to a database or SQL
+output stream.
+
+Most methods on the :class:`.Operations` class are generated dynamically
+using a "plugin" system, described in the next section
+:ref:`operation_plugins`. Additionally, when Alembic migration scripts
+actually run, the methods on the current :class:`.Operations` object are
+proxied out to the ``alembic.op`` module, so that they are available
+using module-style access.
+
+For an overview of how to use an :class:`.Operations` object directly
+in programs, as well as for reference to the standard operation methods
+as well as "batch" methods, see :ref:`ops`.
+
+.. _operation_plugins:
+
+Operation Plugins
+=====================
+
+The Operations object is extensible using a plugin system. This system
+allows one to add new ``op.<some_operation>`` methods at runtime. The
+steps to use this system are to first create a subclass of
+:class:`.MigrateOperation`, register it using the :meth:`.Operations.register_operation`
+class decorator, then build a default "implementation" function which is
+established using the :meth:`.Operations.implementation_for` decorator.
+
+.. versionadded:: 0.8.0 - the :class:`.Operations` class is now an
+ open namespace that is extensible via the creation of new
+ :class:`.MigrateOperation` subclasses.
+
+Below we illustrate a very simple operation ``CreateSequenceOp`` which
+will implement a new method ``op.create_sequence()`` for use in
+migration scripts::
+
+ from alembic.operations import Operations, MigrateOperation
+
+ @Operations.register_operation("create_sequence")
+ class CreateSequenceOp(MigrateOperation):
+ """Create a SEQUENCE."""
+
+ def __init__(self, sequence_name, **kw):
+ self.sequence_name = sequence_name
+ self.kw = kw
+
+ @classmethod
+ def create_sequence(cls, operations, sequence_name, **kw):
+ """Issue a "CREATE SEQUENCE" instruction."""
+
+ op = CreateSequenceOp(sequence_name, **kw)
+ return operations.invoke(op)
+
+Above, the ``CreateSequenceOp`` class represents a new operation that will
+be available as ``op.create_sequence()``. The reason the operation
+is represented as a stateful class is so that an operation and a specific
+set of arguments can be represented generically; the state can then correspond
+to different kinds of operations, such as invoking the instruction against
+a database, or autogenerating Python code for the operation into a
+script.
+
+In order to establish the migrate-script behavior of the new operation,
+we use the :meth:`.Operations.implementation_for` decorator::
+
+ @Operations.implementation_for(CreateSequenceOp)
+ def create_sequence(operations, operation):
+ operations.execute("CREATE SEQUENCE %s" % operation.sequence_name)
+
+Above, we use the simplest possible technique of invoking our DDL, which
+is just to call :meth:`.Operations.execute` with literal SQL. If this is
+all a custom operation needs, then this is fine. However, options for
+more comprehensive support include building out a custom SQL construct,
+as documented at :ref:`sqlalchemy.ext.compiler_toplevel`.
+
+With the above two steps, a migration script can now use a new method
+``op.create_sequence()`` that will proxy to our object as a classmethod::
+
+ def upgrade():
+ op.create_sequence("my_sequence")
+
+The registration of new operations only needs to occur in time for the
+``env.py`` script to invoke :meth:`.MigrationContext.run_migrations`;
+within the module level of the ``env.py`` script is sufficient.
+
+
+.. versionadded:: 0.8 - the migration operations available via the
+   :class:`.Operations` class as well as the ``alembic.op`` namespace
+   are now extensible using a plugin system.
+
+
+.. _operation_objects:
+
+Built-in Operation Objects
+==============================
+
+The migration operations present on :class:`.Operations` are themselves
+delivered via operation objects that represent an operation and its
+arguments. All operations descend from the :class:`.MigrateOperation`
+class, and are registered with the :class:`.Operations` class using
+the :meth:`.Operations.register_operation` class decorator. The
+:class:`.MigrateOperation` objects also serve as the basis for how the
+autogenerate system renders new migration scripts.
+
+.. seealso::
+
+ :ref:`operation_plugins`
+
+ :ref:`customizing_revision`
+
+The built-in operation objects are listed below.
+
+.. _alembic.operations.ops.toplevel:
+
+.. automodule:: alembic.operations.ops
+ :members:
diff --git a/docs/build/api/overview.rst b/docs/build/api/overview.rst
new file mode 100644
index 0000000..048d1e6
--- /dev/null
+++ b/docs/build/api/overview.rst
@@ -0,0 +1,47 @@
+========
+Overview
+========
+
+A visualization of the primary features of Alembic's internals is presented
+in the following figure. The module and class boxes do not list out
+all the operations provided by each unit; only a small set of representative
+elements intended to convey the primary purpose of each system.
+
+.. image:: api_overview.png
+
+The script runner for Alembic is present in the :ref:`alembic.config.toplevel` module.
+This module produces a :class:`.Config` object and passes it to the
+appropriate function in :ref:`alembic.command.toplevel`. Functions within
+:ref:`alembic.command.toplevel` will typically instantiate an
+:class:`.ScriptDirectory` instance, which represents the collection of
+version files, and an :class:`.EnvironmentContext`, which represents a
+configurational object passed to the environment's ``env.py`` script.
+
+Within the execution of ``env.py``, a :class:`.MigrationContext`
+object is produced when the :meth:`.EnvironmentContext.configure`
+method is called. :class:`.MigrationContext` is the gateway to the database
+for other parts of the application, and produces a :class:`.DefaultImpl`
+object which does the actual database communication, and knows how to
+create the specific SQL text of the various DDL directives such as
+ALTER TABLE; :class:`.DefaultImpl` has subclasses that are per-database-backend.
+In "offline" mode (e.g. ``--sql``), the :class:`.MigrationContext` will
+produce SQL to a file output stream instead of a database.
+
+During an upgrade or downgrade operation, a specific series of migration
+scripts are invoked starting with the :class:`.MigrationContext` in conjunction
+with the :class:`.ScriptDirectory`; the actual scripts themselves make use
+of the :class:`.Operations` object, which provides the end-user interface to
+specific database operations. The :class:`.Operations` object is generated
+based on a series of "operation directive" objects that are user-extensible,
+and start out in the :ref:`alembic.operations.ops.toplevel` module.
+
+Another prominent feature of Alembic is the "autogenerate" feature, which
+produces new migration scripts that contain Python code. The autogenerate
+feature starts in :ref:`alembic.autogenerate.toplevel`, and is used exclusively
+by the :func:`.alembic.command.revision` command when the ``--autogenerate``
+flag is passed. Autogenerate refers to the :class:`.MigrationContext`
+and :class:`.DefaultImpl` in order to access database connectivity and
+access per-backend rules for autogenerate comparisons. It also makes use
+of :ref:`alembic.operations.ops.toplevel` in order to represent the operations that
+it will render into scripts.
+
diff --git a/docs/build/api/script.rst b/docs/build/api/script.rst
new file mode 100644
index 0000000..8dc594b
--- /dev/null
+++ b/docs/build/api/script.rst
@@ -0,0 +1,20 @@
+.. _alembic.script.toplevel:
+
+================
+Script Directory
+================
+
+The :class:`.ScriptDirectory` object provides programmatic access
+to the Alembic version files present in the filesystem.
+
+.. automodule:: alembic.script
+ :members:
+
+Revision
+========
+
+The :class:`.RevisionMap` object serves as the basis for revision
+management, used exclusively by :class:`.ScriptDirectory`.
+
+.. automodule:: alembic.script.revision
+ :members:
diff --git a/docs/build/api_overview.png b/docs/build/api_overview.png
deleted file mode 100644
index dab204b..0000000
--- a/docs/build/api_overview.png
+++ /dev/null
Binary files differ
diff --git a/docs/build/assets/api_overview.graffle b/docs/build/assets/api_overview.graffle
index 7c083e5..1e58ea5 100644
--- a/docs/build/assets/api_overview.graffle
+++ b/docs/build/assets/api_overview.graffle
@@ -4,34 +4,56 @@
<dict>
<key>ActiveLayerIndex</key>
<integer>0</integer>
+ <key>ApplicationVersion</key>
+ <array>
+ <string>com.omnigroup.OmniGrafflePro</string>
+ <string>139.18.0.187838</string>
+ </array>
<key>AutoAdjust</key>
<true/>
- <key>CanvasColor</key>
+ <key>BackgroundGraphic</key>
<dict>
- <key>w</key>
- <string>1</string>
+ <key>Bounds</key>
+ <string>{{0, 0}, {1176, 768}}</string>
+ <key>Class</key>
+ <string>SolidGraphic</string>
+ <key>ID</key>
+ <integer>2</integer>
+ <key>Style</key>
+ <dict>
+ <key>shadow</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ <key>stroke</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ </dict>
</dict>
+ <key>BaseZoom</key>
+ <integer>0</integer>
<key>CanvasOrigin</key>
<string>{0, 0}</string>
- <key>CanvasScale</key>
- <real>1</real>
<key>ColumnAlign</key>
<integer>1</integer>
<key>ColumnSpacing</key>
<real>36</real>
<key>CreationDate</key>
- <string>2012-01-24 16:51:07 -0500</string>
+ <string>2012-01-24 21:51:07 +0000</string>
<key>Creator</key>
<string>classic</string>
<key>DisplayScale</key>
- <string>1 in = 1 in</string>
+ <string>1 0/72 in = 1.0000 in</string>
<key>GraphDocumentVersion</key>
- <integer>5</integer>
+ <integer>8</integer>
<key>GraphicsList</key>
<array>
<dict>
<key>Bounds</key>
- <string>{{319.25, 165}, {66, 12}}</string>
+ <string>{{601.74580087231288, 420}, {84, 12}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>FitText</key>
@@ -39,7 +61,7 @@
<key>Flow</key>
<string>Resize</string>
<key>ID</key>
- <integer>2054</integer>
+ <integer>2140</integer>
<key>Shape</key>
<string>Rectangle</string>
<key>Style</key>
@@ -60,89 +82,64 @@
<key>Align</key>
<integer>0</integer>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\ql\qnatural
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
-\f0\fs20 \cf0 &lt;&lt;proxies&gt;&gt;}</string>
- </dict>
- <key>Wrap</key>
- <string>NO</string>
- </dict>
- <dict>
- <key>Bounds</key>
- <string>{{444, 216.633}, {66, 12}}</string>
- <key>Class</key>
- <string>ShapedGraphic</string>
- <key>FitText</key>
- <string>YES</string>
- <key>Flow</key>
- <string>Resize</string>
- <key>ID</key>
- <integer>2053</integer>
- <key>Shape</key>
- <string>Rectangle</string>
- <key>Style</key>
- <dict>
- <key>shadow</key>
- <dict>
- <key>Draws</key>
- <string>NO</string>
- </dict>
- <key>stroke</key>
- <dict>
- <key>Draws</key>
- <string>NO</string>
- </dict>
- </dict>
- <key>Text</key>
- <dict>
- <key>Align</key>
+\f0\fs20 \cf0 &lt;&lt;instantiates&gt;&gt;}</string>
+ <key>VerticalPad</key>
<integer>0</integer>
- <key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
-{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\ql\qnatural
-
-\f0\fs20 \cf0 &lt;&lt;proxies&gt;&gt;}</string>
</dict>
<key>Wrap</key>
<string>NO</string>
</dict>
<dict>
<key>Class</key>
- <string>LineGraphic</string>
- <key>Head</key>
- <dict>
- <key>ID</key>
- <integer>2048</integer>
- </dict>
- <key>ID</key>
- <integer>2051</integer>
- <key>Points</key>
+ <string>TableGroup</string>
+ <key>Graphics</key>
<array>
- <string>{165, 221.6}</string>
- <string>{109, 221.6}</string>
- </array>
- <key>Style</key>
- <dict>
- <key>stroke</key>
<dict>
- <key>HeadArrow</key>
- <string>StickArrow</string>
- <key>Pattern</key>
- <integer>1</integer>
- <key>TailArrow</key>
- <string>0</string>
+ <key>Bounds</key>
+ <string>{{191, 107.40116119384766}, {102.9071044921875, 14}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2132</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>fill</key>
+ <dict>
+ <key>GradientCenter</key>
+ <string>{-0.29411799999999999, -0.264706}</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc
+
+\f0\b\fs24 \cf0 PostgresqlImpl}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
</dict>
- </dict>
- <key>Tail</key>
- <dict>
- <key>ID</key>
- <integer>33</integer>
- </dict>
+ </array>
+ <key>GroupConnect</key>
+ <string>YES</string>
+ <key>ID</key>
+ <integer>2131</integer>
</dict>
<dict>
<key>Class</key>
@@ -151,7 +148,7 @@
<array>
<dict>
<key>Bounds</key>
- <string>{{19, 207.6}, {90, 14}}</string>
+ <string>{{230.9169921875, 132.80233001708984}, {102.9071044921875, 14}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>FitText</key>
@@ -159,35 +156,46 @@
<key>Flow</key>
<string>Resize</string>
<key>ID</key>
- <integer>2049</integer>
+ <integer>2130</integer>
<key>Shape</key>
<string>Rectangle</string>
<key>Style</key>
<dict>
<key>fill</key>
<dict>
- <key>GradientAngle</key>
- <real>304</real>
<key>GradientCenter</key>
- <string>{-0.294118, -0.264706}</string>
+ <string>{-0.29411799999999999, -0.264706}</string>
</dict>
</dict>
<key>Text</key>
<dict>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc
-\f0\b\fs24 \cf0 Config}</string>
+\f0\b\fs24 \cf0 MSSQLImpl}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
<key>TextPlacement</key>
<integer>0</integer>
</dict>
+ </array>
+ <key>GroupConnect</key>
+ <string>YES</string>
+ <key>ID</key>
+ <integer>2129</integer>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>TableGroup</string>
+ <key>Graphics</key>
+ <array>
<dict>
<key>Bounds</key>
- <string>{{19, 221.6}, {90, 14}}</string>
+ <string>{{226, 82}, {102.9071044921875, 14}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>FitText</key>
@@ -195,45 +203,179 @@
<key>Flow</key>
<string>Resize</string>
<key>ID</key>
- <integer>2050</integer>
+ <integer>2127</integer>
<key>Shape</key>
<string>Rectangle</string>
<key>Style</key>
<dict>
<key>fill</key>
<dict>
- <key>GradientAngle</key>
- <real>304</real>
<key>GradientCenter</key>
- <string>{-0.294118, -0.264706}</string>
+ <string>{-0.29411799999999999, -0.264706}</string>
</dict>
</dict>
<key>Text</key>
<dict>
- <key>Align</key>
- <integer>0</integer>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\ql\qnatural
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc
-\f0\fs24 \cf0 ConfigParser}</string>
+\f0\b\fs24 \cf0 MySQLImpl}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
<key>TextPlacement</key>
<integer>0</integer>
</dict>
</array>
- <key>GridH</key>
+ <key>GroupConnect</key>
+ <string>YES</string>
+ <key>ID</key>
+ <integer>2126</integer>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>LineGraphic</string>
+ <key>Head</key>
+ <dict>
+ <key>ID</key>
+ <integer>2055</integer>
+ </dict>
+ <key>ID</key>
+ <integer>2135</integer>
+ <key>Points</key>
<array>
- <integer>2049</integer>
- <integer>2050</integer>
- <array/>
+ <string>{280.22809604806071, 146.80233001708984}</string>
+ <string>{272.46503226582109, 172.16651000976572}</string>
</array>
- <key>GroupConnect</key>
+ <key>Style</key>
+ <dict>
+ <key>stroke</key>
+ <dict>
+ <key>HeadArrow</key>
+ <string>UMLInheritance</string>
+ <key>Legacy</key>
+ <true/>
+ <key>TailArrow</key>
+ <string>0</string>
+ </dict>
+ </dict>
+ <key>Tail</key>
+ <dict>
+ <key>ID</key>
+ <integer>2129</integer>
+ </dict>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>LineGraphic</string>
+ <key>Head</key>
+ <dict>
+ <key>ID</key>
+ <integer>2055</integer>
+ </dict>
+ <key>ID</key>
+ <integer>2134</integer>
+ <key>Points</key>
+ <array>
+ <string>{243.64926792598939, 121.40116119384763}</string>
+ <string>{252.32082843664148, 172.16651000976572}</string>
+ </array>
+ <key>Style</key>
+ <dict>
+ <key>stroke</key>
+ <dict>
+ <key>HeadArrow</key>
+ <string>UMLInheritance</string>
+ <key>Legacy</key>
+ <true/>
+ <key>TailArrow</key>
+ <string>0</string>
+ </dict>
+ </dict>
+ <key>Tail</key>
+ <dict>
+ <key>ID</key>
+ <integer>2131</integer>
+ </dict>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>LineGraphic</string>
+ <key>Head</key>
+ <dict>
+ <key>ID</key>
+ <integer>2055</integer>
+ </dict>
+ <key>ID</key>
+ <integer>2133</integer>
+ <key>Points</key>
+ <array>
+ <string>{276.4518773872507, 95.999999999999986}</string>
+ <string>{265.55272336402226, 172.16651000976572}</string>
+ </array>
+ <key>Style</key>
+ <dict>
+ <key>stroke</key>
+ <dict>
+ <key>HeadArrow</key>
+ <string>UMLInheritance</string>
+ <key>Legacy</key>
+ <true/>
+ <key>TailArrow</key>
+ <string>0</string>
+ </dict>
+ </dict>
+ <key>Tail</key>
+ <dict>
+ <key>ID</key>
+ <integer>2126</integer>
+ </dict>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{504, 310}, {84, 12}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
<string>YES</string>
+ <key>Flow</key>
+ <string>Resize</string>
<key>ID</key>
- <integer>2048</integer>
+ <integer>2125</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>shadow</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ <key>stroke</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
+
+\f0\fs20 \cf0 &lt;&lt;instantiates&gt;&gt;}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>Wrap</key>
+ <string>NO</string>
</dict>
<dict>
<key>Class</key>
@@ -244,15 +386,17 @@
<integer>33</integer>
</dict>
<key>ID</key>
- <integer>2046</integer>
+ <integer>2124</integer>
<key>OrthogonalBarAutomatic</key>
- <true/>
+ <false/>
+ <key>OrthogonalBarPoint</key>
+ <string>{0, 0}</string>
<key>OrthogonalBarPosition</key>
- <real>28.725006103515625</real>
+ <real>16</real>
<key>Points</key>
<array>
- <string>{385.25, 157}</string>
- <string>{304, 191.818}</string>
+ <string>{563, 340.34042553191489}</string>
+ <string>{497.13201904296875, 327.88251038766401}</string>
</array>
<key>Style</key>
<dict>
@@ -260,6 +404,8 @@
<dict>
<key>HeadArrow</key>
<string>StickArrow</string>
+ <key>Legacy</key>
+ <true/>
<key>LineType</key>
<integer>2</integer>
<key>Pattern</key>
@@ -271,7 +417,152 @@
<key>Tail</key>
<dict>
<key>ID</key>
- <integer>2042</integer>
+ <integer>2072</integer>
+ </dict>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{494.00001409542369, 415.9000186920166}, {55, 12}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>YES</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2123</integer>
+ <key>Line</key>
+ <dict>
+ <key>ID</key>
+ <integer>2139</integer>
+ <key>Position</key>
+ <real>0.37128287553787231</real>
+ <key>RotationType</key>
+ <integer>0</integer>
+ </dict>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>shadow</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ <key>stroke</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
+
+\f0\fs20 \cf0 &lt;&lt;uses&gt;&gt;}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>Wrap</key>
+ <string>NO</string>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{713.35945466160774, 356.11699358749399}, {55, 12}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>YES</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2122</integer>
+ <key>Line</key>
+ <dict>
+ <key>ID</key>
+ <integer>2121</integer>
+ <key>Position</key>
+ <real>0.49189183115959167</real>
+ <key>RotationType</key>
+ <integer>0</integer>
+ </dict>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>shadow</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ <key>stroke</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
+
+\f0\fs20 \cf0 &lt;&lt;uses&gt;&gt;}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>Wrap</key>
+ <string>NO</string>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>LineGraphic</string>
+ <key>Head</key>
+ <dict>
+ <key>ID</key>
+ <integer>2081</integer>
+ <key>Info</key>
+ <integer>5</integer>
+ </dict>
+ <key>ID</key>
+ <integer>2121</integer>
+ <key>Points</key>
+ <array>
+ <string>{702, 363.10150901307452}</string>
+ <string>{781, 361.10002136230463}</string>
+ </array>
+ <key>Style</key>
+ <dict>
+ <key>stroke</key>
+ <dict>
+ <key>HeadArrow</key>
+ <string>StickArrow</string>
+ <key>HopLines</key>
+ <true/>
+ <key>HopType</key>
+ <integer>102</integer>
+ <key>Legacy</key>
+ <true/>
+ <key>Pattern</key>
+ <integer>1</integer>
+ <key>TailArrow</key>
+ <string>0</string>
+ </dict>
+ </dict>
+ <key>Tail</key>
+ <dict>
+ <key>ID</key>
+ <integer>2072</integer>
</dict>
</dict>
<dict>
@@ -280,18 +571,20 @@
<key>Head</key>
<dict>
<key>ID</key>
- <integer>38</integer>
+ <integer>2059</integer>
</dict>
<key>ID</key>
- <integer>2044</integer>
+ <integer>2120</integer>
<key>OrthogonalBarAutomatic</key>
<true/>
+ <key>OrthogonalBarPoint</key>
+ <string>{0, 0}</string>
<key>OrthogonalBarPosition</key>
- <real>52.850021362304688</real>
+ <real>-1</real>
<key>Points</key>
<array>
- <string>{454.25, 177}</string>
- <string>{442.638, 294.6}</string>
+ <string>{637, 406}</string>
+ <string>{565.78369522094727, 454.05202861384231}</string>
</array>
<key>Style</key>
<dict>
@@ -299,6 +592,8 @@
<dict>
<key>HeadArrow</key>
<string>StickArrow</string>
+ <key>Legacy</key>
+ <true/>
<key>LineType</key>
<integer>2</integer>
<key>Pattern</key>
@@ -310,12 +605,12 @@
<key>Tail</key>
<dict>
<key>ID</key>
- <integer>2043</integer>
+ <integer>2072</integer>
</dict>
</dict>
<dict>
<key>Bounds</key>
- <string>{{385.25, 172}, {69, 14}}</string>
+ <string>{{717, 400}, {68, 12}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>FitText</key>
@@ -323,20 +618,110 @@
<key>Flow</key>
<string>Resize</string>
<key>ID</key>
- <integer>2043</integer>
- <key>Magnets</key>
- <array>
- <string>{0.5, -0.142857}</string>
- </array>
+ <integer>2119</integer>
<key>Shape</key>
<string>Rectangle</string>
<key>Style</key>
<dict>
- <key>fill</key>
+ <key>shadow</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ <key>stroke</key>
<dict>
<key>Draws</key>
<string>NO</string>
</dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
+
+\f0\fs20 \cf0 &lt;&lt;invokes&gt;&gt;}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>Wrap</key>
+ <string>NO</string>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>LineGraphic</string>
+ <key>Head</key>
+ <dict>
+ <key>ID</key>
+ <integer>2072</integer>
+ <key>Info</key>
+ <integer>5</integer>
+ </dict>
+ <key>ID</key>
+ <integer>2118</integer>
+ <key>OrthogonalBarAutomatic</key>
+ <true/>
+ <key>OrthogonalBarPoint</key>
+ <string>{0, 0}</string>
+ <key>OrthogonalBarPosition</key>
+ <real>-1</real>
+ <key>Points</key>
+ <array>
+ <string>{759.34192925872742, 429.89997863769531}</string>
+ <string>{702, 384.99999999999994}</string>
+ </array>
+ <key>Style</key>
+ <dict>
+ <key>stroke</key>
+ <dict>
+ <key>HeadArrow</key>
+ <string>StickArrow</string>
+ <key>Legacy</key>
+ <true/>
+ <key>LineType</key>
+ <integer>2</integer>
+ <key>Pattern</key>
+ <integer>1</integer>
+ <key>TailArrow</key>
+ <string>0</string>
+ </dict>
+ </dict>
+ <key>Tail</key>
+ <dict>
+ <key>ID</key>
+ <integer>2048</integer>
+ <key>Info</key>
+ <integer>3</integer>
+ </dict>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{603.74580087231288, 470.3107529903566}, {80, 12}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>YES</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2117</integer>
+ <key>Line</key>
+ <dict>
+ <key>ID</key>
+ <integer>2116</integer>
+ <key>Position</key>
+ <real>0.47171458601951599</real>
+ <key>RotationType</key>
+ <integer>0</integer>
+ </dict>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
<key>shadow</key>
<dict>
<key>Draws</key>
@@ -350,20 +735,63 @@
</dict>
<key>Text</key>
<dict>
+ <key>Align</key>
+ <integer>0</integer>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc\pardirnatural
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
-\f0\fs24 \cf0 alembic.op}</string>
+\f0\fs20 \cf0 &lt;&lt;configures&gt;&gt;}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
<key>Wrap</key>
<string>NO</string>
</dict>
<dict>
+ <key>Class</key>
+ <string>LineGraphic</string>
+ <key>Head</key>
+ <dict>
+ <key>ID</key>
+ <integer>2059</integer>
+ </dict>
+ <key>ID</key>
+ <integer>2116</integer>
+ <key>Points</key>
+ <array>
+ <string>{713.35941696166992, 476.88540101271974}</string>
+ <string>{565.78369522094727, 475.66718967115884}</string>
+ </array>
+ <key>Style</key>
+ <dict>
+ <key>stroke</key>
+ <dict>
+ <key>HeadArrow</key>
+ <string>StickArrow</string>
+ <key>HopLines</key>
+ <true/>
+ <key>HopType</key>
+ <integer>102</integer>
+ <key>Legacy</key>
+ <true/>
+ <key>Pattern</key>
+ <integer>1</integer>
+ <key>TailArrow</key>
+ <string>0</string>
+ </dict>
+ </dict>
+ <key>Tail</key>
+ <dict>
+ <key>ID</key>
+ <integer>2048</integer>
+ </dict>
+ </dict>
+ <dict>
<key>Bounds</key>
- <string>{{385.25, 149.6}, {94, 14}}</string>
+ <string>{{816, 258.37493918977634}, {69, 24}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>FitText</key>
@@ -371,20 +799,73 @@
<key>Flow</key>
<string>Resize</string>
<key>ID</key>
- <integer>2042</integer>
- <key>Magnets</key>
- <array>
- <string>{0.49734, 0.0285711}</string>
- </array>
+ <integer>2113</integer>
+ <key>Line</key>
+ <dict>
+ <key>ID</key>
+ <integer>2109</integer>
+ <key>Position</key>
+ <real>0.46421170234680176</real>
+ <key>RotationType</key>
+ <integer>0</integer>
+ </dict>
<key>Shape</key>
<string>Rectangle</string>
<key>Style</key>
<dict>
- <key>fill</key>
+ <key>shadow</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ <key>stroke</key>
<dict>
<key>Draws</key>
<string>NO</string>
</dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
+
+\f0\fs20 \cf0 &lt;&lt;generates,\
+renders&gt;&gt;}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>Wrap</key>
+ <string>NO</string>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{705.05227716905051, 191.22492316822797}, {69, 24}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>YES</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2112</integer>
+ <key>Line</key>
+ <dict>
+ <key>ID</key>
+ <integer>2108</integer>
+ <key>Position</key>
+ <real>0.46593526005744934</real>
+ <key>RotationType</key>
+ <integer>0</integer>
+ </dict>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
<key>shadow</key>
<dict>
<key>Draws</key>
@@ -398,13 +879,18 @@
</dict>
<key>Text</key>
<dict>
+ <key>Align</key>
+ <integer>0</integer>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc\pardirnatural
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
-\f0\fs24 \cf0 alembic.context}</string>
+\f0\fs20 \cf0 &lt;&lt;provides\
+operations&gt;&gt;}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
<key>Wrap</key>
<string>NO</string>
@@ -415,14 +901,14 @@
<key>Head</key>
<dict>
<key>ID</key>
- <integer>2038</integer>
+ <integer>2098</integer>
</dict>
<key>ID</key>
- <integer>2040</integer>
+ <integer>2109</integer>
<key>Points</key>
<array>
- <string>{166.088, 336.6}</string>
- <string>{105.686, 336.6}</string>
+ <string>{850.5, 298.10002136230469}</string>
+ <string>{850.50001322861976, 238.37493896484375}</string>
</array>
<key>Style</key>
<dict>
@@ -430,6 +916,12 @@
<dict>
<key>HeadArrow</key>
<string>StickArrow</string>
+ <key>HopLines</key>
+ <true/>
+ <key>HopType</key>
+ <integer>102</integer>
+ <key>Legacy</key>
+ <true/>
<key>Pattern</key>
<integer>1</integer>
<key>TailArrow</key>
@@ -439,46 +931,113 @@
<key>Tail</key>
<dict>
<key>ID</key>
- <integer>41</integer>
+ <integer>2081</integer>
+ </dict>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>LineGraphic</string>
+ <key>Head</key>
+ <dict>
+ <key>ID</key>
+ <integer>38</integer>
+ </dict>
+ <key>ID</key>
+ <integer>2108</integer>
+ <key>Points</key>
+ <array>
+ <string>{781.00002098083496, 203.28096591495026}</string>
+ <string>{692.04400634765625, 203.16068579982147}</string>
+ </array>
+ <key>Style</key>
+ <dict>
+ <key>stroke</key>
+ <dict>
+ <key>HeadArrow</key>
+ <string>StickArrow</string>
+ <key>Legacy</key>
+ <true/>
+ <key>Pattern</key>
+ <integer>1</integer>
+ <key>TailArrow</key>
+ <string>0</string>
+ </dict>
+ </dict>
+ <key>Tail</key>
+ <dict>
+ <key>ID</key>
+ <integer>2098</integer>
</dict>
</dict>
<dict>
<key>Bounds</key>
- <string>{{19, 294.6}, {86.1858, 84}}</string>
+ <string>{{623.48996514081955, 291.09998092651369}, {55, 12}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>YES</string>
+ <key>Flow</key>
+ <string>Resize</string>
<key>ID</key>
- <integer>2038</integer>
+ <integer>2107</integer>
+ <key>Line</key>
+ <dict>
+ <key>ID</key>
+ <integer>2105</integer>
+ <key>Position</key>
+ <real>0.43473681807518005</real>
+ <key>RotationType</key>
+ <integer>0</integer>
+ </dict>
<key>Shape</key>
- <string>Cylinder</string>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>shadow</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ <key>stroke</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ </dict>
<key>Text</key>
<dict>
+ <key>Align</key>
+ <integer>0</integer>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc\pardirnatural
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
-\f0\fs24 \cf0 database}</string>
+\f0\fs20 \cf0 &lt;&lt;uses&gt;&gt;}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
+ <key>Wrap</key>
+ <string>NO</string>
</dict>
<dict>
<key>Bounds</key>
- <string>{{227.597, 278.569}, {55, 12}}</string>
+ <string>{{513.14304282962803, 197.37493856351756}, {55, 12}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>FitText</key>
<string>YES</string>
+ <key>Flow</key>
+ <string>Resize</string>
<key>ID</key>
- <integer>51</integer>
+ <integer>2106</integer>
<key>Line</key>
<dict>
<key>ID</key>
- <integer>50</integer>
- <key>Offset</key>
- <real>-20</real>
+ <integer>2104</integer>
<key>Position</key>
- <real>0.40689659118652344</real>
+ <real>0.3995765745639801</real>
<key>RotationType</key>
<integer>0</integer>
</dict>
@@ -502,13 +1061,17 @@
<key>Align</key>
<integer>0</integer>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\ql\qnatural
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
\f0\fs20 \cf0 &lt;&lt;uses&gt;&gt;}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
+ <key>Wrap</key>
+ <string>NO</string>
</dict>
<dict>
<key>Class</key>
@@ -517,13 +1080,25 @@
<dict>
<key>ID</key>
<integer>41</integer>
+ <key>Info</key>
+ <integer>4</integer>
</dict>
<key>ID</key>
- <integer>50</integer>
+ <integer>2105</integer>
+ <key>OrthogonalBarAutomatic</key>
+ <true/>
+ <key>OrthogonalBarPoint</key>
+ <string>{0, 0}</string>
+ <key>OrthogonalBarPosition</key>
+ <real>5.1000003814697266</real>
<key>Points</key>
<array>
- <string>{234.897, 263.6}</string>
- <string>{235.389, 315.6}</string>
+ <string>{781, 339.20153037537921}</string>
+ <string>{747, 331}</string>
+ <string>{744, 297.09998092651369}</string>
+ <string>{533, 272.33299255371094}</string>
+ <string>{526, 233}</string>
+ <string>{491.30664526513783, 232.60000610351562}</string>
</array>
<key>Style</key>
<dict>
@@ -531,6 +1106,10 @@
<dict>
<key>HeadArrow</key>
<string>StickArrow</string>
+ <key>Legacy</key>
+ <true/>
+ <key>LineType</key>
+ <integer>2</integer>
<key>Pattern</key>
<integer>1</integer>
<key>TailArrow</key>
@@ -540,26 +1119,69 @@
<key>Tail</key>
<dict>
<key>ID</key>
- <integer>33</integer>
+ <integer>2081</integer>
+ <key>Info</key>
+ <integer>2</integer>
+ </dict>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>LineGraphic</string>
+ <key>Head</key>
+ <dict>
+ <key>ID</key>
+ <integer>41</integer>
+ </dict>
+ <key>ID</key>
+ <integer>2104</integer>
+ <key>Points</key>
+ <array>
+ <string>{572.95599365234375, 203}</string>
+ <string>{492.0880126953125, 203.93833970103648}</string>
+ </array>
+ <key>Style</key>
+ <dict>
+ <key>stroke</key>
+ <dict>
+ <key>HeadArrow</key>
+ <string>StickArrow</string>
+ <key>HopLines</key>
+ <true/>
+ <key>HopType</key>
+ <integer>102</integer>
+ <key>Legacy</key>
+ <true/>
+ <key>Pattern</key>
+ <integer>1</integer>
+ <key>TailArrow</key>
+ <string>0</string>
+ </dict>
+ </dict>
+ <key>Tail</key>
+ <dict>
+ <key>ID</key>
+ <integer>38</integer>
</dict>
</dict>
<dict>
<key>Bounds</key>
- <string>{{308.265, 310.6}, {55, 12}}</string>
+ <string>{{392.47411627278478, 268.53371033283503}, {84, 12}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>FitText</key>
<string>YES</string>
+ <key>Flow</key>
+ <string>Resize</string>
<key>ID</key>
- <integer>49</integer>
+ <integer>2103</integer>
<key>Line</key>
<dict>
<key>ID</key>
- <integer>9</integer>
+ <integer>2102</integer>
<key>Offset</key>
- <real>-20</real>
+ <real>1</real>
<key>Position</key>
- <real>0.5199354887008667</real>
+ <real>0.46998947858810425</real>
<key>RotationType</key>
<integer>0</integer>
</dict>
@@ -583,13 +1205,17 @@
<key>Align</key>
<integer>0</integer>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\ql\qnatural
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
-\f0\fs20 \cf0 &lt;&lt;uses&gt;&gt;}</string>
+\f0\fs20 \cf0 &lt;&lt;instantiates&gt;&gt;}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
+ <key>Wrap</key>
+ <string>NO</string>
</dict>
<dict>
<key>Class</key>
@@ -600,11 +1226,11 @@
<integer>41</integer>
</dict>
<key>ID</key>
- <integer>9</integer>
+ <integer>2102</integer>
<key>Points</key>
<array>
- <string>{368.99, 336.6}</string>
- <string>{305.088, 336.6}</string>
+ <string>{435.00741612193735, 298.09998092651369}</string>
+ <string>{436.00000000000011, 248}</string>
</array>
<key>Style</key>
<dict>
@@ -612,6 +1238,12 @@
<dict>
<key>HeadArrow</key>
<string>StickArrow</string>
+ <key>HopLines</key>
+ <true/>
+ <key>HopType</key>
+ <integer>102</integer>
+ <key>Legacy</key>
+ <true/>
<key>Pattern</key>
<integer>1</integer>
<key>TailArrow</key>
@@ -621,17 +1253,930 @@
<key>Tail</key>
<dict>
<key>ID</key>
- <integer>38</integer>
+ <integer>33</integer>
</dict>
</dict>
<dict>
+ <key>Bounds</key>
+ <string>{{320.83625227212906, 209.28763384458864}, {55, 12}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>YES</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2101</integer>
+ <key>Line</key>
+ <dict>
+ <key>ID</key>
+ <integer>2040</integer>
+ <key>Position</key>
+ <real>0.39780238270759583</real>
+ <key>RotationType</key>
+ <integer>0</integer>
+ </dict>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>shadow</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ <key>stroke</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
+
+\f0\fs20 \cf0 &lt;&lt;uses&gt;&gt;}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>Wrap</key>
+ <string>NO</string>
+ </dict>
+ <dict>
<key>Class</key>
<string>TableGroup</string>
<key>Graphics</key>
<array>
<dict>
<key>Bounds</key>
- <string>{{166.088, 315.6}, {139, 14}}</string>
+ <string>{{781.00002098083496, 168.37493896484375}, {139, 14}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2099</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>fill</key>
+ <dict>
+ <key>GradientCenter</key>
+ <string>{-0.29411799999999999, -0.264706}</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc
+
+\f0\b\fs24 \cf0 alembic.operations.op}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{781.00002098083496, 182.37493896484375}, {139, 56}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2100</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>fill</key>
+ <dict>
+ <key>GradientCenter</key>
+ <string>{-0.29411799999999999, -0.264706}</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
+
+\f0\fs24 \cf0 CreateTableOp\
+AlterColumnOp\
+AddColumnOp\
+DropColumnOp}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
+ </dict>
+ </array>
+ <key>GridH</key>
+ <array>
+ <integer>2099</integer>
+ <integer>2100</integer>
+ <array/>
+ </array>
+ <key>GroupConnect</key>
+ <string>YES</string>
+ <key>ID</key>
+ <integer>2098</integer>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{333.24926419826539, 462.28131709379346}, {78, 12}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>YES</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2090</integer>
+ <key>Line</key>
+ <dict>
+ <key>ID</key>
+ <integer>2068</integer>
+ <key>Position</key>
+ <real>0.44118145108222961</real>
+ <key>RotationType</key>
+ <integer>0</integer>
+ </dict>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>shadow</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ <key>stroke</key>
+ <dict>
+ <key>Draws</key>
+ <string>NO</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
+
+\f0\fs20 \cf0 &lt;&lt;read/write&gt;&gt;}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>Wrap</key>
+ <string>NO</string>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>TableGroup</string>
+ <key>Graphics</key>
+ <array>
+ <dict>
+ <key>Bounds</key>
+ <string>{{781, 298.10002136230469}, {139, 14}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2082</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>fill</key>
+ <dict>
+ <key>GradientCenter</key>
+ <string>{-0.29411799999999999, -0.264706}</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc
+
+\f0\b\fs24 \cf0 alembic.autogenerate}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{781, 312.10002136230469}, {139, 70}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2083</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>fill</key>
+ <dict>
+ <key>GradientCenter</key>
+ <string>{-0.29411799999999999, -0.264706}</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
+
+\f0\fs24 \cf0 compare_metadata()\
+produce_migrations()\
+compare\
+render\
+generate}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
+ </dict>
+ </array>
+ <key>GridH</key>
+ <array>
+ <integer>2082</integer>
+ <integer>2083</integer>
+ <array/>
+ </array>
+ <key>GroupConnect</key>
+ <string>YES</string>
+ <key>ID</key>
+ <integer>2081</integer>
+ <key>Magnets</key>
+ <array>
+ <string>{0.032374100719424703, 0.5}</string>
+ <string>{-0.5071942446043165, -0.010850225176129769}</string>
+ <string>{0.52163523392711664, 0}</string>
+ <string>{0, -0.5}</string>
+ <string>{-0.5, 0.24999999999999911}</string>
+ </array>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>TableGroup</string>
+ <key>Graphics</key>
+ <array>
+ <dict>
+ <key>Bounds</key>
+ <string>{{563, 322}, {139, 14}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2073</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>fill</key>
+ <dict>
+ <key>GradientCenter</key>
+ <string>{-0.29411799999999999, -0.264706}</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc
+
+\f0\b\fs24 \cf0 alembic.command}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{563, 336}, {139, 70}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2074</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>fill</key>
+ <dict>
+ <key>GradientCenter</key>
+ <string>{-0.29411799999999999, -0.264706}</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
+
+\f0\fs24 \cf0 init()\
+revision()\
+upgrade()\
+downgrade()\
+history()}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
+ </dict>
+ </array>
+ <key>GridH</key>
+ <array>
+ <integer>2073</integer>
+ <integer>2074</integer>
+ <array/>
+ </array>
+ <key>GroupConnect</key>
+ <string>YES</string>
+ <key>ID</key>
+ <integer>2072</integer>
+ <key>Magnets</key>
+ <array>
+ <string>{0.032374100719424703, 0.5}</string>
+ <string>{-0.5071942446043165, -0.010850225176129769}</string>
+ <string>{0.26978417266187105, 0.50105453672863209}</string>
+ <string>{0.16675024238421798, -0.51583989461263036}</string>
+ <string>{0.5, 0.24999999999999911}</string>
+ <string>{0.50000000000000089, -0.010696321272922305}</string>
+ <string>{-0.50719424460431561, -0.28571428571428559}</string>
+ </array>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>LineGraphic</string>
+ <key>Head</key>
+ <dict>
+ <key>ID</key>
+ <integer>2067</integer>
+ </dict>
+ <key>ID</key>
+ <integer>2068</integer>
+ <key>Points</key>
+ <array>
+ <string>{426.78369522094727, 467.79283450278251}</string>
+ <string>{303.17371368408192, 468.90004920959467}</string>
+ </array>
+ <key>Style</key>
+ <dict>
+ <key>stroke</key>
+ <dict>
+ <key>HeadArrow</key>
+ <string>StickArrow</string>
+ <key>HopLines</key>
+ <true/>
+ <key>HopType</key>
+ <integer>102</integer>
+ <key>Legacy</key>
+ <true/>
+ <key>Pattern</key>
+ <integer>1</integer>
+ <key>TailArrow</key>
+ <string>0</string>
+ </dict>
+ </dict>
+ <key>Tail</key>
+ <dict>
+ <key>ID</key>
+ <integer>2059</integer>
+ </dict>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>Group</string>
+ <key>Graphics</key>
+ <array>
+ <dict>
+ <key>Bounds</key>
+ <string>{{218.92971038818359, 448.71651649475098}, {74.487998962402344, 46}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>FontInfo</key>
+ <dict>
+ <key>Font</key>
+ <string>Helvetica</string>
+ <key>Size</key>
+ <real>10</real>
+ </dict>
+ <key>ID</key>
+ <integer>2066</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict/>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Pad</key>
+ <integer>1</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural
+
+\f0\fs20 \cf0 \expnd0\expndtw0\kerning0
+/versions/a.py\
+/versions/b.py\
+/versions/...}</string>
+ </dict>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{209.17371368408203, 424.9000186920166}, {94, 84}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>ID</key>
+ <integer>2067</integer>
+ <key>Magnets</key>
+ <array>
+ <string>{0.49999999999999911, -0.30952344621930905}</string>
+ <string>{0.49999999999999911, 0.023809887114024875}</string>
+ </array>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict/>
+ <key>Text</key>
+ <dict>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\fs24 \cf0 filesystem}</string>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
+ </dict>
+ </array>
+ <key>ID</key>
+ <integer>2065</integer>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>TableGroup</string>
+ <key>Graphics</key>
+ <array>
+ <dict>
+ <key>Bounds</key>
+ <string>{{426.78369522094727, 442.76912879943848}, {139, 14}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2060</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>fill</key>
+ <dict>
+ <key>GradientCenter</key>
+ <string>{-0.29411799999999999, -0.264706}</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc
+
+\f0\b\fs24 \cf0 ScriptDirectory}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{426.78369522094727, 456.76912879943848}, {139, 42}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2061</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>fill</key>
+ <dict>
+ <key>GradientCenter</key>
+ <string>{-0.29411799999999999, -0.264706}</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
+
+\f0\fs24 \cf0 walk_revisions()\
+get_revision()\
+generate_revision()}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
+ </dict>
+ </array>
+ <key>GridH</key>
+ <array>
+ <integer>2060</integer>
+ <integer>2061</integer>
+ <array/>
+ </array>
+ <key>GroupConnect</key>
+ <string>YES</string>
+ <key>ID</key>
+ <integer>2059</integer>
+ <key>Magnets</key>
+ <array>
+ <string>{0.51040606996823534, 0.089285714285713524}</string>
+ <string>{0.25000000000000044, -0.50000000000000089}</string>
+ <string>{-0.50398241924039766, -0.053571428571430602}</string>
+ <string>{-0.00038529693823985411, 0.5357142857142847}</string>
+ <string>{0.5015561494895886, -0.29944872856140314}</string>
+ </array>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>LineGraphic</string>
+ <key>Head</key>
+ <dict>
+ <key>ID</key>
+ <integer>2038</integer>
+ </dict>
+ <key>ID</key>
+ <integer>2058</integer>
+ <key>Points</key>
+ <array>
+ <string>{259.5464429157899, 256.16651000976572}</string>
+ <string>{259.5464429157899, 299.49998778426624}</string>
+ </array>
+ <key>Style</key>
+ <dict>
+ <key>stroke</key>
+ <dict>
+ <key>HeadArrow</key>
+ <string>StickArrow</string>
+ <key>Legacy</key>
+ <true/>
+ <key>Pattern</key>
+ <integer>1</integer>
+ <key>TailArrow</key>
+ <string>0</string>
+ </dict>
+ </dict>
+ <key>Tail</key>
+ <dict>
+ <key>ID</key>
+ <integer>2055</integer>
+ </dict>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>TableGroup</string>
+ <key>Graphics</key>
+ <array>
+ <dict>
+ <key>Bounds</key>
+ <string>{{208.09290313720703, 172.16651000976572}, {102.90709686279297, 14}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2056</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>fill</key>
+ <dict>
+ <key>GradientCenter</key>
+ <string>{-0.29411799999999999, -0.264706}</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc
+
+\f0\b\fs24 \cf0 DefaultImpl}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{208.09290313720703, 186.16651000976572}, {102.90709686279297, 70}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2057</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>fill</key>
+ <dict>
+ <key>GradientCenter</key>
+ <string>{-0.29411799999999999, -0.264706}</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
+
+\f0\fs24 \cf0 execute()\
+create_table()\
+alter_column()\
+add_column()\
+drop_column()}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
+ </dict>
+ </array>
+ <key>GridH</key>
+ <array>
+ <integer>2056</integer>
+ <integer>2057</integer>
+ <array/>
+ </array>
+ <key>GroupConnect</key>
+ <string>YES</string>
+ <key>ID</key>
+ <integer>2055</integer>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>TableGroup</string>
+ <key>Graphics</key>
+ <array>
+ <dict>
+ <key>Bounds</key>
+ <string>{{713.35941696166992, 429.89997863769531}, {119.0880126953125, 14}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2049</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>fill</key>
+ <dict>
+ <key>GradientCenter</key>
+ <string>{-0.29411799999999999, -0.264706}</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc
+
+\f0\b\fs24 \cf0 alembic.config}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{713.35941696166992, 443.89997863769531}, {119.0880126953125, 42}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>FitText</key>
+ <string>Vertical</string>
+ <key>Flow</key>
+ <string>Resize</string>
+ <key>ID</key>
+ <integer>2050</integer>
+ <key>Shape</key>
+ <string>Rectangle</string>
+ <key>Style</key>
+ <dict>
+ <key>fill</key>
+ <dict>
+ <key>GradientCenter</key>
+ <string>{-0.29411799999999999, -0.264706}</string>
+ </dict>
+ </dict>
+ <key>Text</key>
+ <dict>
+ <key>Align</key>
+ <integer>0</integer>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
+
+\f0\fs24 \cf0 Config\
+Command\
+main()}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ <key>TextPlacement</key>
+ <integer>0</integer>
+ </dict>
+ </array>
+ <key>GridH</key>
+ <array>
+ <integer>2049</integer>
+ <integer>2050</integer>
+ <array/>
+ </array>
+ <key>GroupConnect</key>
+ <string>YES</string>
+ <key>ID</key>
+ <integer>2048</integer>
+ <key>Magnets</key>
+ <array>
+ <string>{0.5, -4.4408920985006262e-16}</string>
+ <string>{-0.5, -0.25000000000000178}</string>
+ <string>{-0.1138779104937786, -0.5}</string>
+ <string>{-0.49999999999999911, 0.33902539955400712}</string>
+ </array>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>LineGraphic</string>
+ <key>Head</key>
+ <dict>
+ <key>ID</key>
+ <integer>2055</integer>
+ </dict>
+ <key>ID</key>
+ <integer>2040</integer>
+ <key>Points</key>
+ <array>
+ <string>{373, 215.59905413254651}</string>
+ <string>{311, 214.81620239134219}</string>
+ </array>
+ <key>Style</key>
+ <dict>
+ <key>stroke</key>
+ <dict>
+ <key>HeadArrow</key>
+ <string>StickArrow</string>
+ <key>Legacy</key>
+ <true/>
+ <key>Pattern</key>
+ <integer>1</integer>
+ <key>TailArrow</key>
+ <string>0</string>
+ </dict>
+ </dict>
+ <key>Tail</key>
+ <dict>
+ <key>ID</key>
+ <integer>41</integer>
+ </dict>
+ </dict>
+ <dict>
+ <key>Bounds</key>
+ <string>{{216.45355606079102, 299.9999877929688}, {86.1858, 84}}</string>
+ <key>Class</key>
+ <string>ShapedGraphic</string>
+ <key>ID</key>
+ <integer>2038</integer>
+ <key>Shape</key>
+ <string>Cylinder</string>
+ <key>Style</key>
+ <dict/>
+ <key>Text</key>
+ <dict>
+ <key>Text</key>
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\fs24 \cf0 database}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
+ </dict>
+ </dict>
+ <dict>
+ <key>Class</key>
+ <string>TableGroup</string>
+ <key>Graphics</key>
+ <array>
+ <dict>
+ <key>Bounds</key>
+ <string>{{373, 180.20000610351565}, {119.0880126953125, 14}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>FitText</key>
@@ -646,28 +2191,28 @@
<dict>
<key>fill</key>
<dict>
- <key>GradientAngle</key>
- <real>304</real>
<key>GradientCenter</key>
- <string>{-0.294118, -0.264706}</string>
+ <string>{-0.29411799999999999, -0.264706}</string>
</dict>
</dict>
<key>Text</key>
<dict>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc
\f0\b\fs24 \cf0 MigrationContext}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
<key>TextPlacement</key>
<integer>0</integer>
</dict>
<dict>
<key>Bounds</key>
- <string>{{166.088, 329.6}, {139, 28}}</string>
+ <string>{{373, 194.20000610351565}, {119.0880126953125, 56}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>FitText</key>
@@ -682,10 +2227,8 @@
<dict>
<key>fill</key>
<dict>
- <key>GradientAngle</key>
- <real>304</real>
<key>GradientCenter</key>
- <string>{-0.294118, -0.264706}</string>
+ <string>{-0.29411799999999999, -0.264706}</string>
</dict>
</dict>
<key>Text</key>
@@ -693,13 +2236,17 @@
<key>Align</key>
<integer>0</integer>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\ql\qnatural\pardirnatural
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural
\f0\fs24 \cf0 connection\
-run_migrations()}</string>
+run_migrations()\
+execute()\
+stamp()}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
<key>TextPlacement</key>
<integer>0</integer>
@@ -715,6 +2262,14 @@ run_migrations()}</string>
<string>YES</string>
<key>ID</key>
<integer>41</integer>
+ <key>Magnets</key>
+ <array>
+ <string>{0.5, -0.16088094860684521}</string>
+ <string>{0.0042301604752394972, -0.5514285714285716}</string>
+ <string>{-0.49936690654431892, 0.0057142857142853387}</string>
+ <string>{0.49343873986566722, 0.24857142857142822}</string>
+ <string>{0.029020499831381219, 0.46857134137834766}</string>
+ </array>
</dict>
<dict>
<key>Class</key>
@@ -723,7 +2278,7 @@ run_migrations()}</string>
<array>
<dict>
<key>Bounds</key>
- <string>{{368.99, 294.6}, {139, 14}}</string>
+ <string>{{572.95599365234375, 175.59130477905273}, {119.0880126953125, 14}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>FitText</key>
@@ -738,28 +2293,28 @@ run_migrations()}</string>
<dict>
<key>fill</key>
<dict>
- <key>GradientAngle</key>
- <real>304</real>
<key>GradientCenter</key>
- <string>{-0.294118, -0.264706}</string>
+ <string>{-0.29411799999999999, -0.264706}</string>
</dict>
</dict>
<key>Text</key>
<dict>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc
\f0\b\fs24 \cf0 Operations}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
<key>TextPlacement</key>
<integer>0</integer>
</dict>
<dict>
<key>Bounds</key>
- <string>{{368.99, 308.6}, {139, 70}}</string>
+ <string>{{572.95599365234375, 189.59130477905273}, {119.0880126953125, 70}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>FitText</key>
@@ -774,10 +2329,8 @@ run_migrations()}</string>
<dict>
<key>fill</key>
<dict>
- <key>GradientAngle</key>
- <real>304</real>
<key>GradientCenter</key>
- <string>{-0.294118, -0.264706}</string>
+ <string>{-0.29411799999999999, -0.264706}</string>
</dict>
</dict>
<key>Text</key>
@@ -785,16 +2338,18 @@ run_migrations()}</string>
<key>Align</key>
<integer>0</integer>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\ql\qnatural
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
\f0\fs24 \cf0 migration_context\
create_table()\
alter_column()\
add_column()\
drop_column()}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
<key>TextPlacement</key>
<integer>0</integer>
@@ -810,6 +2365,10 @@ drop_column()}</string>
<string>YES</string>
<key>ID</key>
<integer>38</integer>
+ <key>Magnets</key>
+ <array>
+ <string>{-0.49999999999999911, -0.17370600927443736}</string>
+ </array>
</dict>
<dict>
<key>Class</key>
@@ -818,7 +2377,7 @@ drop_column()}</string>
<array>
<dict>
<key>Bounds</key>
- <string>{{165, 179.6}, {139, 14}}</string>
+ <string>{{367.95599365234375, 298.09998092651369}, {129.176025390625, 14.000003814697266}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>FitText</key>
@@ -833,28 +2392,28 @@ drop_column()}</string>
<dict>
<key>fill</key>
<dict>
- <key>GradientAngle</key>
- <real>304</real>
<key>GradientCenter</key>
- <string>{-0.294118, -0.264706}</string>
+ <string>{-0.29411799999999999, -0.264706}</string>
</dict>
</dict>
<key>Text</key>
<dict>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\qc
\f0\b\fs24 \cf0 EnvironmentContext}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
<key>TextPlacement</key>
<integer>0</integer>
</dict>
<dict>
<key>Bounds</key>
- <string>{{165, 193.6}, {139, 70}}</string>
+ <string>{{367.95599365234375, 312.09998855590823}, {129.176025390625, 70.000015258789062}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>FitText</key>
@@ -869,10 +2428,8 @@ drop_column()}</string>
<dict>
<key>fill</key>
<dict>
- <key>GradientAngle</key>
- <real>304</real>
<key>GradientCenter</key>
- <string>{-0.294118, -0.264706}</string>
+ <string>{-0.29411799999999999, -0.264706}</string>
</dict>
</dict>
<key>Text</key>
@@ -880,16 +2437,18 @@ drop_column()}</string>
<key>Align</key>
<integer>0</integer>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\ql\qnatural
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720
\f0\fs24 \cf0 migration_context\
configure()\
run_migrations()\
begin_transaction()\
is_offline_mode()}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
<key>TextPlacement</key>
<integer>0</integer>
@@ -905,10 +2464,16 @@ is_offline_mode()}</string>
<string>YES</string>
<key>ID</key>
<integer>33</integer>
+ <key>Magnets</key>
+ <array>
+ <string>{0.5, -0.14544617445169949}</string>
+ <string>{0.019251798561151112, 0.50476190476190474}</string>
+ <string>{0.019070177820008194, -0.49999999999999956}</string>
+ </array>
</dict>
<dict>
<key>Bounds</key>
- <string>{{153.176, 149.6}, {164.824, 255}}</string>
+ <string>{{350, 148.9999938964844}, {164.82400000000001, 255.60000610351562}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>ID</key>
@@ -951,12 +2516,14 @@ is_offline_mode()}</string>
<key>Align</key>
<integer>0</integer>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\ql\qnatural\pardirnatural
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural
\f0\fs24 \cf0 env.py script}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
<key>TextPlacement</key>
<integer>0</integer>
@@ -965,11 +2532,16 @@ is_offline_mode()}</string>
</dict>
<dict>
<key>Bounds</key>
- <string>{{343.99, 259.266}, {189, 145.334}}</string>
+ <string>{{552, 149}, {169, 130.33299255371094}}</string>
<key>Class</key>
<string>ShapedGraphic</string>
<key>ID</key>
<integer>2032</integer>
+ <key>Magnets</key>
+ <array>
+ <string>{-0.43313956596913394, 0.50000000000000044}</string>
+ <string>{0.014211640211639676, 0.49587157857074082}</string>
+ </array>
<key>Shape</key>
<string>Rectangle</string>
<key>Style</key>
@@ -1008,12 +2580,14 @@ is_offline_mode()}</string>
<key>Align</key>
<integer>0</integer>
<key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+ <string>{\rtf1\ansi\ansicpg1252\cocoartf1347\cocoasubrtf570
+\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\ql\qnatural\pardirnatural
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural
\f0\fs24 \cf0 migration script}</string>
+ <key>VerticalPad</key>
+ <integer>0</integer>
</dict>
<key>TextPlacement</key>
<integer>0</integer>
@@ -1021,61 +2595,51 @@ is_offline_mode()}</string>
<string>NO</string>
</dict>
<dict>
- <key>Bounds</key>
- <string>{{138.176, 127.6}, {420.824, 293.4}}</string>
<key>Class</key>
- <string>ShapedGraphic</string>
+ <string>LineGraphic</string>
+ <key>Head</key>
+ <dict>
+ <key>ID</key>
+ <integer>2048</integer>
+ </dict>
<key>ID</key>
- <integer>2037</integer>
- <key>Shape</key>
- <string>Rectangle</string>
+ <integer>2139</integer>
+ <key>OrthogonalBarAutomatic</key>
+ <true/>
+ <key>OrthogonalBarPoint</key>
+ <string>{0, 0}</string>
+ <key>OrthogonalBarPosition</key>
+ <real>-1</real>
+ <key>Points</key>
+ <array>
+ <string>{435.00741612193735, 382.10000381469729}</string>
+ <string>{548, 421.9000186920166}</string>
+ <string>{601.38076234099412, 436}</string>
+ <string>{713.35941696166992, 443.8999786376952}</string>
+ </array>
<key>Style</key>
<dict>
- <key>fill</key>
- <dict>
- <key>Draws</key>
- <string>NO</string>
- </dict>
- <key>shadow</key>
- <dict>
- <key>Draws</key>
- <string>NO</string>
- <key>Fuzziness</key>
- <real>0.0</real>
- </dict>
<key>stroke</key>
<dict>
- <key>Color</key>
- <dict>
- <key>b</key>
- <string>0.191506</string>
- <key>g</key>
- <string>0.389204</string>
- <key>r</key>
- <string>0.744565</string>
- </dict>
- <key>CornerRadius</key>
- <real>5</real>
+ <key>HeadArrow</key>
+ <string>StickArrow</string>
+ <key>Legacy</key>
+ <true/>
+ <key>LineType</key>
+ <integer>2</integer>
<key>Pattern</key>
<integer>1</integer>
+ <key>TailArrow</key>
+ <string>0</string>
</dict>
</dict>
- <key>Text</key>
+ <key>Tail</key>
<dict>
- <key>Align</key>
- <integer>0</integer>
- <key>Text</key>
- <string>{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
-{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
-{\colortbl;\red255\green255\blue255;}
-\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\ql\qnatural\pardirnatural
-
-\f0\fs24 \cf0 alembic command}</string>
+ <key>ID</key>
+ <integer>33</integer>
+ <key>Info</key>
+ <integer>2</integer>
</dict>
- <key>TextPlacement</key>
- <integer>0</integer>
- <key>Wrap</key>
- <string>NO</string>
</dict>
</array>
<key>GridInfo</key>
@@ -1085,11 +2649,9 @@ is_offline_mode()}</string>
<key>GuidesVisible</key>
<string>YES</string>
<key>HPages</key>
- <integer>1</integer>
+ <integer>2</integer>
<key>ImageCounter</key>
<integer>1</integer>
- <key>IsPalette</key>
- <string>NO</string>
<key>KeepToScale</key>
<false/>
<key>Layers</key>
@@ -1106,78 +2668,28 @@ is_offline_mode()}</string>
</dict>
</array>
<key>LayoutInfo</key>
- <dict/>
+ <dict>
+ <key>Animate</key>
+ <string>NO</string>
+ <key>circoMinDist</key>
+ <real>18</real>
+ <key>circoSeparation</key>
+ <real>0.0</real>
+ <key>layoutEngine</key>
+ <string>dot</string>
+ <key>neatoSeparation</key>
+ <real>0.0</real>
+ <key>twopiSeparation</key>
+ <real>0.0</real>
+ </dict>
<key>LinksVisible</key>
<string>NO</string>
<key>MagnetsVisible</key>
<string>NO</string>
- <key>MasterSheet</key>
- <string>Master 1</string>
<key>MasterSheets</key>
- <array>
- <dict>
- <key>ActiveLayerIndex</key>
- <integer>0</integer>
- <key>AutoAdjust</key>
- <true/>
- <key>CanvasColor</key>
- <dict>
- <key>w</key>
- <string>1</string>
- </dict>
- <key>CanvasOrigin</key>
- <string>{0, 0}</string>
- <key>CanvasScale</key>
- <real>1</real>
- <key>ColumnAlign</key>
- <integer>1</integer>
- <key>ColumnSpacing</key>
- <real>36</real>
- <key>DisplayScale</key>
- <string>1 in = 1 in</string>
- <key>GraphicsList</key>
- <array/>
- <key>GridInfo</key>
- <dict/>
- <key>HPages</key>
- <integer>1</integer>
- <key>IsPalette</key>
- <string>NO</string>
- <key>KeepToScale</key>
- <false/>
- <key>Layers</key>
- <array>
- <dict>
- <key>Lock</key>
- <string>NO</string>
- <key>Name</key>
- <string>Layer 1</string>
- <key>Print</key>
- <string>YES</string>
- <key>View</key>
- <string>YES</string>
- </dict>
- </array>
- <key>LayoutInfo</key>
- <dict/>
- <key>Orientation</key>
- <integer>2</integer>
- <key>OutlineStyle</key>
- <string>Basic</string>
- <key>RowAlign</key>
- <integer>1</integer>
- <key>RowSpacing</key>
- <real>36</real>
- <key>SheetTitle</key>
- <string>Master 1</string>
- <key>UniqueID</key>
- <integer>1</integer>
- <key>VPages</key>
- <integer>1</integer>
- </dict>
- </array>
+ <array/>
<key>ModificationDate</key>
- <string>2012-01-24 17:59:01 -0500</string>
+ <string>2015-07-02 23:12:07 +0000</string>
<key>Modifier</key>
<string>classic</string>
<key>NotesVisible</key>
@@ -1189,35 +2701,47 @@ is_offline_mode()}</string>
<key>OutlineStyle</key>
<string>Basic</string>
<key>PageBreaks</key>
- <string>YES</string>
+ <string>NO</string>
<key>PrintInfo</key>
<dict>
<key>NSBottomMargin</key>
<array>
+ <string>float</string>
+ <string>12</string>
+ </array>
+ <key>NSHorizonalPagination</key>
+ <array>
<string>coded</string>
- <string>BAtzdHJlYW10eXBlZIHoA4QBQISEhAhOU051bWJlcgCEhAdOU1ZhbHVlAISECE5TT2JqZWN0AIWEASqEhAFklwCG</string>
+ <string>BAtzdHJlYW10eXBlZIHoA4QBQISEhAhOU051bWJlcgCEhAdOU1ZhbHVlAISECE5TT2JqZWN0AIWEASqEhAFxlwCG</string>
</array>
<key>NSLeftMargin</key>
<array>
- <string>coded</string>
- <string>BAtzdHJlYW10eXBlZIHoA4QBQISEhAhOU051bWJlcgCEhAdOU1ZhbHVlAISECE5TT2JqZWN0AIWEASqEhAFklwCG</string>
+ <string>float</string>
+ <string>12</string>
</array>
<key>NSPaperSize</key>
<array>
<string>size</string>
<string>{612, 792}</string>
</array>
+ <key>NSPrintReverseOrientation</key>
+ <array>
+ <string>int</string>
+ <string>0</string>
+ </array>
<key>NSRightMargin</key>
<array>
- <string>coded</string>
- <string>BAtzdHJlYW10eXBlZIHoA4QBQISEhAhOU051bWJlcgCEhAdOU1ZhbHVlAISECE5TT2JqZWN0AIWEASqEhAFklwCG</string>
+ <string>float</string>
+ <string>12</string>
</array>
<key>NSTopMargin</key>
<array>
- <string>coded</string>
- <string>BAtzdHJlYW10eXBlZIHoA4QBQISEhAhOU051bWJlcgCEhAdOU1ZhbHVlAISECE5TT2JqZWN0AIWEASqEhAFklwCG</string>
+ <string>float</string>
+ <string>12</string>
</array>
</dict>
+ <key>PrintOnePage</key>
+ <false/>
<key>ReadOnly</key>
<string>NO</string>
<key>RowAlign</key>
@@ -1239,25 +2763,33 @@ is_offline_mode()}</string>
<key>WindowInfo</key>
<dict>
<key>CurrentSheet</key>
- <string>0</string>
- <key>DrawerOpen</key>
+ <integer>0</integer>
+ <key>ExpandedCanvases</key>
+ <array/>
+ <key>Frame</key>
+ <string>{{130, 128}, {1193, 852}}</string>
+ <key>ListView</key>
<false/>
- <key>DrawerTab</key>
- <string>Outline</string>
- <key>DrawerWidth</key>
- <real>209</real>
- <key>FitInWindow</key>
+ <key>OutlineWidth</key>
+ <integer>142</integer>
+ <key>RightSidebar</key>
<false/>
- <key>Frame</key>
- <string>{{335, 211}, {760, 817}}</string>
- <key>ShowRuler</key>
+ <key>Sidebar</key>
<false/>
- <key>ShowStatusBar</key>
- <true/>
+ <key>SidebarWidth</key>
+ <integer>138</integer>
<key>VisibleRegion</key>
- <string>{{-84, 0}, {745, 703}}</string>
+ <string>{{-8, 1}, {1193, 755}}</string>
<key>Zoom</key>
- <string>1</string>
+ <real>1</real>
+ <key>ZoomValues</key>
+ <array>
+ <array>
+ <string>Canvas 1</string>
+ <real>1</real>
+ <real>1</real>
+ </array>
+ </array>
</dict>
</dict>
</plist>
diff --git a/docs/build/changelog.rst b/docs/build/changelog.rst
index 193c87f..8fd6293 100644
--- a/docs/build/changelog.rst
+++ b/docs/build/changelog.rst
@@ -4,6 +4,44 @@ Changelog
==========
.. changelog::
+ :version: 0.8.0
+
+ .. change::
+ :tags: feature, operations
+ :tickets: 302
+
+ The internal system for Alembic operations has been reworked to now
+ build upon an extensible system of operation objects. New operations
+ can be added to the ``op.`` namespace, including that they are
+ available in custom autogenerate schemes.
+
+ .. seealso::
+
+ :ref:`operation_plugins`
+
+ .. change::
+ :tags: feature, autogenerate
+ :tickets: 301
+
+ The internal system for autogenerate been reworked to build upon
+ the extensible system of operation objects present in
+ :ticket:`302`. As part of this change, autogenerate now produces
+ a full object graph representing a list of migration scripts to
+ be written as well as operation objects that will render all the
+ Python code within them; a new hook
+ :paramref:`.EnvironmentContext.configure.process_revision_directives`
+ allows end-user code to fully customize what autogenerate will do,
+ including not just full manipulation of the Python steps to take
+ but also what file or files will be written and where. It is also
+ possible to write a system that reads an autogenerate stream and
+ invokes it directly against a database without writing any files.
+
+ .. seealso::
+
+ :ref:`alembic.autogenerate.toplevel`
+
+
+.. changelog::
:version: 0.7.7
.. change::
diff --git a/docs/build/cookbook.rst b/docs/build/cookbook.rst
index 8c1e0d7..541f595 100644
--- a/docs/build/cookbook.rst
+++ b/docs/build/cookbook.rst
@@ -193,7 +193,7 @@ Sharing a Connection with a Series of Migration Commands and Environments
=========================================================================
It is often the case that an application will need to call upon a series
-of commands within :mod:`alembic.command`, where it would be advantageous
+of commands within :ref:`alembic.command.toplevel`, where it would be advantageous
for all operations to proceed along a single transaction. The connectivity
for a migration is typically solely determined within the ``env.py`` script
of a migration environment, which is called within the scope of a command.
diff --git a/docs/build/front.rst b/docs/build/front.rst
index 3270f5c..6e28419 100644
--- a/docs/build/front.rst
+++ b/docs/build/front.rst
@@ -49,25 +49,19 @@ then proceed through the usage of this command.
Dependencies
------------
-Alembic's install process will ensure that `SQLAlchemy <http://www.sqlalchemy.org>`_
+Alembic's install process will ensure that SQLAlchemy_
is installed, in addition to other dependencies. Alembic will work with
-SQLAlchemy as of version **0.7.3**. The latest version of SQLAlchemy within
-the **0.7**, **0.8**, or more recent series is strongly recommended.
+SQLAlchemy as of version **0.7.3**, however more features are available with
+newer versions such as the 0.9 or 1.0 series.
Alembic supports Python versions 2.6 and above.
-.. versionchanged:: 0.5.0
- Support for SQLAlchemy 0.6 has been dropped.
-
-.. versionchanged:: 0.6.0
- Now supporting Python 2.6 and above.
-
Community
=========
Alembic is developed by `Mike Bayer <http://techspot.zzzeek.org>`_, and is
-loosely associated with the `SQLAlchemy <http://www.sqlalchemy.org/>`_ and `Pylons <http://www.pylonsproject.org>`_
-projects.
+loosely associated with the SQLAlchemy_, `Pylons <http://www.pylonsproject.org>`_,
+and `Openstack <http://www.openstack.org>`_ projects.
User issues, discussion of potential bugs and features should be posted
to the Alembic Google Group at `sqlalchemy-alembic <https://groups.google.com/group/sqlalchemy-alembic>`_.
@@ -78,3 +72,6 @@ Bugs
====
Bugs and feature enhancements to Alembic should be reported on the `Bitbucket
issue tracker <https://bitbucket.org/zzzeek/alembic/issues?status=new&status=open>`_.
+
+
+.. _SQLAlchemy: http://www.sqlalchemy.org \ No newline at end of file
diff --git a/docs/build/index.rst b/docs/build/index.rst
index de18f9e..17ffc06 100644
--- a/docs/build/index.rst
+++ b/docs/build/index.rst
@@ -6,7 +6,7 @@ Welcome to Alembic's documentation!
with the `SQLAlchemy <http://www.sqlalchemy.org>`_ Database Toolkit for Python.
.. toctree::
- :maxdepth: 2
+ :maxdepth: 3
front
tutorial
@@ -17,7 +17,7 @@ with the `SQLAlchemy <http://www.sqlalchemy.org>`_ Database Toolkit for Python.
branches
ops
cookbook
- api
+ api/index
changelog
Indices and tables
diff --git a/docs/build/ops.rst b/docs/build/ops.rst
index 1df9d27..49aaef5 100644
--- a/docs/build/ops.rst
+++ b/docs/build/ops.rst
@@ -7,8 +7,8 @@ Operation Reference
This file provides documentation on Alembic migration directives.
The directives here are used within user-defined migration files,
-within the ``upgrade()`` and ``downgrade()`` functions, as well as
-any functions further invoked by those.
+within the ``upgrade()`` and ``downgrade()`` functions, as well as
+any functions further invoked by those.
All directives exist as methods on a class called :class:`.Operations`.
When migration scripts are run, this object is made available
@@ -18,12 +18,15 @@ Currently, ``alembic.op`` is a real Python module, populated
with individual proxies for each method on :class:`.Operations`,
so symbols can be imported safely from the ``alembic.op`` namespace.
-A key design philosophy to the :mod:`alembic.operations` methods is that
-to the greatest degree possible, they internally generate the
+The :class:`.Operations` system is also fully extensible. See
+:ref:`operation_plugins` for details on this.
+
+A key design philosophy to the :ref:`alembic.operations.toplevel` methods is that
+to the greatest degree possible, they internally generate the
appropriate SQLAlchemy metadata, typically involving
:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.Constraint`
-objects. This so that migration instructions can be
-given in terms of just the string names and/or flags involved.
+objects. This so that migration instructions can be
+given in terms of just the string names and/or flags involved.
The exceptions to this
rule include the :meth:`~.Operations.add_column` and :meth:`~.Operations.create_table`
directives, which require full :class:`~sqlalchemy.schema.Column`
@@ -36,6 +39,5 @@ circumstances they are called from an actual migration script, which
itself would be invoked by the :meth:`.EnvironmentContext.run_migrations`
method.
-
.. automodule:: alembic.operations
- :members:
+ :members: Operations, BatchOperations
diff --git a/tests/_autogen_fixtures.py b/tests/_autogen_fixtures.py
new file mode 100644
index 0000000..7ef6cbf
--- /dev/null
+++ b/tests/_autogen_fixtures.py
@@ -0,0 +1,251 @@
+from sqlalchemy import MetaData, Column, Table, Integer, String, Text, \
+ Numeric, CHAR, ForeignKey, Index, UniqueConstraint, CheckConstraint, text
+from sqlalchemy.engine.reflection import Inspector
+
+from alembic import autogenerate
+from alembic.migration import MigrationContext
+from alembic.testing import config
+from alembic.testing.env import staging_env, clear_staging_env
+from alembic.testing import eq_
+from alembic.ddl.base import _fk_spec
+
+names_in_this_test = set()
+
+from sqlalchemy import event
+
+
+@event.listens_for(Table, "after_parent_attach")
+def new_table(table, parent):
+ names_in_this_test.add(table.name)
+
+
+def _default_include_object(obj, name, type_, reflected, compare_to):
+ if type_ == "table":
+ return name in names_in_this_test
+ else:
+ return True
+
+_default_object_filters = [
+ _default_include_object
+]
+
+
+class ModelOne(object):
+ __requires__ = ('unique_constraint_reflection', )
+
+ schema = None
+
+ @classmethod
+ def _get_db_schema(cls):
+ schema = cls.schema
+
+ m = MetaData(schema=schema)
+
+ Table('user', m,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(50)),
+ Column('a1', Text),
+ Column("pw", String(50)),
+ Index('pw_idx', 'pw')
+ )
+
+ Table('address', m,
+ Column('id', Integer, primary_key=True),
+ Column('email_address', String(100), nullable=False),
+ )
+
+ Table('order', m,
+ Column('order_id', Integer, primary_key=True),
+ Column("amount", Numeric(8, 2), nullable=False,
+ server_default=text("0")),
+ CheckConstraint('amount >= 0', name='ck_order_amount')
+ )
+
+ Table('extra', m,
+ Column("x", CHAR),
+ Column('uid', Integer, ForeignKey('user.id'))
+ )
+
+ return m
+
+ @classmethod
+ def _get_model_schema(cls):
+ schema = cls.schema
+
+ m = MetaData(schema=schema)
+
+ Table('user', m,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(50), nullable=False),
+ Column('a1', Text, server_default="x")
+ )
+
+ Table('address', m,
+ Column('id', Integer, primary_key=True),
+ Column('email_address', String(100), nullable=False),
+ Column('street', String(50)),
+ UniqueConstraint('email_address', name="uq_email")
+ )
+
+ Table('order', m,
+ Column('order_id', Integer, primary_key=True),
+ Column('amount', Numeric(10, 2), nullable=True,
+ server_default=text("0")),
+ Column('user_id', Integer, ForeignKey('user.id')),
+ CheckConstraint('amount > -1', name='ck_order_amount'),
+ )
+
+ Table('item', m,
+ Column('id', Integer, primary_key=True),
+ Column('description', String(100)),
+ Column('order_id', Integer, ForeignKey('order.order_id')),
+ CheckConstraint('len(description) > 5')
+ )
+ return m
+
+
+class _ComparesFKs(object):
+ def _assert_fk_diff(
+ self, diff, type_, source_table, source_columns,
+ target_table, target_columns, name=None, conditional_name=None,
+ source_schema=None):
+ # the public API for ForeignKeyConstraint was not very rich
+ # in 0.7, 0.8, so here we use the well-known but slightly
+ # private API to get at its elements
+ (fk_source_schema, fk_source_table,
+ fk_source_columns, fk_target_schema, fk_target_table,
+ fk_target_columns) = _fk_spec(diff[1])
+
+ eq_(diff[0], type_)
+ eq_(fk_source_table, source_table)
+ eq_(fk_source_columns, source_columns)
+ eq_(fk_target_table, target_table)
+ eq_(fk_source_schema, source_schema)
+
+ eq_([elem.column.name for elem in diff[1].elements],
+ target_columns)
+ if conditional_name is not None:
+ if config.requirements.no_fk_names.enabled:
+ eq_(diff[1].name, None)
+ elif conditional_name == 'servergenerated':
+ fks = Inspector.from_engine(self.bind).\
+ get_foreign_keys(source_table)
+ server_fk_name = fks[0]['name']
+ eq_(diff[1].name, server_fk_name)
+ else:
+ eq_(diff[1].name, conditional_name)
+ else:
+ eq_(diff[1].name, name)
+
+
+class AutogenTest(_ComparesFKs):
+
+ def _flatten_diffs(self, diffs):
+ for d in diffs:
+ if isinstance(d, list):
+ for fd in self._flatten_diffs(d):
+ yield fd
+ else:
+ yield d
+
+ @classmethod
+ def _get_bind(cls):
+ return config.db
+
+ configure_opts = {}
+
+ @classmethod
+ def setup_class(cls):
+ staging_env()
+ cls.bind = cls._get_bind()
+ cls.m1 = cls._get_db_schema()
+ cls.m1.create_all(cls.bind)
+ cls.m2 = cls._get_model_schema()
+
+ @classmethod
+ def teardown_class(cls):
+ cls.m1.drop_all(cls.bind)
+ clear_staging_env()
+
+ def setUp(self):
+ self.conn = conn = self.bind.connect()
+ ctx_opts = {
+ 'compare_type': True,
+ 'compare_server_default': True,
+ 'target_metadata': self.m2,
+ 'upgrade_token': "upgrades",
+ 'downgrade_token': "downgrades",
+ 'alembic_module_prefix': 'op.',
+ 'sqlalchemy_module_prefix': 'sa.',
+ }
+ if self.configure_opts:
+ ctx_opts.update(self.configure_opts)
+ self.context = context = MigrationContext.configure(
+ connection=conn,
+ opts=ctx_opts
+ )
+
+ connection = context.bind
+ self.autogen_context = {
+ 'imports': set(),
+ 'connection': connection,
+ 'dialect': connection.dialect,
+ 'context': context
+ }
+
+ def tearDown(self):
+ self.conn.close()
+
+
+class AutogenFixtureTest(_ComparesFKs):
+
+ def _fixture(
+ self, m1, m2, include_schemas=False,
+ opts=None, object_filters=_default_object_filters):
+ self.metadata, model_metadata = m1, m2
+ self.metadata.create_all(self.bind)
+
+ with self.bind.connect() as conn:
+ ctx_opts = {
+ 'compare_type': True,
+ 'compare_server_default': True,
+ 'target_metadata': model_metadata,
+ 'upgrade_token': "upgrades",
+ 'downgrade_token': "downgrades",
+ 'alembic_module_prefix': 'op.',
+ 'sqlalchemy_module_prefix': 'sa.',
+ }
+ if opts:
+ ctx_opts.update(opts)
+ self.context = context = MigrationContext.configure(
+ connection=conn,
+ opts=ctx_opts
+ )
+
+ connection = context.bind
+ autogen_context = {
+ 'imports': set(),
+ 'connection': connection,
+ 'dialect': connection.dialect,
+ 'context': context,
+ 'metadata': model_metadata,
+ 'object_filters': object_filters,
+ 'include_schemas': include_schemas
+ }
+ diffs = []
+ autogenerate._produce_net_changes(
+ autogen_context, diffs
+ )
+ return diffs
+
+ reports_unnamed_constraints = False
+
+ def setUp(self):
+ staging_env()
+ self.bind = config.db
+
+ def tearDown(self):
+ if hasattr(self, 'metadata'):
+ self.metadata.drop_all(self.bind)
+ clear_staging_env()
+
diff --git a/tests/test_autogen_composition.py b/tests/test_autogen_composition.py
new file mode 100644
index 0000000..b1717ab
--- /dev/null
+++ b/tests/test_autogen_composition.py
@@ -0,0 +1,328 @@
+import re
+
+from alembic import autogenerate
+from alembic.migration import MigrationContext
+from alembic.testing import TestBase
+from alembic.testing import eq_
+
+from ._autogen_fixtures import AutogenTest, ModelOne, _default_include_object
+
+
+class AutogenerateDiffTest(ModelOne, AutogenTest, TestBase):
+ __only_on__ = 'sqlite'
+
+ def test_render_nothing(self):
+ context = MigrationContext.configure(
+ connection=self.bind.connect(),
+ opts={
+ 'compare_type': True,
+ 'compare_server_default': True,
+ 'target_metadata': self.m1,
+ 'upgrade_token': "upgrades",
+ 'downgrade_token': "downgrades",
+ }
+ )
+ template_args = {}
+ autogenerate._render_migration_diffs(context, template_args, set())
+
+ eq_(re.sub(r"u'", "'", template_args['upgrades']),
+ """### commands auto generated by Alembic - please adjust! ###
+ pass
+ ### end Alembic commands ###""")
+ eq_(re.sub(r"u'", "'", template_args['downgrades']),
+ """### commands auto generated by Alembic - please adjust! ###
+ pass
+ ### end Alembic commands ###""")
+
+ def test_render_nothing_batch(self):
+ context = MigrationContext.configure(
+ connection=self.bind.connect(),
+ opts={
+ 'compare_type': True,
+ 'compare_server_default': True,
+ 'target_metadata': self.m1,
+ 'upgrade_token': "upgrades",
+ 'downgrade_token': "downgrades",
+ 'alembic_module_prefix': 'op.',
+ 'sqlalchemy_module_prefix': 'sa.',
+ 'render_as_batch': True,
+ 'include_symbol': lambda name, schema: False
+ }
+ )
+ template_args = {}
+ autogenerate._render_migration_diffs(
+ context, template_args, set(),
+
+ )
+ eq_(re.sub(r"u'", "'", template_args['upgrades']),
+ """### commands auto generated by Alembic - please adjust! ###
+ pass
+ ### end Alembic commands ###""")
+ eq_(re.sub(r"u'", "'", template_args['downgrades']),
+ """### commands auto generated by Alembic - please adjust! ###
+ pass
+ ### end Alembic commands ###""")
+
+ def test_render_diffs_standard(self):
+ """test a full render including indentation"""
+
+ template_args = {}
+ autogenerate._render_migration_diffs(
+ self.context, template_args, set())
+ eq_(re.sub(r"u'", "'", template_args['upgrades']),
+ """### commands auto generated by Alembic - please adjust! ###
+ op.create_table('item',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('description', sa.String(length=100), nullable=True),
+ sa.Column('order_id', sa.Integer(), nullable=True),
+ sa.CheckConstraint('len(description) > 5'),
+ sa.ForeignKeyConstraint(['order_id'], ['order.order_id'], ),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.drop_table('extra')
+ op.add_column('address', sa.Column('street', sa.String(length=50), \
+nullable=True))
+ op.create_unique_constraint('uq_email', 'address', ['email_address'])
+ op.add_column('order', sa.Column('user_id', sa.Integer(), nullable=True))
+ op.alter_column('order', 'amount',
+ existing_type=sa.NUMERIC(precision=8, scale=2),
+ type_=sa.Numeric(precision=10, scale=2),
+ nullable=True,
+ existing_server_default=sa.text('0'))
+ op.create_foreign_key(None, 'order', 'user', ['user_id'], ['id'])
+ op.alter_column('user', 'a1',
+ existing_type=sa.TEXT(),
+ server_default='x',
+ existing_nullable=True)
+ op.alter_column('user', 'name',
+ existing_type=sa.VARCHAR(length=50),
+ nullable=False)
+ op.drop_index('pw_idx', table_name='user')
+ op.drop_column('user', 'pw')
+ ### end Alembic commands ###""")
+
+ eq_(re.sub(r"u'", "'", template_args['downgrades']),
+ """### commands auto generated by Alembic - please adjust! ###
+ op.add_column('user', sa.Column('pw', sa.VARCHAR(length=50), \
+nullable=True))
+ op.create_index('pw_idx', 'user', ['pw'], unique=False)
+ op.alter_column('user', 'name',
+ existing_type=sa.VARCHAR(length=50),
+ nullable=True)
+ op.alter_column('user', 'a1',
+ existing_type=sa.TEXT(),
+ server_default=None,
+ existing_nullable=True)
+ op.drop_constraint(None, 'order', type_='foreignkey')
+ op.alter_column('order', 'amount',
+ existing_type=sa.Numeric(precision=10, scale=2),
+ type_=sa.NUMERIC(precision=8, scale=2),
+ nullable=False,
+ existing_server_default=sa.text('0'))
+ op.drop_column('order', 'user_id')
+ op.drop_constraint('uq_email', 'address', type_='unique')
+ op.drop_column('address', 'street')
+ op.create_table('extra',
+ sa.Column('x', sa.CHAR(), nullable=True),
+ sa.Column('uid', sa.INTEGER(), nullable=True),
+ sa.ForeignKeyConstraint(['uid'], ['user.id'], )
+ )
+ op.drop_table('item')
+ ### end Alembic commands ###""")
+
+ def test_render_diffs_batch(self):
+ """test a full render in batch mode including indentation"""
+
+ template_args = {}
+ self.context.opts['render_as_batch'] = True
+ autogenerate._render_migration_diffs(
+ self.context, template_args, set())
+
+ eq_(re.sub(r"u'", "'", template_args['upgrades']),
+ """### commands auto generated by Alembic - please adjust! ###
+ op.create_table('item',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('description', sa.String(length=100), nullable=True),
+ sa.Column('order_id', sa.Integer(), nullable=True),
+ sa.CheckConstraint('len(description) > 5'),
+ sa.ForeignKeyConstraint(['order_id'], ['order.order_id'], ),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.drop_table('extra')
+ with op.batch_alter_table('address', schema=None) as batch_op:
+ batch_op.add_column(sa.Column('street', sa.String(length=50), nullable=True))
+ batch_op.create_unique_constraint('uq_email', ['email_address'])
+
+ with op.batch_alter_table('order', schema=None) as batch_op:
+ batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
+ batch_op.alter_column('amount',
+ existing_type=sa.NUMERIC(precision=8, scale=2),
+ type_=sa.Numeric(precision=10, scale=2),
+ nullable=True,
+ existing_server_default=sa.text('0'))
+ batch_op.create_foreign_key(None, 'order', 'user', ['user_id'], ['id'])
+
+ with op.batch_alter_table('user', schema=None) as batch_op:
+ batch_op.alter_column('a1',
+ existing_type=sa.TEXT(),
+ server_default='x',
+ existing_nullable=True)
+ batch_op.alter_column('name',
+ existing_type=sa.VARCHAR(length=50),
+ nullable=False)
+ batch_op.drop_index('pw_idx')
+ batch_op.drop_column('pw')
+
+ ### end Alembic commands ###""")
+
+ eq_(re.sub(r"u'", "'", template_args['downgrades']),
+ """### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table('user', schema=None) as batch_op:
+ batch_op.add_column(sa.Column('pw', sa.VARCHAR(length=50), nullable=True))
+ batch_op.create_index('pw_idx', ['pw'], unique=False)
+ batch_op.alter_column('name',
+ existing_type=sa.VARCHAR(length=50),
+ nullable=True)
+ batch_op.alter_column('a1',
+ existing_type=sa.TEXT(),
+ server_default=None,
+ existing_nullable=True)
+
+ with op.batch_alter_table('order', schema=None) as batch_op:
+ batch_op.drop_constraint(None, type_='foreignkey')
+ batch_op.alter_column('amount',
+ existing_type=sa.Numeric(precision=10, scale=2),
+ type_=sa.NUMERIC(precision=8, scale=2),
+ nullable=False,
+ existing_server_default=sa.text('0'))
+ batch_op.drop_column('user_id')
+
+ with op.batch_alter_table('address', schema=None) as batch_op:
+ batch_op.drop_constraint('uq_email', type_='unique')
+ batch_op.drop_column('street')
+
+ op.create_table('extra',
+ sa.Column('x', sa.CHAR(), nullable=True),
+ sa.Column('uid', sa.INTEGER(), nullable=True),
+ sa.ForeignKeyConstraint(['uid'], ['user.id'], )
+ )
+ op.drop_table('item')
+ ### end Alembic commands ###""")
+
+
+class AutogenerateDiffTestWSchema(ModelOne, AutogenTest, TestBase):
+ __only_on__ = 'postgresql'
+ schema = "test_schema"
+
+ def test_render_nothing(self):
+ context = MigrationContext.configure(
+ connection=self.bind.connect(),
+ opts={
+ 'compare_type': True,
+ 'compare_server_default': True,
+ 'target_metadata': self.m1,
+ 'upgrade_token': "upgrades",
+ 'downgrade_token': "downgrades",
+ 'alembic_module_prefix': 'op.',
+ 'sqlalchemy_module_prefix': 'sa.',
+ 'include_symbol': lambda name, schema: False
+ }
+ )
+ template_args = {}
+ autogenerate._render_migration_diffs(
+ context, template_args, set(),
+
+ )
+ eq_(re.sub(r"u'", "'", template_args['upgrades']),
+ """### commands auto generated by Alembic - please adjust! ###
+ pass
+ ### end Alembic commands ###""")
+ eq_(re.sub(r"u'", "'", template_args['downgrades']),
+ """### commands auto generated by Alembic - please adjust! ###
+ pass
+ ### end Alembic commands ###""")
+
+ def test_render_diffs_extras(self):
+ """test a full render including indentation (include and schema)"""
+
+ template_args = {}
+ self.context.opts.update({
+ 'include_object': _default_include_object,
+ 'include_schemas': True
+ })
+ autogenerate._render_migration_diffs(
+ self.context, template_args, set()
+ )
+
+ eq_(re.sub(r"u'", "'", template_args['upgrades']),
+ """### commands auto generated by Alembic - please adjust! ###
+ op.create_table('item',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('description', sa.String(length=100), nullable=True),
+ sa.Column('order_id', sa.Integer(), nullable=True),
+ sa.CheckConstraint('len(description) > 5'),
+ sa.ForeignKeyConstraint(['order_id'], ['%(schema)s.order.order_id'], ),
+ sa.PrimaryKeyConstraint('id'),
+ schema='%(schema)s'
+ )
+ op.drop_table('extra', schema='%(schema)s')
+ op.add_column('address', sa.Column('street', sa.String(length=50), \
+nullable=True), schema='%(schema)s')
+ op.create_unique_constraint('uq_email', 'address', ['email_address'], \
+schema='test_schema')
+ op.add_column('order', sa.Column('user_id', sa.Integer(), nullable=True), \
+schema='%(schema)s')
+ op.alter_column('order', 'amount',
+ existing_type=sa.NUMERIC(precision=8, scale=2),
+ type_=sa.Numeric(precision=10, scale=2),
+ nullable=True,
+ existing_server_default=sa.text('0'),
+ schema='%(schema)s')
+ op.create_foreign_key(None, 'order', 'user', ['user_id'], ['id'], \
+source_schema='%(schema)s', referent_schema='%(schema)s')
+ op.alter_column('user', 'a1',
+ existing_type=sa.TEXT(),
+ server_default='x',
+ existing_nullable=True,
+ schema='%(schema)s')
+ op.alter_column('user', 'name',
+ existing_type=sa.VARCHAR(length=50),
+ nullable=False,
+ schema='%(schema)s')
+ op.drop_index('pw_idx', table_name='user', schema='test_schema')
+ op.drop_column('user', 'pw', schema='%(schema)s')
+ ### end Alembic commands ###""" % {"schema": self.schema})
+
+ eq_(re.sub(r"u'", "'", template_args['downgrades']),
+ """### commands auto generated by Alembic - please adjust! ###
+ op.add_column('user', sa.Column('pw', sa.VARCHAR(length=50), \
+autoincrement=False, nullable=True), schema='%(schema)s')
+ op.create_index('pw_idx', 'user', ['pw'], unique=False, schema='%(schema)s')
+ op.alter_column('user', 'name',
+ existing_type=sa.VARCHAR(length=50),
+ nullable=True,
+ schema='%(schema)s')
+ op.alter_column('user', 'a1',
+ existing_type=sa.TEXT(),
+ server_default=None,
+ existing_nullable=True,
+ schema='%(schema)s')
+ op.drop_constraint(None, 'order', schema='%(schema)s', type_='foreignkey')
+ op.alter_column('order', 'amount',
+ existing_type=sa.Numeric(precision=10, scale=2),
+ type_=sa.NUMERIC(precision=8, scale=2),
+ nullable=False,
+ existing_server_default=sa.text('0'),
+ schema='%(schema)s')
+ op.drop_column('order', 'user_id', schema='%(schema)s')
+ op.drop_constraint('uq_email', 'address', schema='test_schema', type_='unique')
+ op.drop_column('address', 'street', schema='%(schema)s')
+ op.create_table('extra',
+ sa.Column('x', sa.CHAR(length=1), autoincrement=False, nullable=True),
+ sa.Column('uid', sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.ForeignKeyConstraint(['uid'], ['%(schema)s.user.id'], \
+name='extra_uid_fkey'),
+ schema='%(schema)s'
+ )
+ op.drop_table('item', schema='%(schema)s')
+ ### end Alembic commands ###""" % {"schema": self.schema})
diff --git a/tests/test_autogenerate.py b/tests/test_autogen_diffs.py
index a089b42..f32fd84 100644
--- a/tests/test_autogenerate.py
+++ b/tests/test_autogen_diffs.py
@@ -1,4 +1,3 @@
-import re
import sys
from sqlalchemy import MetaData, Column, Table, Integer, String, Text, \
@@ -13,170 +12,13 @@ from alembic.testing import TestBase
from alembic.testing import config
from alembic.testing import assert_raises_message
from alembic.testing.mock import Mock
-from alembic.testing.env import staging_env, clear_staging_env
from alembic.testing import eq_
-from alembic.ddl.base import _fk_spec
from alembic.util import CommandError
+from ._autogen_fixtures import \
+ AutogenTest, AutogenFixtureTest, _default_object_filters
py3k = sys.version_info >= (3, )
-names_in_this_test = set()
-
-
-def _default_include_object(obj, name, type_, reflected, compare_to):
- if type_ == "table":
- return name in names_in_this_test
- else:
- return True
-
-_default_object_filters = [
- _default_include_object
-]
-from sqlalchemy import event
-
-
-@event.listens_for(Table, "after_parent_attach")
-def new_table(table, parent):
- names_in_this_test.add(table.name)
-
-
-class _ComparesFKs(object):
- def _assert_fk_diff(
- self, diff, type_, source_table, source_columns,
- target_table, target_columns, name=None, conditional_name=None,
- source_schema=None):
- # the public API for ForeignKeyConstraint was not very rich
- # in 0.7, 0.8, so here we use the well-known but slightly
- # private API to get at its elements
- (fk_source_schema, fk_source_table,
- fk_source_columns, fk_target_schema, fk_target_table,
- fk_target_columns) = _fk_spec(diff[1])
-
- eq_(diff[0], type_)
- eq_(fk_source_table, source_table)
- eq_(fk_source_columns, source_columns)
- eq_(fk_target_table, target_table)
- eq_(fk_source_schema, source_schema)
-
- eq_([elem.column.name for elem in diff[1].elements],
- target_columns)
- if conditional_name is not None:
- if config.requirements.no_fk_names.enabled:
- eq_(diff[1].name, None)
- elif conditional_name == 'servergenerated':
- fks = Inspector.from_engine(self.bind).\
- get_foreign_keys(source_table)
- server_fk_name = fks[0]['name']
- eq_(diff[1].name, server_fk_name)
- else:
- eq_(diff[1].name, conditional_name)
- else:
- eq_(diff[1].name, name)
-
-
-class AutogenTest(_ComparesFKs):
-
- @classmethod
- def _get_bind(cls):
- return config.db
-
- configure_opts = {}
-
- @classmethod
- def setup_class(cls):
- staging_env()
- cls.bind = cls._get_bind()
- cls.m1 = cls._get_db_schema()
- cls.m1.create_all(cls.bind)
- cls.m2 = cls._get_model_schema()
-
- @classmethod
- def teardown_class(cls):
- cls.m1.drop_all(cls.bind)
- clear_staging_env()
-
- def setUp(self):
- self.conn = conn = self.bind.connect()
- ctx_opts = {
- 'compare_type': True,
- 'compare_server_default': True,
- 'target_metadata': self.m2,
- 'upgrade_token': "upgrades",
- 'downgrade_token': "downgrades",
- 'alembic_module_prefix': 'op.',
- 'sqlalchemy_module_prefix': 'sa.',
- }
- if self.configure_opts:
- ctx_opts.update(self.configure_opts)
- self.context = context = MigrationContext.configure(
- connection=conn,
- opts=ctx_opts
- )
-
- connection = context.bind
- self.autogen_context = {
- 'imports': set(),
- 'connection': connection,
- 'dialect': connection.dialect,
- 'context': context
- }
-
- def tearDown(self):
- self.conn.close()
-
-
-class AutogenFixtureTest(_ComparesFKs):
-
- def _fixture(
- self, m1, m2, include_schemas=False,
- opts=None, object_filters=_default_object_filters):
- self.metadata, model_metadata = m1, m2
- self.metadata.create_all(self.bind)
-
- with self.bind.connect() as conn:
- ctx_opts = {
- 'compare_type': True,
- 'compare_server_default': True,
- 'target_metadata': model_metadata,
- 'upgrade_token': "upgrades",
- 'downgrade_token': "downgrades",
- 'alembic_module_prefix': 'op.',
- 'sqlalchemy_module_prefix': 'sa.',
- }
- if opts:
- ctx_opts.update(opts)
- self.context = context = MigrationContext.configure(
- connection=conn,
- opts=ctx_opts
- )
-
- connection = context.bind
- autogen_context = {
- 'imports': set(),
- 'connection': connection,
- 'dialect': connection.dialect,
- 'context': context
- }
- diffs = []
- autogenerate._produce_net_changes(
- connection, model_metadata, diffs,
- autogen_context,
- object_filters=object_filters,
- include_schemas=include_schemas
- )
- return diffs
-
- reports_unnamed_constraints = False
-
- def setUp(self):
- staging_env()
- self.bind = config.db
-
- def tearDown(self):
- if hasattr(self, 'metadata'):
- self.metadata.drop_all(self.bind)
- clear_staging_env()
-
class AutogenCrossSchemaTest(AutogenTest, TestBase):
__only_on__ = 'postgresql'
@@ -221,8 +63,6 @@ class AutogenCrossSchemaTest(AutogenTest, TestBase):
return m
def test_default_schema_omitted_upgrade(self):
- metadata = self.m2
- connection = self.context.bind
diffs = []
def include_object(obj, name, type_, reflected, compare_to):
@@ -230,17 +70,17 @@ class AutogenCrossSchemaTest(AutogenTest, TestBase):
return name == "t3"
else:
return True
- autogenerate._produce_net_changes(connection, metadata, diffs,
- self.autogen_context,
- object_filters=[include_object],
- include_schemas=True
- )
+ self.autogen_context.update({
+ 'object_filters': [include_object],
+ 'include_schemas': True,
+ 'metadata': self.m2
+ })
+ autogenerate._produce_net_changes(self.autogen_context, diffs)
+
eq_(diffs[0][0], "add_table")
eq_(diffs[0][1].schema, None)
def test_alt_schema_included_upgrade(self):
- metadata = self.m2
- connection = self.context.bind
diffs = []
def include_object(obj, name, type_, reflected, compare_to):
@@ -248,17 +88,18 @@ class AutogenCrossSchemaTest(AutogenTest, TestBase):
return name == "t4"
else:
return True
- autogenerate._produce_net_changes(connection, metadata, diffs,
- self.autogen_context,
- object_filters=[include_object],
- include_schemas=True
- )
+
+ self.autogen_context.update({
+ 'object_filters': [include_object],
+ 'include_schemas': True,
+ 'metadata': self.m2
+ })
+ autogenerate._produce_net_changes(self.autogen_context, diffs)
+
eq_(diffs[0][0], "add_table")
eq_(diffs[0][1].schema, config.test_schema)
def test_default_schema_omitted_downgrade(self):
- metadata = self.m2
- connection = self.context.bind
diffs = []
def include_object(obj, name, type_, reflected, compare_to):
@@ -266,17 +107,17 @@ class AutogenCrossSchemaTest(AutogenTest, TestBase):
return name == "t1"
else:
return True
- autogenerate._produce_net_changes(connection, metadata, diffs,
- self.autogen_context,
- object_filters=[include_object],
- include_schemas=True
- )
+ self.autogen_context.update({
+ 'object_filters': [include_object],
+ 'include_schemas': True,
+ 'metadata': self.m2
+ })
+ autogenerate._produce_net_changes(self.autogen_context, diffs)
+
eq_(diffs[0][0], "remove_table")
eq_(diffs[0][1].schema, None)
def test_alt_schema_included_downgrade(self):
- metadata = self.m2
- connection = self.context.bind
diffs = []
def include_object(obj, name, type_, reflected, compare_to):
@@ -284,11 +125,12 @@ class AutogenCrossSchemaTest(AutogenTest, TestBase):
return name == "t2"
else:
return True
- autogenerate._produce_net_changes(connection, metadata, diffs,
- self.autogen_context,
- object_filters=[include_object],
- include_schemas=True
- )
+ self.autogen_context.update({
+ 'object_filters': [include_object],
+ 'include_schemas': True,
+ 'metadata': self.m2
+ })
+ autogenerate._produce_net_changes(self.autogen_context, diffs)
eq_(diffs[0][0], "remove_table")
eq_(diffs[0][1].schema, config.test_schema)
@@ -426,12 +268,12 @@ class AutogenerateDiffTest(ModelOne, AutogenTest, TestBase):
"""test generation of diff rules"""
metadata = self.m2
- connection = self.context.bind
diffs = []
+ ctx = self.autogen_context.copy()
+ ctx['metadata'] = self.m2
+ ctx['object_filters'] = _default_object_filters
autogenerate._produce_net_changes(
- connection, metadata, diffs,
- self.autogen_context,
- object_filters=_default_object_filters,
+ ctx, diffs
)
eq_(
@@ -484,228 +326,31 @@ class AutogenerateDiffTest(ModelOne, AutogenTest, TestBase):
eq_(diffs[10][0], 'remove_column')
eq_(diffs[10][3].name, 'pw')
- def test_render_nothing(self):
- context = MigrationContext.configure(
- connection=self.bind.connect(),
- opts={
- 'compare_type': True,
- 'compare_server_default': True,
- 'target_metadata': self.m1,
- 'upgrade_token': "upgrades",
- 'downgrade_token': "downgrades",
- }
- )
- template_args = {}
- autogenerate._produce_migration_diffs(context, template_args, set())
-
- eq_(re.sub(r"u'", "'", template_args['upgrades']),
- """### commands auto generated by Alembic - please adjust! ###
- pass
- ### end Alembic commands ###""")
- eq_(re.sub(r"u'", "'", template_args['downgrades']),
- """### commands auto generated by Alembic - please adjust! ###
- pass
- ### end Alembic commands ###""")
-
- def test_render_nothing_batch(self):
- context = MigrationContext.configure(
- connection=self.bind.connect(),
- opts={
- 'compare_type': True,
- 'compare_server_default': True,
- 'target_metadata': self.m1,
- 'upgrade_token': "upgrades",
- 'downgrade_token': "downgrades",
- 'alembic_module_prefix': 'op.',
- 'sqlalchemy_module_prefix': 'sa.',
- 'render_as_batch': True
- }
- )
- template_args = {}
- autogenerate._produce_migration_diffs(
- context, template_args, set(),
- include_symbol=lambda name, schema: False
- )
- eq_(re.sub(r"u'", "'", template_args['upgrades']),
- """### commands auto generated by Alembic - please adjust! ###
- pass
- ### end Alembic commands ###""")
- eq_(re.sub(r"u'", "'", template_args['downgrades']),
- """### commands auto generated by Alembic - please adjust! ###
- pass
- ### end Alembic commands ###""")
-
- def test_render_diffs_standard(self):
- """test a full render including indentation"""
-
- template_args = {}
- autogenerate._produce_migration_diffs(
- self.context, template_args, set())
- eq_(re.sub(r"u'", "'", template_args['upgrades']),
- """### commands auto generated by Alembic - please adjust! ###
- op.create_table('item',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('description', sa.String(length=100), nullable=True),
- sa.Column('order_id', sa.Integer(), nullable=True),
- sa.CheckConstraint('len(description) > 5'),
- sa.ForeignKeyConstraint(['order_id'], ['order.order_id'], ),
- sa.PrimaryKeyConstraint('id')
- )
- op.drop_table('extra')
- op.add_column('address', sa.Column('street', sa.String(length=50), \
-nullable=True))
- op.create_unique_constraint('uq_email', 'address', ['email_address'])
- op.add_column('order', sa.Column('user_id', sa.Integer(), nullable=True))
- op.alter_column('order', 'amount',
- existing_type=sa.NUMERIC(precision=8, scale=2),
- type_=sa.Numeric(precision=10, scale=2),
- nullable=True,
- existing_server_default=sa.text('0'))
- op.create_foreign_key(None, 'order', 'user', ['user_id'], ['id'])
- op.alter_column('user', 'a1',
- existing_type=sa.TEXT(),
- server_default='x',
- existing_nullable=True)
- op.alter_column('user', 'name',
- existing_type=sa.VARCHAR(length=50),
- nullable=False)
- op.drop_index('pw_idx', table_name='user')
- op.drop_column('user', 'pw')
- ### end Alembic commands ###""")
-
- eq_(re.sub(r"u'", "'", template_args['downgrades']),
- """### commands auto generated by Alembic - please adjust! ###
- op.add_column('user', sa.Column('pw', sa.VARCHAR(length=50), \
-nullable=True))
- op.create_index('pw_idx', 'user', ['pw'], unique=False)
- op.alter_column('user', 'name',
- existing_type=sa.VARCHAR(length=50),
- nullable=True)
- op.alter_column('user', 'a1',
- existing_type=sa.TEXT(),
- server_default=None,
- existing_nullable=True)
- op.drop_constraint(None, 'order', type_='foreignkey')
- op.alter_column('order', 'amount',
- existing_type=sa.Numeric(precision=10, scale=2),
- type_=sa.NUMERIC(precision=8, scale=2),
- nullable=False,
- existing_server_default=sa.text('0'))
- op.drop_column('order', 'user_id')
- op.drop_constraint('uq_email', 'address', type_='unique')
- op.drop_column('address', 'street')
- op.create_table('extra',
- sa.Column('x', sa.CHAR(), nullable=True),
- sa.Column('uid', sa.INTEGER(), nullable=True),
- sa.ForeignKeyConstraint(['uid'], ['user.id'], )
- )
- op.drop_table('item')
- ### end Alembic commands ###""")
-
- def test_render_diffs_batch(self):
- """test a full render in batch mode including indentation"""
-
- template_args = {}
- self.context.opts['render_as_batch'] = True
- autogenerate._produce_migration_diffs(
- self.context, template_args, set())
-
- eq_(re.sub(r"u'", "'", template_args['upgrades']),
- """### commands auto generated by Alembic - please adjust! ###
- op.create_table('item',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('description', sa.String(length=100), nullable=True),
- sa.Column('order_id', sa.Integer(), nullable=True),
- sa.CheckConstraint('len(description) > 5'),
- sa.ForeignKeyConstraint(['order_id'], ['order.order_id'], ),
- sa.PrimaryKeyConstraint('id')
- )
- op.drop_table('extra')
- with op.batch_alter_table('address', schema=None) as batch_op:
- batch_op.add_column(sa.Column('street', sa.String(length=50), nullable=True))
- batch_op.create_unique_constraint('uq_email', ['email_address'])
-
- with op.batch_alter_table('order', schema=None) as batch_op:
- batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
- batch_op.alter_column('amount',
- existing_type=sa.NUMERIC(precision=8, scale=2),
- type_=sa.Numeric(precision=10, scale=2),
- nullable=True,
- existing_server_default=sa.text('0'))
- batch_op.create_foreign_key(None, 'order', 'user', ['user_id'], ['id'])
-
- with op.batch_alter_table('user', schema=None) as batch_op:
- batch_op.alter_column('a1',
- existing_type=sa.TEXT(),
- server_default='x',
- existing_nullable=True)
- batch_op.alter_column('name',
- existing_type=sa.VARCHAR(length=50),
- nullable=False)
- batch_op.drop_index('pw_idx')
- batch_op.drop_column('pw')
-
- ### end Alembic commands ###""")
-
- eq_(re.sub(r"u'", "'", template_args['downgrades']),
- """### commands auto generated by Alembic - please adjust! ###
- with op.batch_alter_table('user', schema=None) as batch_op:
- batch_op.add_column(sa.Column('pw', sa.VARCHAR(length=50), nullable=True))
- batch_op.create_index('pw_idx', ['pw'], unique=False)
- batch_op.alter_column('name',
- existing_type=sa.VARCHAR(length=50),
- nullable=True)
- batch_op.alter_column('a1',
- existing_type=sa.TEXT(),
- server_default=None,
- existing_nullable=True)
-
- with op.batch_alter_table('order', schema=None) as batch_op:
- batch_op.drop_constraint(None, type_='foreignkey')
- batch_op.alter_column('amount',
- existing_type=sa.Numeric(precision=10, scale=2),
- type_=sa.NUMERIC(precision=8, scale=2),
- nullable=False,
- existing_server_default=sa.text('0'))
- batch_op.drop_column('user_id')
-
- with op.batch_alter_table('address', schema=None) as batch_op:
- batch_op.drop_constraint('uq_email', type_='unique')
- batch_op.drop_column('street')
-
- op.create_table('extra',
- sa.Column('x', sa.CHAR(), nullable=True),
- sa.Column('uid', sa.INTEGER(), nullable=True),
- sa.ForeignKeyConstraint(['uid'], ['user.id'], )
- )
- op.drop_table('item')
- ### end Alembic commands ###""")
-
def test_include_symbol(self):
+
+ diffs = []
+
+ def include_symbol(name, schema=None):
+ return name in ('address', 'order')
+
context = MigrationContext.configure(
connection=self.bind.connect(),
opts={
'compare_type': True,
'compare_server_default': True,
'target_metadata': self.m2,
- 'include_symbol': lambda name, schema=None:
- name in ('address', 'order'),
- 'upgrade_token': "upgrades",
- 'downgrade_token': "downgrades",
- 'alembic_module_prefix': 'op.',
- 'sqlalchemy_module_prefix': 'sa.',
+ 'include_symbol': include_symbol,
}
)
- template_args = {}
- autogenerate._produce_migration_diffs(context, template_args, set())
- template_args['upgrades'] = \
- template_args['upgrades'].replace("u'", "'")
- template_args['downgrades'] = template_args['downgrades'].\
- replace("u'", "'")
- assert "alter_column('user'" not in template_args['upgrades']
- assert "alter_column('user'" not in template_args['downgrades']
- assert "alter_column('order'" in template_args['upgrades']
- assert "alter_column('order'" in template_args['downgrades']
+
+ diffs = autogenerate.compare_metadata(
+ context, context.opts['target_metadata'])
+
+ alter_cols = set([
+ d[2] for d in self._flatten_diffs(diffs)
+ if d[0].startswith('modify')
+ ])
+ eq_(alter_cols, set(['order']))
def test_include_object(self):
def include_object(obj, name, type_, reflected, compare_to):
@@ -732,28 +377,23 @@ nullable=True))
'compare_server_default': True,
'target_metadata': self.m2,
'include_object': include_object,
- 'upgrade_token': "upgrades",
- 'downgrade_token': "downgrades",
- 'alembic_module_prefix': 'op.',
- 'sqlalchemy_module_prefix': 'sa.',
}
)
- template_args = {}
- autogenerate._produce_migration_diffs(context, template_args, set())
-
- template_args['upgrades'] = \
- template_args['upgrades'].replace("u'", "'")
- template_args['downgrades'] = template_args['downgrades'].\
- replace("u'", "'")
- assert "op.create_table('item'" not in template_args['upgrades']
- assert "op.create_table('item'" not in template_args['downgrades']
-
- assert "alter_column('user'" in template_args['upgrades']
- assert "alter_column('user'" in template_args['downgrades']
- assert "'street'" not in template_args['upgrades']
- assert "'street'" not in template_args['downgrades']
- assert "alter_column('order'" in template_args['upgrades']
- assert "alter_column('order'" in template_args['downgrades']
+
+ diffs = autogenerate.compare_metadata(
+ context, context.opts['target_metadata'])
+
+ alter_cols = set([
+ d[2] for d in self._flatten_diffs(diffs)
+ if d[0].startswith('modify')
+ ]).union(
+ d[3].name for d in self._flatten_diffs(diffs)
+ if d[0] == 'add_column'
+ ).union(
+ d[1].name for d in self._flatten_diffs(diffs)
+ if d[0] == 'add_table'
+ )
+ eq_(alter_cols, set(['user_id', 'order', 'user']))
def test_skip_null_type_comparison_reflected(self):
diff = []
@@ -841,14 +481,14 @@ class AutogenerateDiffTestWSchema(ModelOne, AutogenTest, TestBase):
"""test generation of diff rules"""
metadata = self.m2
- connection = self.context.bind
diffs = []
- autogenerate._produce_net_changes(
- connection, metadata, diffs,
- self.autogen_context,
- object_filters=_default_object_filters,
- include_schemas=True
- )
+
+ self.autogen_context.update({
+ 'object_filters': _default_object_filters,
+ 'include_schemas': True,
+ 'metadata': self.m2
+ })
+ autogenerate._produce_net_changes(self.autogen_context, diffs)
eq_(
diffs[0],
@@ -901,116 +541,6 @@ class AutogenerateDiffTestWSchema(ModelOne, AutogenTest, TestBase):
eq_(diffs[10][0], 'remove_column')
eq_(diffs[10][3].name, 'pw')
- def test_render_nothing(self):
- context = MigrationContext.configure(
- connection=self.bind.connect(),
- opts={
- 'compare_type': True,
- 'compare_server_default': True,
- 'target_metadata': self.m1,
- 'upgrade_token': "upgrades",
- 'downgrade_token': "downgrades",
- 'alembic_module_prefix': 'op.',
- 'sqlalchemy_module_prefix': 'sa.',
- }
- )
- template_args = {}
- autogenerate._produce_migration_diffs(
- context, template_args, set(),
- include_symbol=lambda name, schema: False
- )
- eq_(re.sub(r"u'", "'", template_args['upgrades']),
- """### commands auto generated by Alembic - please adjust! ###
- pass
- ### end Alembic commands ###""")
- eq_(re.sub(r"u'", "'", template_args['downgrades']),
- """### commands auto generated by Alembic - please adjust! ###
- pass
- ### end Alembic commands ###""")
-
- def test_render_diffs_extras(self):
- """test a full render including indentation (include and schema)"""
-
- template_args = {}
- autogenerate._produce_migration_diffs(
- self.context, template_args, set(),
- include_object=_default_include_object,
- include_schemas=True
- )
-
- eq_(re.sub(r"u'", "'", template_args['upgrades']),
- """### commands auto generated by Alembic - please adjust! ###
- op.create_table('item',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('description', sa.String(length=100), nullable=True),
- sa.Column('order_id', sa.Integer(), nullable=True),
- sa.CheckConstraint('len(description) > 5'),
- sa.ForeignKeyConstraint(['order_id'], ['%(schema)s.order.order_id'], ),
- sa.PrimaryKeyConstraint('id'),
- schema='%(schema)s'
- )
- op.drop_table('extra', schema='%(schema)s')
- op.add_column('address', sa.Column('street', sa.String(length=50), \
-nullable=True), schema='%(schema)s')
- op.create_unique_constraint('uq_email', 'address', ['email_address'], \
-schema='test_schema')
- op.add_column('order', sa.Column('user_id', sa.Integer(), nullable=True), \
-schema='%(schema)s')
- op.alter_column('order', 'amount',
- existing_type=sa.NUMERIC(precision=8, scale=2),
- type_=sa.Numeric(precision=10, scale=2),
- nullable=True,
- existing_server_default=sa.text('0'),
- schema='%(schema)s')
- op.create_foreign_key(None, 'order', 'user', ['user_id'], ['id'], \
-source_schema='%(schema)s', referent_schema='%(schema)s')
- op.alter_column('user', 'a1',
- existing_type=sa.TEXT(),
- server_default='x',
- existing_nullable=True,
- schema='%(schema)s')
- op.alter_column('user', 'name',
- existing_type=sa.VARCHAR(length=50),
- nullable=False,
- schema='%(schema)s')
- op.drop_index('pw_idx', table_name='user', schema='test_schema')
- op.drop_column('user', 'pw', schema='%(schema)s')
- ### end Alembic commands ###""" % {"schema": self.schema})
-
- eq_(re.sub(r"u'", "'", template_args['downgrades']),
- """### commands auto generated by Alembic - please adjust! ###
- op.add_column('user', sa.Column('pw', sa.VARCHAR(length=50), \
-autoincrement=False, nullable=True), schema='%(schema)s')
- op.create_index('pw_idx', 'user', ['pw'], unique=False, schema='%(schema)s')
- op.alter_column('user', 'name',
- existing_type=sa.VARCHAR(length=50),
- nullable=True,
- schema='%(schema)s')
- op.alter_column('user', 'a1',
- existing_type=sa.TEXT(),
- server_default=None,
- existing_nullable=True,
- schema='%(schema)s')
- op.drop_constraint(None, 'order', schema='%(schema)s', type_='foreignkey')
- op.alter_column('order', 'amount',
- existing_type=sa.Numeric(precision=10, scale=2),
- type_=sa.NUMERIC(precision=8, scale=2),
- nullable=False,
- existing_server_default=sa.text('0'),
- schema='%(schema)s')
- op.drop_column('order', 'user_id', schema='%(schema)s')
- op.drop_constraint('uq_email', 'address', schema='test_schema', type_='unique')
- op.drop_column('address', 'street', schema='%(schema)s')
- op.create_table('extra',
- sa.Column('x', sa.CHAR(length=1), autoincrement=False, nullable=True),
- sa.Column('uid', sa.INTEGER(), autoincrement=False, nullable=True),
- sa.ForeignKeyConstraint(['uid'], ['%(schema)s.user.id'], \
-name='extra_uid_fkey'),
- schema='%(schema)s'
- )
- op.drop_table('item', schema='%(schema)s')
- ### end Alembic commands ###""" % {"schema": self.schema})
-
class AutogenerateCustomCompareTypeTest(AutogenTest, TestBase):
__only_on__ = 'sqlite'
@@ -1038,8 +568,9 @@ class AutogenerateCustomCompareTypeTest(AutogenTest, TestBase):
self.context._user_compare_type = my_compare_type
diffs = []
- autogenerate._produce_net_changes(self.context.bind, self.m2,
- diffs, self.autogen_context)
+ ctx = self.autogen_context.copy()
+ ctx['metadata'] = self.m2
+ autogenerate._produce_net_changes(ctx, diffs)
first_table = self.m2.tables['sometable']
first_column = first_table.columns['id']
@@ -1062,8 +593,10 @@ class AutogenerateCustomCompareTypeTest(AutogenTest, TestBase):
self.context._user_compare_type = my_compare_type
diffs = []
- autogenerate._produce_net_changes(self.context.bind, self.m2,
- diffs, self.autogen_context)
+ ctx = self.autogen_context.copy()
+ ctx['metadata'] = self.m2
+ diffs = []
+ autogenerate._produce_net_changes(ctx, diffs)
eq_(diffs, [])
@@ -1072,9 +605,10 @@ class AutogenerateCustomCompareTypeTest(AutogenTest, TestBase):
my_compare_type.return_value = True
self.context._user_compare_type = my_compare_type
+ ctx = self.autogen_context.copy()
+ ctx['metadata'] = self.m2
diffs = []
- autogenerate._produce_net_changes(self.context.bind, self.m2,
- diffs, self.autogen_context)
+ autogenerate._produce_net_changes(ctx, diffs)
eq_(diffs[0][0][0], 'modify_type')
eq_(diffs[1][0][0], 'modify_type')
@@ -1101,14 +635,10 @@ class PKConstraintUpgradesIgnoresNullableTest(AutogenTest, TestBase):
return cls._get_db_schema()
def test_no_change(self):
- metadata = self.m2
- connection = self.context.bind
-
diffs = []
-
- autogenerate._produce_net_changes(connection, metadata, diffs,
- self.autogen_context
- )
+ ctx = self.autogen_context.copy()
+ ctx['metadata'] = self.m2
+ autogenerate._produce_net_changes(ctx, diffs)
eq_(diffs, [])
@@ -1143,15 +673,12 @@ class AutogenKeyTest(AutogenTest, TestBase):
symbols = ['someothertable', 'sometable']
def test_autogen(self):
- metadata = self.m2
- connection = self.context.bind
diffs = []
- autogenerate._produce_net_changes(connection, metadata, diffs,
- self.autogen_context,
- include_schemas=False
- )
+ ctx = self.autogen_context.copy()
+ ctx['metadata'] = self.m2
+ autogenerate._produce_net_changes(ctx, diffs)
eq_(diffs[0][0], "add_table")
eq_(diffs[0][1].name, "sometable")
eq_(diffs[1][0], "add_column")
@@ -1178,8 +705,10 @@ class AutogenVersionTableTest(AutogenTest, TestBase):
def test_no_version_table(self):
diffs = []
- autogenerate._produce_net_changes(self.context.bind, self.m2,
- diffs, self.autogen_context)
+ ctx = self.autogen_context.copy()
+ ctx['metadata'] = self.m2
+
+ autogenerate._produce_net_changes(ctx, diffs)
eq_(diffs, [])
def test_version_table_in_target(self):
@@ -1188,8 +717,9 @@ class AutogenVersionTableTest(AutogenTest, TestBase):
self.version_table_name,
self.m2, Column('x', Integer), schema=self.version_table_schema)
- autogenerate._produce_net_changes(self.context.bind, self.m2,
- diffs, self.autogen_context)
+ ctx = self.autogen_context.copy()
+ ctx['metadata'] = self.m2
+ autogenerate._produce_net_changes(ctx, diffs)
eq_(diffs, [])
@@ -1239,13 +769,10 @@ class AutogenerateDiffOrderTest(AutogenTest, TestBase):
before their parent tables
"""
- metadata = self.m2
- connection = self.context.bind
+ ctx = self.autogen_context.copy()
+ ctx['metadata'] = self.m2
diffs = []
-
- autogenerate._produce_net_changes(connection, metadata, diffs,
- self.autogen_context
- )
+ autogenerate._produce_net_changes(ctx, diffs)
eq_(diffs[0][0], 'add_table')
eq_(diffs[0][1].name, "parent")
diff --git a/tests/test_autogen_fks.py b/tests/test_autogen_fks.py
index 90d25c4..525bed5 100644
--- a/tests/test_autogen_fks.py
+++ b/tests/test_autogen_fks.py
@@ -1,5 +1,5 @@
import sys
-from alembic.testing import TestBase, config
+from alembic.testing import TestBase
from sqlalchemy import MetaData, Column, Table, Integer, String, \
ForeignKeyConstraint
@@ -7,7 +7,7 @@ from alembic.testing import eq_
py3k = sys.version_info >= (3, )
-from .test_autogenerate import AutogenFixtureTest
+from ._autogen_fixtures import AutogenFixtureTest
class AutogenerateForeignKeysTest(AutogenFixtureTest, TestBase):
diff --git a/tests/test_autogen_indexes.py b/tests/test_autogen_indexes.py
index 1f92649..8ee33bc 100644
--- a/tests/test_autogen_indexes.py
+++ b/tests/test_autogen_indexes.py
@@ -12,7 +12,7 @@ from alembic.testing.env import staging_env
py3k = sys.version_info >= (3, )
-from .test_autogenerate import AutogenFixtureTest
+from ._autogen_fixtures import AutogenFixtureTest
class NoUqReflection(object):
diff --git a/tests/test_autogen_render.py b/tests/test_autogen_render.py
index 52f3601..4a49d5c 100644
--- a/tests/test_autogen_render.py
+++ b/tests/test_autogen_render.py
@@ -2,6 +2,7 @@ import re
import sys
from alembic.testing import TestBase, exclusions
+from alembic.operations import ops
from sqlalchemy import MetaData, Column, Table, String, \
Numeric, CHAR, ForeignKey, DATETIME, Integer, \
CheckConstraint, Unicode, Enum, cast,\
@@ -16,7 +17,8 @@ from sqlalchemy.sql import and_, column, literal_column, false
from alembic.testing.mock import patch
-from alembic import autogenerate, util, compat
+from alembic import autogenerate, util
+from alembic.util import compat
from alembic.testing import eq_, eq_ignore_whitespace, config
from alembic.testing.fixtures import op_fixture
@@ -58,8 +60,9 @@ class AutogenRenderTest(TestBase):
Column('code', String(255)),
)
idx = Index('test_active_code_idx', t.c.active, t.c.code)
+ op_obj = ops.CreateIndexOp.from_index(idx)
eq_ignore_whitespace(
- autogenerate.render._add_index(idx, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_index('test_active_code_idx', 'test', "
"['active', 'code'], unique=False)"
)
@@ -76,8 +79,9 @@ class AutogenRenderTest(TestBase):
schema='CamelSchema'
)
idx = Index('test_active_code_idx', t.c.active, t.c.code)
+ op_obj = ops.CreateIndexOp.from_index(idx)
eq_ignore_whitespace(
- autogenerate.render._add_index(idx, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_index('test_active_code_idx', 'test', "
"['active', 'code'], unique=False, schema='CamelSchema')"
)
@@ -94,16 +98,18 @@ class AutogenRenderTest(TestBase):
idx = Index('foo_idx', t.c.x, t.c.y,
postgresql_where=(t.c.y == 'something'))
+ op_obj = ops.CreateIndexOp.from_index(idx)
+
if compat.sqla_08:
eq_ignore_whitespace(
- autogenerate.render._add_index(idx, autogen_context),
+ autogenerate.render_op_text(autogen_context, op_obj),
"""op.create_index('foo_idx', 't', \
['x', 'y'], unique=False, """
"""postgresql_where=sa.text(!U"t.y = 'something'"))"""
)
else:
eq_ignore_whitespace(
- autogenerate.render._add_index(idx, autogen_context),
+ autogenerate.render_op_text(autogen_context, op_obj),
"""op.create_index('foo_idx', 't', ['x', 'y'], \
unique=False, """
"""postgresql_where=sa.text(!U't.y = %(y_1)s'))"""
@@ -118,8 +124,10 @@ unique=False, """
Column('code', String(255))
)
idx = Index('test_lower_code_idx', func.lower(t.c.code))
+ op_obj = ops.CreateIndexOp.from_index(idx)
+
eq_ignore_whitespace(
- autogenerate.render._add_index(idx, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_index('test_lower_code_idx', 'test', "
"[sa.text(!U'lower(test.code)')], unique=False)"
)
@@ -133,8 +141,9 @@ unique=False, """
Column('code', String(255))
)
idx = Index('test_lower_code_idx', cast(t.c.code, String))
+ op_obj = ops.CreateIndexOp.from_index(idx)
eq_ignore_whitespace(
- autogenerate.render._add_index(idx, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_index('test_lower_code_idx', 'test', "
"[sa.text(!U'CAST(test.code AS CHAR)')], unique=False)"
)
@@ -148,8 +157,9 @@ unique=False, """
Column('code', String(255))
)
idx = Index('test_desc_code_idx', t.c.code.desc())
+ op_obj = ops.CreateIndexOp.from_index(idx)
eq_ignore_whitespace(
- autogenerate.render._add_index(idx, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_index('test_desc_code_idx', 'test', "
"[sa.text(!U'test.code DESC')], unique=False)"
)
@@ -165,8 +175,9 @@ unique=False, """
Column('code', String(255)),
)
idx = Index('test_active_code_idx', t.c.active, t.c.code)
+ op_obj = ops.DropIndexOp.from_index(idx)
eq_ignore_whitespace(
- autogenerate.render._drop_index(idx, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.drop_index('test_active_code_idx', table_name='test')"
)
@@ -182,8 +193,9 @@ unique=False, """
schema='CamelSchema'
)
idx = Index('test_active_code_idx', t.c.active, t.c.code)
+ op_obj = ops.DropIndexOp.from_index(idx)
eq_ignore_whitespace(
- autogenerate.render._drop_index(idx, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.drop_index('test_active_code_idx', " +
"table_name='test', schema='CamelSchema')"
)
@@ -199,9 +211,9 @@ unique=False, """
Column('code', String(255)),
)
uq = UniqueConstraint(t.c.code, name='uq_test_code')
+ op_obj = ops.AddConstraintOp.from_constraint(uq)
eq_ignore_whitespace(
- autogenerate.render._add_unique_constraint(
- uq, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_unique_constraint('uq_test_code', 'test', ['code'])"
)
@@ -217,9 +229,9 @@ unique=False, """
schema='CamelSchema'
)
uq = UniqueConstraint(t.c.code, name='uq_test_code')
+ op_obj = ops.AddConstraintOp.from_constraint(uq)
eq_ignore_whitespace(
- autogenerate.render._add_unique_constraint(
- uq, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_unique_constraint('uq_test_code', 'test', "
"['code'], schema='CamelSchema')"
)
@@ -235,8 +247,9 @@ unique=False, """
Column('code', String(255)),
)
uq = UniqueConstraint(t.c.code, name='uq_test_code')
+ op_obj = ops.DropConstraintOp.from_constraint(uq)
eq_ignore_whitespace(
- autogenerate.render._drop_constraint(uq, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.drop_constraint('uq_test_code', 'test', type_='unique')"
)
@@ -252,8 +265,9 @@ unique=False, """
schema='CamelSchema'
)
uq = UniqueConstraint(t.c.code, name='uq_test_code')
+ op_obj = ops.DropConstraintOp.from_constraint(uq)
eq_ignore_whitespace(
- autogenerate.render._drop_constraint(uq, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.drop_constraint('uq_test_code', 'test', "
"schema='CamelSchema', type_='unique')"
)
@@ -264,8 +278,9 @@ unique=False, """
b = Table('b', m, Column('a_id', Integer, ForeignKey('a.id')))
fk = ForeignKeyConstraint(['a_id'], ['a.id'], name='fk_a_id')
b.append_constraint(fk)
+ op_obj = ops.AddConstraintOp.from_constraint(fk)
eq_ignore_whitespace(
- autogenerate.render._add_fk_constraint(fk, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_foreign_key('fk_a_id', 'b', 'a', ['a_id'], ['id'])"
)
@@ -281,11 +296,12 @@ unique=False, """
# SQLA 0.9 generates a u'' here for remote cols while 0.8 does not,
# so just whack out "'u" here from the generated
+ op_obj = ops.AddConstraintOp.from_constraint(fk)
eq_ignore_whitespace(
re.sub(
r"u'", "'",
- autogenerate.render._add_fk_constraint(
- fk, self.autogen_context)),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
+ ),
"op.create_foreign_key(None, 't', 't2', ['c'], ['c_rem'], "
"onupdate='CASCADE')"
)
@@ -294,11 +310,12 @@ unique=False, """
if not util.sqla_08:
t1.append_constraint(fk)
+ op_obj = ops.AddConstraintOp.from_constraint(fk)
eq_ignore_whitespace(
re.sub(
r"u'", "'",
- autogenerate.render._add_fk_constraint(
- fk, self.autogen_context)),
+ autogenerate.render_op_text(self.autogen_context, op_obj)
+ ),
"op.create_foreign_key(None, 't', 't2', ['c'], ['c_rem'], "
"ondelete='CASCADE')"
)
@@ -306,11 +323,11 @@ unique=False, """
fk = ForeignKeyConstraint([t1.c.c], [t2.c.c_rem], deferrable=True)
if not util.sqla_08:
t1.append_constraint(fk)
+ op_obj = ops.AddConstraintOp.from_constraint(fk)
eq_ignore_whitespace(
re.sub(
r"u'", "'",
- autogenerate.render._add_fk_constraint(
- fk, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj)
),
"op.create_foreign_key(None, 't', 't2', ['c'], ['c_rem'], "
"deferrable=True)"
@@ -319,11 +336,11 @@ unique=False, """
fk = ForeignKeyConstraint([t1.c.c], [t2.c.c_rem], initially="XYZ")
if not util.sqla_08:
t1.append_constraint(fk)
+ op_obj = ops.AddConstraintOp.from_constraint(fk)
eq_ignore_whitespace(
re.sub(
r"u'", "'",
- autogenerate.render._add_fk_constraint(
- fk, self.autogen_context)
+ autogenerate.render_op_text(self.autogen_context, op_obj),
),
"op.create_foreign_key(None, 't', 't2', ['c'], ['c_rem'], "
"initially='XYZ')"
@@ -334,11 +351,11 @@ unique=False, """
initially="XYZ", ondelete="CASCADE", deferrable=True)
if not util.sqla_08:
t1.append_constraint(fk)
+ op_obj = ops.AddConstraintOp.from_constraint(fk)
eq_ignore_whitespace(
re.sub(
r"u'", "'",
- autogenerate.render._add_fk_constraint(
- fk, self.autogen_context)
+ autogenerate.render_op_text(self.autogen_context, op_obj)
),
"op.create_foreign_key(None, 't', 't2', ['c'], ['c_rem'], "
"ondelete='CASCADE', initially='XYZ', deferrable=True)"
@@ -351,7 +368,8 @@ unique=False, """
'b', m,
Column('a_id', Integer, ForeignKey('a.aid'), key='baid'))
- py_code = autogenerate.render._add_table(b, self.autogen_context)
+ op_obj = ops.CreateTableOp.from_table(b)
+ py_code = autogenerate.render_op_text(self.autogen_context, op_obj)
eq_ignore_whitespace(
py_code,
@@ -373,7 +391,8 @@ unique=False, """
fk = ForeignKeyConstraint(['baid'], ['a.aid'], name='fk_a_id')
b.append_constraint(fk)
- py_code = autogenerate.render._add_table(b, self.autogen_context)
+ op_obj = ops.CreateTableOp.from_table(b)
+ py_code = autogenerate.render_op_text(self.autogen_context, op_obj)
eq_ignore_whitespace(
py_code,
@@ -389,14 +408,16 @@ unique=False, """
"fk_a_id FOREIGN KEY(a_id) REFERENCES a (id))")
context = op_fixture()
- py_code = autogenerate.render._add_fk_constraint(
- fk, self.autogen_context)
+
+ op_obj = ops.AddConstraintOp.from_constraint(fk)
eq_ignore_whitespace(
- autogenerate.render._add_fk_constraint(fk, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_foreign_key('fk_a_id', 'b', 'a', ['a_id'], ['id'])"
)
+ py_code = autogenerate.render_op_text(self.autogen_context, op_obj)
+
eval(py_code)
context.assert_(
"ALTER TABLE b ADD CONSTRAINT fk_a_id "
@@ -414,8 +435,9 @@ unique=False, """
["a_id"],
["CamelSchemaTwo.a.id"], name='fk_a_id')
b.append_constraint(fk)
+ op_obj = ops.AddConstraintOp.from_constraint(fk)
eq_ignore_whitespace(
- autogenerate.render._add_fk_constraint(fk, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_foreign_key('fk_a_id', 'b', 'a', ['a_id'], ['id'],"
" source_schema='CamelSchemaOne', "
"referent_schema='CamelSchemaTwo')"
@@ -427,8 +449,9 @@ unique=False, """
b = Table('b', m, Column('a_id', Integer, ForeignKey('a.id')))
fk = ForeignKeyConstraint(['a_id'], ['a.id'], name='fk_a_id')
b.append_constraint(fk)
+ op_obj = ops.DropConstraintOp.from_constraint(fk)
eq_ignore_whitespace(
- autogenerate.render._drop_constraint(fk, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.drop_constraint('fk_a_id', 'b', type_='foreignkey')"
)
@@ -444,9 +467,10 @@ unique=False, """
["a_id"],
["CamelSchemaTwo.a.id"], name='fk_a_id')
b.append_constraint(fk)
+ op_obj = ops.DropConstraintOp.from_constraint(fk)
eq_ignore_whitespace(
- autogenerate.render._drop_constraint(fk, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.drop_constraint('fk_a_id', 'b', schema='CamelSchemaOne', "
"type_='foreignkey')"
)
@@ -462,8 +486,10 @@ unique=False, """
UniqueConstraint("name", name="uq_name"),
UniqueConstraint("timestamp"),
)
+
+ op_obj = ops.CreateTableOp.from_table(t)
eq_ignore_whitespace(
- autogenerate.render._add_table(t, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('test',"
"sa.Column('id', sa.Integer(), nullable=False),"
"sa.Column('name', sa.Unicode(length=255), nullable=True),"
@@ -487,8 +513,9 @@ unique=False, """
Column('q', Integer, ForeignKey('address.id')),
schema='foo'
)
+ op_obj = ops.CreateTableOp.from_table(t)
eq_ignore_whitespace(
- autogenerate.render._add_table(t, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('test',"
"sa.Column('id', sa.Integer(), nullable=False),"
"sa.Column('q', sa.Integer(), nullable=True),"
@@ -503,8 +530,9 @@ unique=False, """
t = Table(compat.ue('\u0411\u0435\u0437'), m,
Column('id', Integer, primary_key=True),
)
+ op_obj = ops.CreateTableOp.from_table(t)
eq_ignore_whitespace(
- autogenerate.render._add_table(t, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table(%r,"
"sa.Column('id', sa.Integer(), nullable=False),"
"sa.PrimaryKeyConstraint('id'))" % compat.ue('\u0411\u0435\u0437')
@@ -516,8 +544,9 @@ unique=False, """
Column('id', Integer, primary_key=True),
schema=compat.ue('\u0411\u0435\u0437')
)
+ op_obj = ops.CreateTableOp.from_table(t)
eq_ignore_whitespace(
- autogenerate.render._add_table(t, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('test',"
"sa.Column('id', sa.Integer(), nullable=False),"
"sa.PrimaryKeyConstraint('id'),"
@@ -534,8 +563,9 @@ unique=False, """
Column('c', Integer),
Column('d', Integer),
)
+ op_obj = ops.CreateTableOp.from_table(t)
eq_ignore_whitespace(
- autogenerate.render._add_table(t, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('test',"
"*[sa.Column('a', sa.Integer(), nullable=True),"
"sa.Column('b', sa.Integer(), nullable=True),"
@@ -549,9 +579,10 @@ unique=False, """
Column('b', Integer),
Column('c', Integer),
)
+ op_obj = ops.CreateTableOp.from_table(t2)
eq_ignore_whitespace(
- autogenerate.render._add_table(t2, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('test2',"
"sa.Column('a', sa.Integer(), nullable=True),"
"sa.Column('b', sa.Integer(), nullable=True),"
@@ -564,8 +595,9 @@ unique=False, """
Column('id', Integer, primary_key=True),
Column('q', Integer, ForeignKey('foo.address.id')),
)
+ op_obj = ops.CreateTableOp.from_table(t)
eq_ignore_whitespace(
- autogenerate.render._add_table(t, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('test',"
"sa.Column('id', sa.Integer(), nullable=False),"
"sa.Column('q', sa.Integer(), nullable=True),"
@@ -580,10 +612,11 @@ unique=False, """
Column('id', Integer, primary_key=True),
Column('q', Integer, ForeignKey('address.id')),
)
+ op_obj = ops.CreateTableOp.from_table(t)
eq_ignore_whitespace(
re.sub(
r"u'", "'",
- autogenerate.render._add_table(t, self.autogen_context)
+ autogenerate.render_op_text(self.autogen_context, op_obj)
),
"op.create_table('test',"
"sa.Column('id', sa.Integer(), nullable=False),"
@@ -600,8 +633,9 @@ unique=False, """
Column('id', Integer, primary_key=True),
Column('q', Integer, ForeignKey('bar.address.id')),
)
+ op_obj = ops.CreateTableOp.from_table(t)
eq_ignore_whitespace(
- autogenerate.render._add_table(t, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('test',"
"sa.Column('id', sa.Integer(), nullable=False),"
"sa.Column('q', sa.Integer(), nullable=True),"
@@ -618,8 +652,9 @@ unique=False, """
Column('q', Integer, ForeignKey('bar.address.id')),
sqlite_autoincrement=True, mysql_engine="InnoDB"
)
+ op_obj = ops.CreateTableOp.from_table(t)
eq_ignore_whitespace(
- autogenerate.render._add_table(t, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('test',"
"sa.Column('id', sa.Integer(), nullable=False),"
"sa.Column('q', sa.Integer(), nullable=True),"
@@ -629,17 +664,20 @@ unique=False, """
)
def test_render_drop_table(self):
+ op_obj = ops.DropTableOp.from_table(
+ Table("sometable", MetaData())
+ )
eq_ignore_whitespace(
- autogenerate.render._drop_table(Table("sometable", MetaData()),
- self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.drop_table('sometable')"
)
def test_render_drop_table_w_schema(self):
+ op_obj = ops.DropTableOp.from_table(
+ Table("sometable", MetaData(), schema='foo')
+ )
eq_ignore_whitespace(
- autogenerate.render._drop_table(
- Table("sometable", MetaData(), schema='foo'),
- self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.drop_table('sometable', schema='foo')"
)
@@ -647,8 +685,9 @@ unique=False, """
m = MetaData()
t = Table('test', m, Column('x', Boolean()))
+ op_obj = ops.CreateTableOp.from_table(t)
eq_ignore_whitespace(
- autogenerate.render._add_table(t, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('test',"
"sa.Column('x', sa.Boolean(), nullable=True))"
)
@@ -658,52 +697,53 @@ unique=False, """
t1 = Table('t1', m, Column('x', Integer))
t2 = Table('t2', m, Column('x', Integer, primary_key=True))
+ op_obj = ops.CreateTableOp.from_table(t1)
eq_ignore_whitespace(
- autogenerate.render._add_table(t1, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('t1',"
"sa.Column('x', sa.Integer(), nullable=True))"
)
+ op_obj = ops.CreateTableOp.from_table(t2)
eq_ignore_whitespace(
- autogenerate.render._add_table(t2, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('t2',"
"sa.Column('x', sa.Integer(), nullable=False),"
"sa.PrimaryKeyConstraint('x'))"
)
def test_render_add_column(self):
+ op_obj = ops.AddColumnOp(
+ "foo", Column("x", Integer, server_default="5"))
eq_ignore_whitespace(
- autogenerate.render._add_column(
- None, "foo", Column("x", Integer, server_default="5"),
- self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.add_column('foo', sa.Column('x', sa.Integer(), "
"server_default='5', nullable=True))"
)
def test_render_add_column_w_schema(self):
+ op_obj = ops.AddColumnOp(
+ "bar", Column("x", Integer, server_default="5"),
+ schema="foo")
eq_ignore_whitespace(
- autogenerate.render._add_column(
- "foo", "bar", Column("x", Integer, server_default="5"),
- self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.add_column('bar', sa.Column('x', sa.Integer(), "
"server_default='5', nullable=True), schema='foo')"
)
def test_render_drop_column(self):
+ op_obj = ops.DropColumnOp.from_column_and_tablename(
+ None, "foo", Column("x", Integer, server_default="5"))
eq_ignore_whitespace(
- autogenerate.render._drop_column(
- None, "foo", Column("x", Integer, server_default="5"),
- self.autogen_context),
-
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.drop_column('foo', 'x')"
)
def test_render_drop_column_w_schema(self):
+ op_obj = ops.DropColumnOp.from_column_and_tablename(
+ "foo", "bar", Column("x", Integer, server_default="5"))
eq_ignore_whitespace(
- autogenerate.render._drop_column(
- "foo", "bar", Column("x", Integer, server_default="5"),
- self.autogen_context),
-
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.drop_column('bar', 'x', schema='foo')"
)
@@ -783,9 +823,8 @@ unique=False, """
PrimaryKeyConstraint('x'),
ForeignKeyConstraint(['x'], ['y'])
)
- result = autogenerate.render._add_table(
- t, autogen_context
- )
+ op_obj = ops.CreateTableOp.from_table(t)
+ result = autogenerate.render_op_text(autogen_context, op_obj)
eq_ignore_whitespace(
result,
"sa.create_table('t',"
@@ -794,45 +833,50 @@ unique=False, """
)
def test_render_modify_type(self):
+ op_obj = ops.AlterColumnOp(
+ "sometable", "somecolumn",
+ modify_type=CHAR(10), existing_type=CHAR(20)
+ )
eq_ignore_whitespace(
- autogenerate.render._modify_col(
- "sometable", "somecolumn",
- self.autogen_context,
- type_=CHAR(10), existing_type=CHAR(20)),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.alter_column('sometable', 'somecolumn', "
"existing_type=sa.CHAR(length=20), type_=sa.CHAR(length=10))"
)
def test_render_modify_type_w_schema(self):
+ op_obj = ops.AlterColumnOp(
+ "sometable", "somecolumn",
+ modify_type=CHAR(10), existing_type=CHAR(20),
+ schema='foo'
+ )
eq_ignore_whitespace(
- autogenerate.render._modify_col(
- "sometable", "somecolumn",
- self.autogen_context,
- type_=CHAR(10), existing_type=CHAR(20),
- schema='foo'),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.alter_column('sometable', 'somecolumn', "
"existing_type=sa.CHAR(length=20), type_=sa.CHAR(length=10), "
"schema='foo')"
)
def test_render_modify_nullable(self):
+ op_obj = ops.AlterColumnOp(
+ "sometable", "somecolumn",
+ existing_type=Integer(),
+ modify_nullable=True
+ )
eq_ignore_whitespace(
- autogenerate.render._modify_col(
- "sometable", "somecolumn",
- self.autogen_context,
- existing_type=Integer(),
- nullable=True),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.alter_column('sometable', 'somecolumn', "
"existing_type=sa.Integer(), nullable=True)"
)
def test_render_modify_nullable_w_schema(self):
+ op_obj = ops.AlterColumnOp(
+ "sometable", "somecolumn",
+ existing_type=Integer(),
+ modify_nullable=True, schema='foo'
+ )
+
eq_ignore_whitespace(
- autogenerate.render._modify_col(
- "sometable", "somecolumn",
- self.autogen_context,
- existing_type=Integer(),
- nullable=True, schema='foo'),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.alter_column('sometable', 'somecolumn', "
"existing_type=sa.Integer(), nullable=True, schema='foo')"
)
@@ -993,23 +1037,22 @@ unique=False, """
't', m, Column('c', Integer),
schema=compat.ue('\u0411\u0435\u0437')
)
+ op_obj = ops.AddConstraintOp.from_constraint(UniqueConstraint(t.c.c))
eq_ignore_whitespace(
- autogenerate.render._add_unique_constraint(
- UniqueConstraint(t.c.c),
- self.autogen_context
- ),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_unique_constraint(None, 't', ['c'], "
"schema=%r)" % compat.ue('\u0411\u0435\u0437')
)
def test_render_modify_nullable_w_default(self):
+ op_obj = ops.AlterColumnOp(
+ "sometable", "somecolumn",
+ existing_type=Integer(),
+ existing_server_default="5",
+ modify_nullable=True
+ )
eq_ignore_whitespace(
- autogenerate.render._modify_col(
- "sometable", "somecolumn",
- self.autogen_context,
- existing_type=Integer(),
- existing_server_default="5",
- nullable=True),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.alter_column('sometable', 'somecolumn', "
"existing_type=sa.Integer(), nullable=True, "
"existing_server_default='5')"
@@ -1236,13 +1279,14 @@ unique=False, """
)
def test_render_modify_reflected_int_server_default(self):
+ op_obj = ops.AlterColumnOp(
+ "sometable", "somecolumn",
+ existing_type=Integer(),
+ existing_server_default=DefaultClause(text("5")),
+ modify_nullable=True
+ )
eq_ignore_whitespace(
- autogenerate.render._modify_col(
- "sometable", "somecolumn",
- self.autogen_context,
- existing_type=Integer(),
- existing_server_default=DefaultClause(text("5")),
- nullable=True),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.alter_column('sometable', 'somecolumn', "
"existing_type=sa.Integer(), nullable=True, "
"existing_server_default=sa.text(!U'5'))"
@@ -1280,10 +1324,9 @@ class RenderNamingConventionTest(TestBase):
def test_schema_type_boolean(self):
t = Table('t', self.metadata, Column('c', Boolean(name='xyz')))
+ op_obj = ops.AddColumnOp.from_column(t.c.c)
eq_ignore_whitespace(
- autogenerate.render._add_column(
- None, "t", t.c.c,
- self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.add_column('t', "
"sa.Column('c', sa.Boolean(name='xyz'), nullable=True))"
)
@@ -1316,8 +1359,9 @@ class RenderNamingConventionTest(TestBase):
Column('code', String(255)),
)
idx = Index(None, t.c.active, t.c.code)
+ op_obj = ops.CreateIndexOp.from_index(idx)
eq_ignore_whitespace(
- autogenerate.render._add_index(idx, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_index(op.f('ix_ct_test_active'), 'test', "
"['active', 'code'], unique=False)"
)
@@ -1329,8 +1373,9 @@ class RenderNamingConventionTest(TestBase):
Column('code', String(255)),
)
idx = Index(None, t.c.active, t.c.code)
+ op_obj = ops.DropIndexOp.from_index(idx)
eq_ignore_whitespace(
- autogenerate.render._drop_index(idx, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.drop_index(op.f('ix_ct_test_active'), table_name='test')"
)
@@ -1342,8 +1387,9 @@ class RenderNamingConventionTest(TestBase):
schema='CamelSchema'
)
idx = Index(None, t.c.active, t.c.code)
+ op_obj = ops.CreateIndexOp.from_index(idx)
eq_ignore_whitespace(
- autogenerate.render._add_index(idx, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_index(op.f('ix_ct_CamelSchema_test_active'), 'test', "
"['active', 'code'], unique=False, schema='CamelSchema')"
)
@@ -1360,8 +1406,9 @@ class RenderNamingConventionTest(TestBase):
def test_inline_pk_constraint(self):
t = Table('t', self.metadata, Column('c', Integer, primary_key=True))
+ op_obj = ops.CreateTableOp.from_table(t)
eq_ignore_whitespace(
- autogenerate.render._add_table(t, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('t',sa.Column('c', sa.Integer(), nullable=False),"
"sa.PrimaryKeyConstraint('c', name=op.f('pk_ct_t')))"
)
@@ -1369,16 +1416,18 @@ class RenderNamingConventionTest(TestBase):
def test_inline_ck_constraint(self):
t = Table(
't', self.metadata, Column('c', Integer), CheckConstraint("c > 5"))
+ op_obj = ops.CreateTableOp.from_table(t)
eq_ignore_whitespace(
- autogenerate.render._add_table(t, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('t',sa.Column('c', sa.Integer(), nullable=True),"
"sa.CheckConstraint(!U'c > 5', name=op.f('ck_ct_t')))"
)
def test_inline_fk(self):
t = Table('t', self.metadata, Column('c', Integer, ForeignKey('q.id')))
+ op_obj = ops.CreateTableOp.from_table(t)
eq_ignore_whitespace(
- autogenerate.render._add_table(t, self.autogen_context),
+ autogenerate.render_op_text(self.autogen_context, op_obj),
"op.create_table('t',sa.Column('c', sa.Integer(), nullable=True),"
"sa.ForeignKeyConstraint(['c'], ['q.id'], "
"name=op.f('fk_ct_t_c_q')))"
diff --git a/tests/test_batch.py b/tests/test_batch.py
index a498c36..41d1957 100644
--- a/tests/test_batch.py
+++ b/tests/test_batch.py
@@ -1,15 +1,13 @@
from contextlib import contextmanager
import re
-import io
-
from alembic.testing import exclusions
from alembic.testing import TestBase, eq_, config
from alembic.testing.fixtures import op_fixture
from alembic.testing import mock
from alembic.operations import Operations
-from alembic.batch import ApplyBatchImpl
-from alembic.migration import MigrationContext
+from alembic.operations.batch import ApplyBatchImpl
+from alembic.runtime.migration import MigrationContext
from sqlalchemy import Integer, Table, Column, String, MetaData, ForeignKey, \
@@ -330,7 +328,7 @@ class BatchApplyTest(TestBase):
impl = self._simple_fixture()
col = Column('g', Integer)
# operations.add_column produces a table
- t = self.op._table('tname', col) # noqa
+ t = self.op.schema_obj.table('tname', col) # noqa
impl.add_column('tname', col)
new_table = self._assert_impl(impl, colnames=['id', 'x', 'y', 'g'])
eq_(new_table.c.g.name, 'g')
@@ -420,7 +418,7 @@ class BatchApplyTest(TestBase):
def test_add_fk(self):
impl = self._simple_fixture()
impl.add_column('tname', Column('user_id', Integer))
- fk = self.op._foreign_key_constraint(
+ fk = self.op.schema_obj.foreign_key_constraint(
'fk1', 'tname', 'user',
['user_id'], ['id'])
impl.add_constraint(fk)
@@ -447,7 +445,7 @@ class BatchApplyTest(TestBase):
def test_add_uq(self):
impl = self._simple_fixture()
- uq = self.op._unique_constraint(
+ uq = self.op.schema_obj.unique_constraint(
'uq1', 'tname', ['y']
)
@@ -459,7 +457,7 @@ class BatchApplyTest(TestBase):
def test_drop_uq(self):
impl = self._uq_fixture()
- uq = self.op._unique_constraint(
+ uq = self.op.schema_obj.unique_constraint(
'uq1', 'tname', ['y']
)
impl.drop_constraint(uq)
@@ -469,7 +467,7 @@ class BatchApplyTest(TestBase):
def test_create_index(self):
impl = self._simple_fixture()
- ix = self.op._index('ix1', 'tname', ['y'])
+ ix = self.op.schema_obj.index('ix1', 'tname', ['y'])
impl.create_index(ix)
self._assert_impl(
@@ -479,7 +477,7 @@ class BatchApplyTest(TestBase):
def test_drop_index(self):
impl = self._ix_fixture()
- ix = self.op._index('ix1', 'tname', ['y'])
+ ix = self.op.schema_obj.index('ix1', 'tname', ['y'])
impl.drop_index(ix)
self._assert_impl(
impl, colnames=['id', 'x', 'y'],
@@ -498,12 +496,14 @@ class BatchAPITest(TestBase):
@contextmanager
def _fixture(self, schema=None):
- migration_context = mock.Mock(opts={})
+ migration_context = mock.Mock(
+ opts={}, impl=mock.MagicMock(__dialect__='sqlite'))
op = Operations(migration_context)
batch = op.batch_alter_table(
'tname', recreate='never', schema=schema).__enter__()
- with mock.patch("alembic.operations.sa_schema") as mock_schema:
+ mock_schema = mock.MagicMock()
+ with mock.patch("alembic.operations.schemaobj.sa_schema", mock_schema):
yield batch
batch.impl.flush()
self.mock_schema = mock_schema
diff --git a/tests/test_config.py b/tests/test_config.py
index db37456..da0b413 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -1,9 +1,8 @@
#!coding: utf-8
-import os
-import tempfile
-from alembic import config, util, compat
+from alembic import config, util
+from alembic.util import compat
from alembic.migration import MigrationContext
from alembic.operations import Operations
from alembic.script import ScriptDirectory
diff --git a/tests/test_op.py b/tests/test_op.py
index 7d5f83e..9c14e49 100644
--- a/tests/test_op.py
+++ b/tests/test_op.py
@@ -524,7 +524,8 @@ class OpTest(TestBase):
def test_add_foreign_key_dialect_kw(self):
op_fixture()
with mock.patch(
- "alembic.operations.sa_schema.ForeignKeyConstraint") as fkc:
+ "sqlalchemy.schema.ForeignKeyConstraint"
+ ) as fkc:
op.create_foreign_key('fk_test', 't1', 't2',
['foo', 'bar'], ['bat', 'hoho'],
foobar_arg='xyz')
@@ -808,12 +809,6 @@ class OpTest(TestBase):
op.drop_constraint("f1", "t1", type_="foreignkey")
context.assert_("ALTER TABLE t1 DROP FOREIGN KEY f1")
- assert_raises_message(
- TypeError,
- r"Unknown arguments: badarg\d, badarg\d",
- op.alter_column, "t", "c", badarg1="x", badarg2="y"
- )
-
@config.requirements.fail_before_sqla_084
def test_naming_changes_drop_idx(self):
context = op_fixture('mssql')
@@ -856,4 +851,32 @@ class SQLModeOpTest(TestBase):
context.assert_(
"CREATE TABLE some_table (id INTEGER NOT NULL, st_id INTEGER, "
"PRIMARY KEY (id), FOREIGN KEY(st_id) REFERENCES some_table (id))"
- ) \ No newline at end of file
+ )
+
+
+class CustomOpTest(TestBase):
+ def test_custom_op(self):
+ from alembic.operations import Operations, MigrateOperation
+
+ @Operations.register_operation("create_sequence")
+ class CreateSequenceOp(MigrateOperation):
+ """Create a SEQUENCE."""
+
+ def __init__(self, sequence_name, **kw):
+ self.sequence_name = sequence_name
+ self.kw = kw
+
+ @classmethod
+ def create_sequence(cls, operations, sequence_name, **kw):
+ """Issue a "CREATE SEQUENCE" instruction."""
+
+ op = CreateSequenceOp(sequence_name, **kw)
+ return operations.invoke(op)
+
+ @Operations.implementation_for(CreateSequenceOp)
+ def create_sequence(operations, operation):
+ operations.execute("CREATE SEQUENCE %s" % operation.sequence_name)
+
+ context = op_fixture()
+ op.create_sequence('foob')
+ context.assert_("CREATE SEQUENCE foob")
diff --git a/tests/test_revision.py b/tests/test_revision.py
index d73316d..0a515de 100644
--- a/tests/test_revision.py
+++ b/tests/test_revision.py
@@ -1,6 +1,6 @@
from alembic.testing.fixtures import TestBase
from alembic.testing import eq_, assert_raises_message
-from alembic.revision import RevisionMap, Revision, MultipleHeads, \
+from alembic.script.revision import RevisionMap, Revision, MultipleHeads, \
RevisionError
diff --git a/tests/test_script_consumption.py b/tests/test_script_consumption.py
index 11b8080..c2eef0a 100644
--- a/tests/test_script_consumption.py
+++ b/tests/test_script_consumption.py
@@ -3,7 +3,8 @@
import os
import re
-from alembic import command, util, compat
+from alembic import command, util
+from alembic.util import compat
from alembic.script import ScriptDirectory, Script
from alembic.testing.env import clear_staging_env, staging_env, \
_sqlite_testing_config, write_script, _sqlite_file_db, \
diff --git a/tests/test_script_production.py b/tests/test_script_production.py
index 1f380ab..3ce6200 100644
--- a/tests/test_script_production.py
+++ b/tests/test_script_production.py
@@ -1,15 +1,20 @@
from alembic.testing.fixtures import TestBase
-from alembic.testing import eq_, ne_, is_, assert_raises_message
+from alembic.testing import eq_, ne_, assert_raises_message
from alembic.testing.env import clear_staging_env, staging_env, \
_get_staging_directory, _no_sql_testing_config, env_file_fixture, \
script_file_fixture, _testing_config, _sqlite_testing_config, \
- three_rev_fixture, _multi_dir_testing_config
+ three_rev_fixture, _multi_dir_testing_config, write_script,\
+ _sqlite_file_db
from alembic import command
from alembic.script import ScriptDirectory
from alembic.environment import EnvironmentContext
+from alembic.testing import mock
from alembic import util
+from alembic.operations import ops
import os
import datetime
+import sqlalchemy as sa
+from sqlalchemy.engine.reflection import Inspector
env, abc, def_ = None, None, None
@@ -214,6 +219,174 @@ class RevisionCommandTest(TestBase):
)
+class CustomizeRevisionTest(TestBase):
+ def setUp(self):
+ self.env = staging_env()
+ self.cfg = _multi_dir_testing_config()
+ self.cfg.set_main_option("revision_environment", "true")
+
+ script = ScriptDirectory.from_config(self.cfg)
+ # MARKMARK
+ self.model1 = util.rev_id()
+ self.model2 = util.rev_id()
+ self.model3 = util.rev_id()
+ for model, name in [
+ (self.model1, "model1"),
+ (self.model2, "model2"),
+ (self.model3, "model3"),
+ ]:
+ script.generate_revision(
+ model, name, refresh=True,
+ version_path=os.path.join(_get_staging_directory(), name),
+ head="base")
+
+ write_script(script, model, """\
+"%s"
+revision = '%s'
+down_revision = None
+branch_labels = ['%s']
+
+from alembic import op
+
+def upgrade():
+ pass
+
+def downgrade():
+ pass
+
+""" % (name, model, name))
+
+ def tearDown(self):
+ clear_staging_env()
+
+ def _env_fixture(self, fn, target_metadata):
+ self.engine = engine = _sqlite_file_db()
+
+ def run_env(self):
+ from alembic import context
+
+ with engine.connect() as connection:
+ context.configure(
+ connection=connection,
+ target_metadata=target_metadata,
+ process_revision_directives=fn)
+ with context.begin_transaction():
+ context.run_migrations()
+
+ return mock.patch(
+ "alembic.script.base.ScriptDirectory.run_env",
+ run_env
+ )
+
+ def test_new_locations_no_autogen(self):
+ m = sa.MetaData()
+
+ def process_revision_directives(context, rev, generate_revisions):
+ generate_revisions[:] = [
+ ops.MigrationScript(
+ util.rev_id(),
+ ops.UpgradeOps(),
+ ops.DowngradeOps(),
+ version_path=os.path.join(
+ _get_staging_directory(), "model1"),
+ head="model1@head"
+ ),
+ ops.MigrationScript(
+ util.rev_id(),
+ ops.UpgradeOps(),
+ ops.DowngradeOps(),
+ version_path=os.path.join(
+ _get_staging_directory(), "model2"),
+ head="model2@head"
+ ),
+ ops.MigrationScript(
+ util.rev_id(),
+ ops.UpgradeOps(),
+ ops.DowngradeOps(),
+ version_path=os.path.join(
+ _get_staging_directory(), "model3"),
+ head="model3@head"
+ ),
+ ]
+
+ with self._env_fixture(process_revision_directives, m):
+ revs = command.revision(self.cfg, message="some message")
+
+ script = ScriptDirectory.from_config(self.cfg)
+
+ for rev, model in [
+ (revs[0], "model1"),
+ (revs[1], "model2"),
+ (revs[2], "model3"),
+ ]:
+ rev_script = script.get_revision(rev.revision)
+ eq_(
+ rev_script.path,
+ os.path.abspath(os.path.join(
+ _get_staging_directory(), model,
+ "%s_.py" % (rev_script.revision, )
+ ))
+ )
+ assert os.path.exists(rev_script.path)
+
+ def test_autogen(self):
+ m = sa.MetaData()
+ sa.Table('t', m, sa.Column('x', sa.Integer))
+
+ def process_revision_directives(context, rev, generate_revisions):
+ existing_upgrades = generate_revisions[0].upgrade_ops
+ existing_downgrades = generate_revisions[0].downgrade_ops
+
+ # model1 will run the upgrades, e.g. create the table,
+ # model2 will run the downgrades as upgrades, e.g. drop
+ # the table again
+
+ generate_revisions[:] = [
+ ops.MigrationScript(
+ util.rev_id(),
+ existing_upgrades,
+ ops.DowngradeOps(),
+ version_path=os.path.join(
+ _get_staging_directory(), "model1"),
+ head="model1@head"
+ ),
+ ops.MigrationScript(
+ util.rev_id(),
+ existing_downgrades,
+ ops.DowngradeOps(),
+ version_path=os.path.join(
+ _get_staging_directory(), "model2"),
+ head="model2@head"
+ )
+ ]
+
+ with self._env_fixture(process_revision_directives, m):
+ command.upgrade(self.cfg, "heads")
+
+ eq_(
+ Inspector.from_engine(self.engine).get_table_names(),
+ ["alembic_version"]
+ )
+
+ command.revision(
+ self.cfg, message="some message",
+ autogenerate=True)
+
+ command.upgrade(self.cfg, "model1@head")
+
+ eq_(
+ Inspector.from_engine(self.engine).get_table_names(),
+ ["alembic_version", "t"]
+ )
+
+ command.upgrade(self.cfg, "model2@head")
+
+ eq_(
+ Inspector.from_engine(self.engine).get_table_names(),
+ ["alembic_version"]
+ )
+
+
class MultiDirRevisionCommandTest(TestBase):
def setUp(self):
self.env = staging_env()