summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--CHANGES17
-rw-r--r--alembic/__init__.py8
-rw-r--r--alembic/autogenerate.py44
-rw-r--r--alembic/command.py32
-rw-r--r--alembic/context.py634
-rw-r--r--alembic/environment.py448
-rw-r--r--alembic/migration.py191
-rw-r--r--alembic/op.py603
-rw-r--r--alembic/operations.py616
-rw-r--r--tests/__init__.py15
-rw-r--r--tests/test_versioning.py18
11 files changed, 1334 insertions, 1292 deletions
diff --git a/CHANGES b/CHANGES
index 05a8d86..07eb753 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,3 +1,20 @@
+0.2.0
+=====
+- [feature] API rearrangement allows everything
+ Alembic does to be represented by contextual
+ objects, including EnvironmentContext,
+ MigrationContext, and Operations. Other
+ libraries and applications can now use
+ things like "alembic.op" without relying
+ upon global configuration variables.
+ The rearrangement was done such that
+ existing migrations should be OK,
+ as long as they use the pattern
+ of "from alembic import context" and
+ "from alembic import op", as these
+ are now contextual objects, not modules.
+ [#19]
+
0.1.2
=====
- [bug] fix the config.main() function to honor
diff --git a/alembic/__init__.py b/alembic/__init__.py
index 18984e3..e561d0d 100644
--- a/alembic/__init__.py
+++ b/alembic/__init__.py
@@ -1,6 +1,12 @@
from os import path
-__version__ = '0.1.2'
+__version__ = '0.2.0'
package_dir = path.abspath(path.dirname(__file__))
+
+class _OpProxy(object):
+ _proxy = None
+ def __getattr__(self, key):
+ return getattr(self._proxy, key)
+op = _OpProxy()
diff --git a/alembic/autogenerate.py b/alembic/autogenerate.py
index 193882f..728d1e2 100644
--- a/alembic/autogenerate.py
+++ b/alembic/autogenerate.py
@@ -1,8 +1,6 @@
"""Provide the 'autogenerate' feature which can produce migration operations
automatically."""
-from alembic.context import _context_opts, get_bind, get_context
-from alembic import context
from alembic import util
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy import schema, types as sqltypes
@@ -14,8 +12,9 @@ log = logging.getLogger(__name__)
###################################################
# top level
-def produce_migration_diffs(template_args, imports):
- metadata = _context_opts['target_metadata']
+
+def produce_migration_diffs(context, opts, template_args, imports):
+ metadata = opts['target_metadata']
if metadata is None:
raise util.CommandError(
"Can't proceed with --autogenerate option; environment "
@@ -29,12 +28,13 @@ def produce_migration_diffs(template_args, imports):
'imports':imports,
'connection':connection,
'dialect':connection.dialect,
- 'context':get_context()
+ 'context':context,
+ 'opts':opts
}
_produce_net_changes(connection, metadata, diffs, autogen_context)
- template_args[_context_opts['upgrade_token']] = \
+ template_args[opts['upgrade_token']] = \
_indent(_produce_upgrade_commands(diffs, autogen_context))
- template_args[_context_opts['downgrade_token']] = \
+ template_args[opts['downgrade_token']] = \
_indent(_produce_downgrade_commands(diffs, autogen_context))
template_args['imports'] = "\n".join(sorted(imports))
@@ -304,7 +304,7 @@ def _invoke_modify_command(updown, args, autogen_context):
def _add_table(table, autogen_context):
return "%(prefix)screate_table(%(tablename)r,\n%(args)s\n)" % {
'tablename':table.name,
- 'prefix':_alembic_autogenerate_prefix(),
+ 'prefix':_alembic_autogenerate_prefix(autogen_context),
'args':',\n'.join(
[_render_column(col, autogen_context) for col in table.c] +
sorted([rcons for rcons in
@@ -317,20 +317,20 @@ def _add_table(table, autogen_context):
def _drop_table(table, autogen_context):
return "%(prefix)sdrop_table(%(tname)r)" % {
- "prefix":_alembic_autogenerate_prefix(),
+ "prefix":_alembic_autogenerate_prefix(autogen_context),
"tname":table.name
}
def _add_column(tname, column, autogen_context):
return "%(prefix)sadd_column(%(tname)r, %(column)s)" % {
- "prefix":_alembic_autogenerate_prefix(),
+ "prefix":_alembic_autogenerate_prefix(autogen_context),
"tname":tname,
"column":_render_column(column, autogen_context)
}
def _drop_column(tname, column, autogen_context):
return "%(prefix)sdrop_column(%(tname)r, %(cname)r)" % {
- "prefix":_alembic_autogenerate_prefix(),
+ "prefix":_alembic_autogenerate_prefix(autogen_context),
"tname":tname,
"cname":column.name
}
@@ -343,10 +343,10 @@ def _modify_col(tname, cname,
existing_type=None,
existing_nullable=None,
existing_server_default=False):
- sqla_prefix = _sqlalchemy_autogenerate_prefix()
+ sqla_prefix = _sqlalchemy_autogenerate_prefix(autogen_context)
indent = " " * 11
text = "%(prefix)salter_column(%(tname)r, %(cname)r" % {
- 'prefix':_alembic_autogenerate_prefix(),
+ 'prefix':_alembic_autogenerate_prefix(autogen_context),
'tname':tname,
'cname':cname}
text += ", \n%sexisting_type=%s" % (indent,
@@ -372,11 +372,11 @@ def _modify_col(tname, cname,
text += ")"
return text
-def _sqlalchemy_autogenerate_prefix():
- return _context_opts['sqlalchemy_module_prefix'] or ''
+def _sqlalchemy_autogenerate_prefix(autogen_context):
+ return autogen_context['opts']['sqlalchemy_module_prefix'] or ''
-def _alembic_autogenerate_prefix():
- return _context_opts['alembic_module_prefix'] or ''
+def _alembic_autogenerate_prefix(autogen_context):
+ return autogen_context['opts']['alembic_module_prefix'] or ''
def _render_column(column, autogen_context):
opts = []
@@ -388,9 +388,9 @@ def _render_column(column, autogen_context):
# TODO: for non-ascii colname, assign a "key"
return "%(prefix)sColumn(%(name)r, %(type)s, %(kw)s)" % {
- 'prefix':_sqlalchemy_autogenerate_prefix(),
+ 'prefix':_sqlalchemy_autogenerate_prefix(autogen_context),
'name':column.name,
- 'type':_repr_type(_sqlalchemy_autogenerate_prefix(), column.type, autogen_context),
+ 'type':_repr_type(_sqlalchemy_autogenerate_prefix(autogen_context), column.type, autogen_context),
'kw':", ".join(["%s=%s" % (kwname, val) for kwname, val in opts])
}
@@ -432,7 +432,7 @@ def _render_primary_key(constraint):
if constraint.name:
opts.append(("name", repr(constraint.name)))
return "%(prefix)sPrimaryKeyConstraint(%(args)s)" % {
- "prefix":_sqlalchemy_autogenerate_prefix(),
+ "prefix":_sqlalchemy_autogenerate_prefix(autogen_context),
"args":", ".join(
[repr(c.key) for c in constraint.columns] +
["%s=%s" % (kwname, val) for kwname, val in opts]
@@ -445,7 +445,7 @@ def _render_foreign_key(constraint):
opts.append(("name", repr(constraint.name)))
# TODO: deferrable, initially, etc.
return "%(prefix)sForeignKeyConstraint([%(cols)s], [%(refcols)s], %(args)s)" % {
- "prefix":_sqlalchemy_autogenerate_prefix(),
+ "prefix":_sqlalchemy_autogenerate_prefix(autogen_context),
"cols":", ".join("'%s'" % f.parent.key for f in constraint.elements),
"refcols":", ".join(repr(f._get_colspec()) for f in constraint.elements),
"args":", ".join(
@@ -458,7 +458,7 @@ def _render_check_constraint(constraint):
if constraint.name:
opts.append(("name", repr(constraint.name)))
return "%(prefix)sCheckConstraint('TODO')" % {
- "prefix":_sqlalchemy_autogenerate_prefix()
+ "prefix":_sqlalchemy_autogenerate_prefix(autogen_context)
}
_constraint_renderers = {
diff --git a/alembic/command.py b/alembic/command.py
index f7075b1..b743f3b 100644
--- a/alembic/command.py
+++ b/alembic/command.py
@@ -1,5 +1,5 @@
from alembic.script import ScriptDirectory
-from alembic import util, ddl, context, autogenerate as autogen
+from alembic import util, ddl, autogenerate as autogen, environment
import os
import functools
@@ -73,12 +73,12 @@ def revision(config, message=None, autogenerate=False):
autogen.produce_migration_diffs(template_args, imports)
return []
- context._opts(
+ with environment.configure(
config,
script,
fn = retrieve_migrations
- )
- script.run_env()
+ ):
+ script.run_env()
script.generate_rev(util.rev_id(), message, **template_args)
@@ -92,7 +92,7 @@ def upgrade(config, revision, sql=False, tag=None):
if not sql:
raise util.CommandError("Range revision not allowed")
starting_rev, revision = revision.split(':', 2)
- context._opts(
+ with environment.configure(
config,
script,
fn = functools.partial(script.upgrade_from, revision),
@@ -100,8 +100,8 @@ def upgrade(config, revision, sql=False, tag=None):
starting_rev = starting_rev,
destination_rev = revision,
tag = tag
- )
- script.run_env()
+ ):
+ script.run_env()
def downgrade(config, revision, sql=False, tag=None):
"""Revert to a previous version."""
@@ -114,7 +114,7 @@ def downgrade(config, revision, sql=False, tag=None):
raise util.CommandError("Range revision not allowed")
starting_rev, revision = revision.split(':', 2)
- context._opts(
+ with environment.configure(
config,
script,
fn = functools.partial(script.downgrade_to, revision),
@@ -122,8 +122,8 @@ def downgrade(config, revision, sql=False, tag=None):
starting_rev = starting_rev,
destination_rev = revision,
tag = tag
- )
- script.run_env()
+ ):
+ script.run_env()
def history(config):
"""List changeset scripts in chronological order."""
@@ -157,12 +157,12 @@ def current(config):
script._get_rev(rev))
return []
- context._opts(
+ with environment.configure(
config,
script,
fn = display_version
- )
- script.run_env()
+ ):
+ script.run_env()
def stamp(config, revision, sql=False, tag=None):
"""'stamp' the revision table with the given revision; don't
@@ -179,15 +179,15 @@ def stamp(config, revision, sql=False, tag=None):
dest = dest.revision
context.get_context()._update_current_rev(current, dest)
return []
- context._opts(
+ with environment.configure(
config,
script,
fn = do_stamp,
as_sql = sql,
destination_rev = revision,
tag = tag
- )
- script.run_env()
+ ):
+ script.run_env()
def splice(config, parent, child):
"""'splice' two branches, creating a new revision file.
diff --git a/alembic/context.py b/alembic/context.py
deleted file mode 100644
index 130ce3a..0000000
--- a/alembic/context.py
+++ /dev/null
@@ -1,634 +0,0 @@
-from alembic import util
-from sqlalchemy import MetaData, Table, Column, String, literal_column, \
- text
-from sqlalchemy import create_engine
-from sqlalchemy.engine import url as sqla_url
-from alembic import ddl
-import sys
-from contextlib import contextmanager
-
-import logging
-log = logging.getLogger(__name__)
-
-_meta = MetaData()
-_version = Table('alembic_version', _meta,
- Column('version_num', String(32), nullable=False)
- )
-
-class Context(object):
- """Maintains state throughout the migration running process.
-
- Mediates the relationship between an ``env.py`` environment script,
- a :class:`.ScriptDirectory` instance, and a :class:`.DefaultImpl` instance.
-
- The :class:`.Context` is available directly via the :func:`.get_context` function,
- though usually it is referenced behind the scenes by the various module level functions
- within the :mod:`alembic.context` module.
-
- """
- def __init__(self, dialect, script, connection, fn,
- as_sql=False,
- output_buffer=None,
- transactional_ddl=None,
- starting_rev=None,
- compare_type=False,
- compare_server_default=False):
- self.dialect = dialect
- self.script = script
- if as_sql:
- self.connection = self._stdout_connection(connection)
- assert self.connection is not None
- else:
- self.connection = connection
- self._migrations_fn = fn
- self.as_sql = as_sql
- self.output_buffer = output_buffer if output_buffer else sys.stdout
-
- self._user_compare_type = compare_type
- self._user_compare_server_default = compare_server_default
-
- self._start_from_rev = starting_rev
- self.impl = ddl.DefaultImpl.get_by_dialect(dialect)(
- dialect, self.connection, self.as_sql,
- transactional_ddl,
- self.output_buffer,
- _context_opts
- )
- log.info("Context impl %s.", self.impl.__class__.__name__)
- if self.as_sql:
- log.info("Generating static SQL")
- log.info("Will assume %s DDL.",
- "transactional" if self.impl.transactional_ddl
- else "non-transactional")
-
- def _current_rev(self):
- if self.as_sql:
- return self._start_from_rev
- else:
- if self._start_from_rev:
- raise util.CommandError(
- "Can't specify current_rev to context "
- "when using a database connection")
- _version.create(self.connection, checkfirst=True)
- return self.connection.scalar(_version.select())
-
- def _update_current_rev(self, old, new):
- if old == new:
- return
- if new is None:
- self.impl._exec(_version.delete())
- elif old is None:
- self.impl._exec(_version.insert().
- values(version_num=literal_column("'%s'" % new))
- )
- else:
- self.impl._exec(_version.update().
- values(version_num=literal_column("'%s'" % new))
- )
-
- def run_migrations(self, **kw):
-
- current_rev = rev = False
- self.impl.start_migrations()
- for change, prev_rev, rev in self._migrations_fn(
- self._current_rev()):
- if current_rev is False:
- current_rev = prev_rev
- if self.as_sql and not current_rev:
- _version.create(self.connection)
- log.info("Running %s %s -> %s", change.__name__, prev_rev, rev)
- if self.as_sql:
- self.impl.static_output(
- "-- Running %s %s -> %s" %
- (change.__name__, prev_rev, rev)
- )
- change(**kw)
- if not self.impl.transactional_ddl:
- self._update_current_rev(prev_rev, rev)
- prev_rev = rev
-
- if rev is not False:
- if self.impl.transactional_ddl:
- self._update_current_rev(current_rev, rev)
-
- if self.as_sql and not rev:
- _version.drop(self.connection)
-
- def execute(self, sql):
- self.impl._exec(sql)
-
- def _stdout_connection(self, connection):
- def dump(construct, *multiparams, **params):
- self.impl._exec(construct)
-
- return create_engine("%s://" % self.dialect.name,
- strategy="mock", executor=dump)
-
- @property
- def bind(self):
- """Return the current "bind".
-
- In online mode, this is an instance of
- :class:`sqlalchemy.engine.base.Connection`, and is suitable
- for ad-hoc execution of any kind of usage described
- in :ref:`sqlexpression_toplevel` as well as
- for usage with the :meth:`sqlalchemy.schema.Table.create`
- and :meth:`sqlalchemy.schema.MetaData.create_all` methods
- of :class:`.Table`, :class:`.MetaData`.
-
- Note that when "standard output" mode is enabled,
- this bind will be a "mock" connection handler that cannot
- return results and is only appropriate for a very limited
- subset of commands.
-
- """
- return self.connection
-
- def compare_type(self, inspector_column, metadata_column):
- if self._user_compare_type is False:
- return False
-
- if callable(self._user_compare_type):
- user_value = self._user_compare_type(
- self,
- inspector_column,
- metadata_column,
- inspector_column['type'],
- metadata_column.type
- )
- if user_value is not None:
- return user_value
-
- return self.impl.compare_type(
- inspector_column,
- metadata_column)
-
- def compare_server_default(self, inspector_column,
- metadata_column,
- rendered_metadata_default):
-
- if self._user_compare_server_default is False:
- return False
-
- if callable(self._user_compare_server_default):
- user_value = self._user_compare_server_default(
- self,
- inspector_column,
- metadata_column,
- inspector_column['default'],
- metadata_column.server_default,
- rendered_metadata_default
- )
- if user_value is not None:
- return user_value
-
- return self.impl.compare_server_default(
- inspector_column,
- metadata_column,
- rendered_metadata_default)
-
-config = None
-"""The current :class:`.Config` object.
-
-This is the gateway to the ``alembic.ini`` or other
-.ini file in use for the current command.
-
-This function does not require that the :class:`.Context`
-has been configured.
-
-"""
-
-_context_opts = {}
-_context = None
-_script = None
-
-def _opts(cfg, script, **kw):
- """Set up options that will be used by the :func:`.configure`
- function.
-
- This basically sets some global variables.
-
- """
- global config, _script
- _context_opts.update(kw)
- _script = script
- config = cfg
-
-def _clear():
- global _context_opts, _context, _script
- _context = _script = None
- _context_opts = {}
-
-def is_offline_mode():
- """Return True if the current migrations environment
- is running in "offline mode".
-
- This is ``True`` or ``False`` depending
- on the the ``--sql`` flag passed.
-
- This function does not require that the :class:`.Context`
- has been configured.
-
- """
- return _context_opts.get('as_sql', False)
-
-def is_transactional_ddl():
- """Return True if the context is configured to expect a
- transactional DDL capable backend.
-
- This defaults to the type of database in use, and
- can be overridden by the ``transactional_ddl`` argument
- to :func:`.configure`
-
- This function requires that a :class:`.Context` has first been
- made available via :func:`.configure`.
-
- """
- return get_context().impl.transactional_ddl
-
-def requires_connection():
- return not is_offline_mode()
-
-def get_head_revision():
- """Return the hex identifier of the 'head' revision.
-
- This function does not require that the :class:`.Context`
- has been configured.
-
- """
- return _script._as_rev_number("head")
-
-def get_starting_revision_argument():
- """Return the 'starting revision' argument,
- if the revision was passed using ``start:end``.
-
- This is only meaningful in "offline" mode.
- Returns ``None`` if no value is available
- or was configured.
-
- This function does not require that the :class:`.Context`
- has been configured.
-
- """
- if _context is not None:
- return _script._as_rev_number(get_context()._start_from_rev)
- elif 'starting_rev' in _context_opts:
- return _script._as_rev_number(_context_opts['starting_rev'])
- else:
- raise util.CommandError("No starting revision argument is available.")
-
-def get_revision_argument():
- """Get the 'destination' revision argument.
-
- This is typically the argument passed to the
- ``upgrade`` or ``downgrade`` command.
-
- If it was specified as ``head``, the actual
- version number is returned; if specified
- as ``base``, ``None`` is returned.
-
- This function does not require that the :class:`.Context`
- has been configured.
-
- """
- return _script._as_rev_number(_context_opts['destination_rev'])
-
-def get_tag_argument():
- """Return the value passed for the ``--tag`` argument, if any.
-
- The ``--tag`` argument is not used directly by Alembic,
- but is available for custom ``env.py`` configurations that
- wish to use it; particularly for offline generation scripts
- that wish to generate tagged filenames.
-
- This function does not require that the :class:`.Context`
- has been configured.
-
- """
- return _context_opts.get('tag', None)
-
-def configure(
- connection=None,
- url=None,
- dialect_name=None,
- transactional_ddl=None,
- output_buffer=None,
- starting_rev=None,
- tag=None,
- target_metadata=None,
- compare_type=False,
- compare_server_default=False,
- upgrade_token="upgrades",
- downgrade_token="downgrades",
- alembic_module_prefix="op.",
- sqlalchemy_module_prefix="sa.",
- **kw
- ):
- """Configure the migration environment.
-
- The important thing needed here is first a way to figure out
- what kind of "dialect" is in use. The second is to pass
- an actual database connection, if one is required.
-
- If the :func:`.is_offline_mode` function returns ``True``,
- then no connection is needed here. Otherwise, the
- ``connection`` parameter should be present as an
- instance of :class:`sqlalchemy.engine.base.Connection`.
-
- This function is typically called from the ``env.py``
- script within a migration environment. It can be called
- multiple times for an invocation. The most recent :class:`~sqlalchemy.engine.base.Connection`
- for which it was called is the one that will be operated upon
- by the next call to :func:`.run_migrations`.
-
- General parameters:
-
- :param connection: a :class:`~sqlalchemy.engine.base.Connection` to use
- for SQL execution in "online" mode. When present, is also used to
- determine the type of dialect in use.
- :param url: a string database url, or a :class:`sqlalchemy.engine.url.URL` object.
- The type of dialect to be used will be derived from this if ``connection`` is
- not passed.
- :param dialect_name: string name of a dialect, such as "postgresql", "mssql", etc.
- The type of dialect to be used will be derived from this if ``connection``
- and ``url`` are not passed.
- :param transactional_ddl: Force the usage of "transactional" DDL on or off;
- this otherwise defaults to whether or not the dialect in use supports it.
- :param output_buffer: a file-like object that will be used for textual output
- when the ``--sql`` option is used to generate SQL scripts. Defaults to
- ``sys.stdout`` if not passed here and also not present on the :class:`.Config`
- object. The value here overrides that of the :class:`.Config` object.
- :param starting_rev: Override the "starting revision" argument when using
- ``--sql`` mode.
- :param tag: a string tag for usage by custom ``env.py`` scripts. Set via
- the ``--tag`` option, can be overridden here.
-
- Parameters specific to the autogenerate feature, when ``alembic revision``
- is run with the ``--autogenerate`` feature:
-
- :param target_metadata: a :class:`sqlalchemy.schema.MetaData` object that
- will be consulted during autogeneration. The tables present will be compared against
- what is locally available on the target :class:`~sqlalchemy.engine.base.Connection`
- to produce candidate upgrade/downgrade operations.
-
- :param compare_type: Indicates type comparison behavior during an autogenerate
- operation. Defaults to ``False`` which disables type comparison. Set to
- ``True`` to turn on default type comparison, which has varied accuracy depending
- on backend.
-
- To customize type comparison behavior, a callable may be specified which
- can filter type comparisons during an autogenerate operation. The format of
- this callable is::
-
- def my_compare_type(context, inspected_column,
- metadata_column, inspected_type, metadata_type):
- # return True if the types are different,
- # False if not, or None to allow the default implementation
- # to compare these types
- pass
-
- ``inspected_column`` is a dictionary structure as returned by
- :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas
- ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from
- the local model environment.
-
- A return value of ``None`` indicates to allow default type comparison to
- proceed.
-
- :param compare_server_default: Indicates server default comparison behavior during
- an autogenerate operation. Defaults to ``False`` which disables server default
- comparison. Set to ``True`` to turn on server default comparison, which has
- varied accuracy depending on backend.
-
- To customize server default comparison behavior, a callable may be specified
- which can filter server default comparisons during an autogenerate operation.
- defaults during an autogenerate operation. The format of this callable is::
-
- def my_compare_server_default(context, inspected_column,
- metadata_column, inspected_default, metadata_default,
- rendered_metadata_default):
- # return True if the defaults are different,
- # False if not, or None to allow the default implementation
- # to compare these defaults
- pass
-
- ``inspected_column`` is a dictionary structure as returned by
- :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas
- ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from
- the local model environment.
-
- A return value of ``None`` indicates to allow default server default comparison
- to proceed. Note that some backends such as Postgresql actually execute
- the two defaults on the database side to compare for equivalence.
-
- :param upgrade_token: When autogenerate completes, the text of the
- candidate upgrade operations will be present in this template
- variable when ``script.py.mako`` is rendered. Defaults to ``upgrades``.
- :param downgrade_token: When autogenerate completes, the text of the
- candidate downgrade operations will be present in this
- template variable when ``script.py.mako`` is rendered. Defaults to
- ``downgrades``.
-
- :param alembic_module_prefix: When autogenerate refers to Alembic
- :mod:`alembic.op` constructs, this prefix will be used
- (i.e. ``op.create_table``) Defaults to "``op.``".
- Can be ``None`` to indicate no prefix.
-
- :param sqlalchemy_module_prefix: When autogenerate refers to SQLAlchemy
- :class:`~sqlalchemy.schema.Column` or type classes, this prefix will be used
- (i.e. ``sa.Column("somename", sa.Integer)``) Defaults to "``sa.``".
- Can be ``None`` to indicate no prefix.
- Note that when dialect-specific types are rendered, autogenerate
- will render them using the dialect module name, i.e. ``mssql.BIT()``,
- ``postgresql.UUID()``.
-
- Parameters specific to individual backends:
-
- :param mssql_batch_separator: The "batch separator" which will be placed
- between each statement when generating offline SQL Server
- migrations. Defaults to ``GO``. Note this is in addition to the customary
- semicolon ``;`` at the end of each statement; SQL Server considers
- the "batch separator" to denote the end of an individual statement
- execution, and cannot group certain dependent operations in
- one step.
-
- """
-
- if connection:
- dialect = connection.dialect
- elif url:
- url = sqla_url.make_url(url)
- dialect = url.get_dialect()()
- elif dialect_name:
- url = sqla_url.make_url("%s://" % dialect_name)
- dialect = url.get_dialect()()
- else:
- raise Exception("Connection, url, or dialect_name is required.")
-
- global _context
- opts = _context_opts
- if transactional_ddl is not None:
- opts["transactional_ddl"] = transactional_ddl
- if output_buffer is not None:
- opts["output_buffer"] = output_buffer
- elif config.output_buffer is not None:
- opts["output_buffer"] = config.output_buffer
- if starting_rev:
- opts['starting_rev'] = starting_rev
- if tag:
- opts['tag'] = tag
- opts['target_metadata'] = target_metadata
- opts['upgrade_token'] = upgrade_token
- opts['downgrade_token'] = downgrade_token
- opts['sqlalchemy_module_prefix'] = sqlalchemy_module_prefix
- opts['alembic_module_prefix'] = alembic_module_prefix
- opts.update(kw)
-
- _context = Context(
- dialect, _script, connection,
- opts['fn'],
- as_sql=opts.get('as_sql', False),
- output_buffer=opts.get("output_buffer"),
- transactional_ddl=opts.get("transactional_ddl"),
- starting_rev=opts.get("starting_rev"),
- compare_type=compare_type,
- compare_server_default=compare_server_default,
- )
-
-def configure_connection(connection):
- """Deprecated; use :func:`alembic.context.configure`."""
- configure(connection=connection)
-
-def run_migrations(**kw):
- """Run migrations as determined by the current command line configuration
- as well as versioning information present (or not) in the current
- database connection (if one is present).
-
- The function accepts optional ``**kw`` arguments. If these are
- passed, they are sent directly to the ``upgrade()`` and ``downgrade()``
- functions within each target revision file. By modifying the
- ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()``
- functions accept arguments, parameters can be passed here so that
- contextual information, usually information to identify a particular
- database in use, can be passed from a custom ``env.py`` script
- to the migration functions.
-
- This function requires that a :class:`.Context` has first been
- made available via :func:`.configure`.
-
- """
- get_context().run_migrations(**kw)
-
-def execute(sql):
- """Execute the given SQL using the current change context.
-
- The behavior of :func:`.context.execute` is the same
- as that of :func:`.op.execute`. Please see that
- function's documentation for full detail including
- caveats and limitations.
-
- This function requires that a :class:`.Context` has first been
- made available via :func:`.configure`.
-
- """
- get_context().execute(sql)
-
-def static_output(text):
- """Emit text directly to the "offline" SQL stream.
-
- Typically this is for emitting comments that
- start with --. The statement is not treated
- as a SQL execution, no ; or batch separator
- is added, etc.
-
- """
- get_context().impl.static_output(text)
-
-def begin_transaction():
- """Return a context manager that will
- enclose an operation within a "transaction",
- as defined by the environment's offline
- and transactional DDL settings.
-
- e.g.::
-
- with context.begin_transaction():
- context.run_migrations()
-
- :func:`.begin_transaction` is intended to
- "do the right thing" regardless of
- calling context:
-
- * If :func:`.is_transactional_ddl` is ``False``,
- returns a "do nothing" context manager
- which otherwise produces no transactional
- state or directives.
- * If :func:`.is_offline_mode` is ``True``,
- returns a context manager that will
- invoke the :meth:`.DefaultImpl.emit_begin`
- and :meth:`.DefaultImpl.emit_commit`
- methods, which will produce the string
- directives ``BEGIN`` and ``COMMIT`` on
- the output stream, as rendered by the
- target backend (e.g. SQL Server would
- emit ``BEGIN TRANSACTION``).
- * Otherwise, calls :meth:`sqlalchemy.engine.base.Connection.begin`
- on the current online connection, which
- returns a :class:`sqlalchemy.engine.base.Transaction`
- object. This object demarcates a real
- transaction and is itself a context manager,
- which will roll back if an exception
- is raised.
-
- Note that a custom ``env.py`` script which
- has more specific transactional needs can of course
- manipulate the :class:`~sqlalchemy.engine.base.Connection`
- directly to produce transactional state in "online"
- mode.
-
- """
- if not is_transactional_ddl():
- @contextmanager
- def do_nothing():
- yield
- return do_nothing()
- elif is_offline_mode():
- @contextmanager
- def begin_commit():
- get_context().impl.emit_begin()
- yield
- get_context().impl.emit_commit()
- return begin_commit()
- else:
- return get_bind().begin()
-
-
-def get_context():
- """Return the current :class:`.Context` object.
-
- If :func:`.configure` has not been called yet, raises
- an exception.
-
- Generally, env.py scripts should access the module-level functions
- in :mod:`alebmic.context` to get at this object's functionality.
-
- """
- if _context is None:
- raise Exception("No context has been configured yet.")
- return _context
-
-def get_bind():
- """Return the current 'bind'.
-
- In "online" mode, this is the
- :class:`sqlalchemy.engine.Connection` currently being used
- to emit SQL to the database.
-
- This function requires that a :class:`.Context` has first been
- made available via :func:`.configure`.
-
- """
- return get_context().bind
-
-def get_impl():
- return get_context().impl \ No newline at end of file
diff --git a/alembic/environment.py b/alembic/environment.py
new file mode 100644
index 0000000..1251879
--- /dev/null
+++ b/alembic/environment.py
@@ -0,0 +1,448 @@
+import alembic
+from alembic.operations import Operations
+from alembic.migration import MigrationContext
+from alembic import util
+from contextlib import contextmanager
+from sqlalchemy.engine import url as sqla_url
+
+class EnvironmentContext(object):
+ """Represent the state made available to an env.py script."""
+
+ _migration_context = None
+
+ def __init__(self, config, script, **kw):
+ self.config = config
+ self.script = script
+ self.context_opts = kw
+
+ def __enter__(self):
+ """Establish a context which provides a
+ :class:`.EnvironmentContext` object to
+ env.py scripts.
+
+ The :class:`.EnvironmentContext` will
+ be made available as ``from alembic import context``.
+
+ """
+ alembic.context = self
+ return self
+
+ def __exit__(self, *arg, **kw):
+ del alembic.context
+ alembic.op._proxy = None
+
+ def is_offline_mode(self):
+ """Return True if the current migrations environment
+ is running in "offline mode".
+
+ This is ``True`` or ``False`` depending
+        on the ``--sql`` flag passed.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ """
+ return self.context_opts.get('as_sql', False)
+
+ def is_transactional_ddl(self):
+ """Return True if the context is configured to expect a
+ transactional DDL capable backend.
+
+ This defaults to the type of database in use, and
+ can be overridden by the ``transactional_ddl`` argument
+ to :meth:`.configure`
+
+ This function requires that a :class:`.MigrationContext` has first been
+ made available via :meth:`.configure`.
+
+ """
+ return self.migration_context.impl.transactional_ddl
+
+ def requires_connection(self):
+ return not self.is_offline_mode()
+
+ def get_head_revision(self):
+ """Return the hex identifier of the 'head' revision.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ """
+ return self.script._as_rev_number("head")
+
+ def get_starting_revision_argument(self):
+ """Return the 'starting revision' argument,
+ if the revision was passed using ``start:end``.
+
+        This is only meaningful in "offline" mode.
+        Raises :class:`.util.CommandError` if no starting
+        revision value is available or was configured.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ """
+ if self._migration_context is not None:
+ return self.script._as_rev_number(self.migration_context._start_from_rev)
+ elif 'starting_rev' in self.context_opts:
+ return self.script._as_rev_number(self.context_opts['starting_rev'])
+ else:
+ raise util.CommandError("No starting revision argument is available.")
+
+ def get_revision_argument(self):
+ """Get the 'destination' revision argument.
+
+ This is typically the argument passed to the
+ ``upgrade`` or ``downgrade`` command.
+
+ If it was specified as ``head``, the actual
+ version number is returned; if specified
+ as ``base``, ``None`` is returned.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ """
+ return self.script._as_rev_number(self.context_opts['destination_rev'])
+
+ def get_tag_argument(self):
+ """Return the value passed for the ``--tag`` argument, if any.
+
+ The ``--tag`` argument is not used directly by Alembic,
+ but is available for custom ``env.py`` configurations that
+ wish to use it; particularly for offline generation scripts
+ that wish to generate tagged filenames.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ """
+ return self.context_opts.get('tag', None)
+
+ def configure(self,
+ connection=None,
+ url=None,
+ dialect_name=None,
+ transactional_ddl=None,
+ output_buffer=None,
+ starting_rev=None,
+ tag=None,
+ target_metadata=None,
+ compare_type=False,
+ compare_server_default=False,
+ upgrade_token="upgrades",
+ downgrade_token="downgrades",
+ alembic_module_prefix="op.",
+ sqlalchemy_module_prefix="sa.",
+ **kw
+ ):
+ """Configure the migration environment.
+
+ The important thing needed here is first a way to figure out
+ what kind of "dialect" is in use. The second is to pass
+ an actual database connection, if one is required.
+
+ If the :func:`.is_offline_mode` function returns ``True``,
+ then no connection is needed here. Otherwise, the
+ ``connection`` parameter should be present as an
+ instance of :class:`sqlalchemy.engine.base.Connection`.
+
+ This function is typically called from the ``env.py``
+ script within a migration environment. It can be called
+ multiple times for an invocation. The most recent :class:`~sqlalchemy.engine.base.Connection`
+ for which it was called is the one that will be operated upon
+ by the next call to :func:`.run_migrations`.
+
+ General parameters:
+
+ :param connection: a :class:`~sqlalchemy.engine.base.Connection` to use
+ for SQL execution in "online" mode. When present, is also used to
+ determine the type of dialect in use.
+ :param url: a string database url, or a :class:`sqlalchemy.engine.url.URL` object.
+ The type of dialect to be used will be derived from this if ``connection`` is
+ not passed.
+ :param dialect_name: string name of a dialect, such as "postgresql", "mssql", etc.
+ The type of dialect to be used will be derived from this if ``connection``
+ and ``url`` are not passed.
+ :param transactional_ddl: Force the usage of "transactional" DDL on or off;
+ this otherwise defaults to whether or not the dialect in use supports it.
+ :param output_buffer: a file-like object that will be used for textual output
+ when the ``--sql`` option is used to generate SQL scripts. Defaults to
+ ``sys.stdout`` if not passed here and also not present on the :class:`.Config`
+ object. The value here overrides that of the :class:`.Config` object.
+ :param starting_rev: Override the "starting revision" argument when using
+ ``--sql`` mode.
+ :param tag: a string tag for usage by custom ``env.py`` scripts. Set via
+ the ``--tag`` option, can be overridden here.
+
+ Parameters specific to the autogenerate feature, when ``alembic revision``
+ is run with the ``--autogenerate`` feature:
+
+ :param target_metadata: a :class:`sqlalchemy.schema.MetaData` object that
+ will be consulted during autogeneration. The tables present will be compared against
+ what is locally available on the target :class:`~sqlalchemy.engine.base.Connection`
+ to produce candidate upgrade/downgrade operations.
+
+ :param compare_type: Indicates type comparison behavior during an autogenerate
+ operation. Defaults to ``False`` which disables type comparison. Set to
+ ``True`` to turn on default type comparison, which has varied accuracy depending
+ on backend.
+
+ To customize type comparison behavior, a callable may be specified which
+ can filter type comparisons during an autogenerate operation. The format of
+ this callable is::
+
+ def my_compare_type(context, inspected_column,
+ metadata_column, inspected_type, metadata_type):
+ # return True if the types are different,
+ # False if not, or None to allow the default implementation
+ # to compare these types
+ pass
+
+ ``inspected_column`` is a dictionary structure as returned by
+ :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas
+ ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from
+ the local model environment.
+
+ A return value of ``None`` indicates to allow default type comparison to
+ proceed.
+
+ :param compare_server_default: Indicates server default comparison behavior during
+ an autogenerate operation. Defaults to ``False`` which disables server default
+ comparison. Set to ``True`` to turn on server default comparison, which has
+ varied accuracy depending on backend.
+
+ To customize server default comparison behavior, a callable may be specified
+          which can filter server default comparisons during an
+          autogenerate operation.  The format of this callable is::
+
+ def my_compare_server_default(context, inspected_column,
+ metadata_column, inspected_default, metadata_default,
+ rendered_metadata_default):
+ # return True if the defaults are different,
+ # False if not, or None to allow the default implementation
+ # to compare these defaults
+ pass
+
+ ``inspected_column`` is a dictionary structure as returned by
+ :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas
+ ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from
+ the local model environment.
+
+ A return value of ``None`` indicates to allow default server default comparison
+ to proceed. Note that some backends such as Postgresql actually execute
+ the two defaults on the database side to compare for equivalence.
+
+ :param upgrade_token: When autogenerate completes, the text of the
+ candidate upgrade operations will be present in this template
+ variable when ``script.py.mako`` is rendered. Defaults to ``upgrades``.
+ :param downgrade_token: When autogenerate completes, the text of the
+ candidate downgrade operations will be present in this
+ template variable when ``script.py.mako`` is rendered. Defaults to
+ ``downgrades``.
+
+ :param alembic_module_prefix: When autogenerate refers to Alembic
+ :mod:`alembic.op` constructs, this prefix will be used
+ (i.e. ``op.create_table``) Defaults to "``op.``".
+ Can be ``None`` to indicate no prefix.
+
+ :param sqlalchemy_module_prefix: When autogenerate refers to SQLAlchemy
+ :class:`~sqlalchemy.schema.Column` or type classes, this prefix will be used
+ (i.e. ``sa.Column("somename", sa.Integer)``) Defaults to "``sa.``".
+ Can be ``None`` to indicate no prefix.
+ Note that when dialect-specific types are rendered, autogenerate
+ will render them using the dialect module name, i.e. ``mssql.BIT()``,
+ ``postgresql.UUID()``.
+
+ Parameters specific to individual backends:
+
+ :param mssql_batch_separator: The "batch separator" which will be placed
+ between each statement when generating offline SQL Server
+ migrations. Defaults to ``GO``. Note this is in addition to the customary
+ semicolon ``;`` at the end of each statement; SQL Server considers
+ the "batch separator" to denote the end of an individual statement
+ execution, and cannot group certain dependent operations in
+ one step.
+
+ """
+
+ if connection:
+ dialect = connection.dialect
+ elif url:
+ url = sqla_url.make_url(url)
+ dialect = url.get_dialect()()
+ elif dialect_name:
+ url = sqla_url.make_url("%s://" % dialect_name)
+ dialect = url.get_dialect()()
+ else:
+ raise Exception("Connection, url, or dialect_name is required.")
+
+ opts = self.context_opts
+ if transactional_ddl is not None:
+ opts["transactional_ddl"] = transactional_ddl
+ if output_buffer is not None:
+ opts["output_buffer"] = output_buffer
+ elif self.config.output_buffer is not None:
+ opts["output_buffer"] = self.config.output_buffer
+ if starting_rev:
+ opts['starting_rev'] = starting_rev
+ if tag:
+ opts['tag'] = tag
+ opts['target_metadata'] = target_metadata
+ opts['upgrade_token'] = upgrade_token
+ opts['downgrade_token'] = downgrade_token
+ opts['sqlalchemy_module_prefix'] = sqlalchemy_module_prefix
+ opts['alembic_module_prefix'] = alembic_module_prefix
+ opts.update(kw)
+
+ self._migration_context = MigrationContext(
+ dialect, self.script, connection,
+ opts,
+ as_sql=opts.get('as_sql', False),
+ output_buffer=opts.get("output_buffer"),
+ transactional_ddl=opts.get("transactional_ddl"),
+ starting_rev=opts.get("starting_rev"),
+ compare_type=compare_type,
+ compare_server_default=compare_server_default,
+ )
+ alembic.op._proxy = Operations(self._migration_context)
+
+ def run_migrations(self, **kw):
+ """Run migrations as determined by the current command line configuration
+ as well as versioning information present (or not) in the current
+ database connection (if one is present).
+
+ The function accepts optional ``**kw`` arguments. If these are
+ passed, they are sent directly to the ``upgrade()`` and ``downgrade()``
+ functions within each target revision file. By modifying the
+ ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()``
+ functions accept arguments, parameters can be passed here so that
+ contextual information, usually information to identify a particular
+ database in use, can be passed from a custom ``env.py`` script
+ to the migration functions.
+
+ This function requires that a :class:`.MigrationContext` has first been
+ made available via :func:`.configure`.
+
+ """
+ self.migration_context.run_migrations(**kw)
+
+ def execute(self, sql):
+ """Execute the given SQL using the current change context.
+
+ The behavior of :func:`.context.execute` is the same
+ as that of :func:`.op.execute`. Please see that
+ function's documentation for full detail including
+ caveats and limitations.
+
+ This function requires that a :class:`.MigrationContext` has first been
+ made available via :func:`.configure`.
+
+ """
+ self.migration_context.execute(sql)
+
+ def static_output(self, text):
+ """Emit text directly to the "offline" SQL stream.
+
+ Typically this is for emitting comments that
+ start with --. The statement is not treated
+ as a SQL execution, no ; or batch separator
+ is added, etc.
+
+ """
+ self.migration_context.impl.static_output(text)
+
+ def begin_transaction(self):
+ """Return a context manager that will
+ enclose an operation within a "transaction",
+ as defined by the environment's offline
+ and transactional DDL settings.
+
+ e.g.::
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+ :meth:`.begin_transaction` is intended to
+ "do the right thing" regardless of
+ calling context:
+
+ * If :meth:`.is_transactional_ddl` is ``False``,
+ returns a "do nothing" context manager
+ which otherwise produces no transactional
+ state or directives.
+ * If :meth:`.is_offline_mode` is ``True``,
+ returns a context manager that will
+ invoke the :meth:`.DefaultImpl.emit_begin`
+ and :meth:`.DefaultImpl.emit_commit`
+ methods, which will produce the string
+ directives ``BEGIN`` and ``COMMIT`` on
+ the output stream, as rendered by the
+ target backend (e.g. SQL Server would
+ emit ``BEGIN TRANSACTION``).
+ * Otherwise, calls :meth:`sqlalchemy.engine.base.Connection.begin`
+ on the current online connection, which
+ returns a :class:`sqlalchemy.engine.base.Transaction`
+ object. This object demarcates a real
+ transaction and is itself a context manager,
+ which will roll back if an exception
+ is raised.
+
+ Note that a custom ``env.py`` script which
+ has more specific transactional needs can of course
+ manipulate the :class:`~sqlalchemy.engine.base.Connection`
+ directly to produce transactional state in "online"
+ mode.
+
+ """
+ if not self.is_transactional_ddl():
+ @contextmanager
+ def do_nothing():
+ yield
+ return do_nothing()
+ elif self.is_offline_mode():
+ @contextmanager
+ def begin_commit():
+ self.migration_context.impl.emit_begin()
+ yield
+ self.migration_context.impl.emit_commit()
+ return begin_commit()
+ else:
+ return self.get_bind().begin()
+
+ @property
+ def migration_context(self):
+ """Return the current :class:`.MigrationContext` object.
+
+ If :meth:`.EnvironmentContext.configure` has not been called yet, raises
+ an exception.
+
+ Generally, env.py scripts should access the module-level functions
+        in :mod:`alembic.context` to get at this object's functionality.
+
+ """
+ if self._migration_context is None:
+ raise Exception("No context has been configured yet.")
+ return self._migration_context
+
+ def get_context(self):
+ """A synonym for :attr:`.EnvironmentContext.migration_context`."""
+
+ return self.migration_context
+
+ def get_bind(self):
+ """Return the current 'bind'.
+
+ In "online" mode, this is the
+ :class:`sqlalchemy.engine.Connection` currently being used
+ to emit SQL to the database.
+
+ This function requires that a :class:`.MigrationContext` has first been
+ made available via :meth:`.configure`.
+
+ """
+ return self.migration_context.bind
+
+ def get_impl(self):
+ return self.migration_context.impl
+
+configure = EnvironmentContext
diff --git a/alembic/migration.py b/alembic/migration.py
new file mode 100644
index 0000000..69e6930
--- /dev/null
+++ b/alembic/migration.py
@@ -0,0 +1,191 @@
+from alembic import util
+from sqlalchemy import MetaData, Table, Column, String, literal_column, \
+ text
+from sqlalchemy import create_engine
+from alembic import ddl
+import sys
+from contextlib import contextmanager
+
+import logging
+log = logging.getLogger(__name__)
+
+_meta = MetaData()
+_version = Table('alembic_version', _meta,
+ Column('version_num', String(32), nullable=False)
+ )
+
+class MigrationContext(object):
+ """Represent the state made available to a migration script,
+ or otherwise a series of migration operations.
+
+ Mediates the relationship between an ``env.py`` environment script,
+ a :class:`.ScriptDirectory` instance, and a :class:`.DefaultImpl` instance.
+
+    The :class:`.MigrationContext` is available via the
+    :meth:`.EnvironmentContext.get_context` method, though usually it is
+    referenced behind the scenes by the :class:`.EnvironmentContext`.
+
+ """
+ def __init__(self, dialect, script, connection,
+ opts,
+ as_sql=False,
+ output_buffer=None,
+ transactional_ddl=None,
+ starting_rev=None,
+ compare_type=False,
+ compare_server_default=False):
+ self.dialect = dialect
+ # TODO: need this ?
+ self.script = script
+ if as_sql:
+ self.connection = self._stdout_connection(connection)
+ assert self.connection is not None
+ else:
+ self.connection = connection
+ self._migrations_fn = opts.get('fn')
+ self.as_sql = as_sql
+ self.output_buffer = output_buffer if output_buffer else sys.stdout
+
+ self._user_compare_type = compare_type
+ self._user_compare_server_default = compare_server_default
+
+ self._start_from_rev = starting_rev
+ self.impl = ddl.DefaultImpl.get_by_dialect(dialect)(
+ dialect, self.connection, self.as_sql,
+ transactional_ddl,
+ self.output_buffer,
+ opts
+ )
+ log.info("Context impl %s.", self.impl.__class__.__name__)
+ if self.as_sql:
+ log.info("Generating static SQL")
+ log.info("Will assume %s DDL.",
+ "transactional" if self.impl.transactional_ddl
+ else "non-transactional")
+
+ def _current_rev(self):
+ if self.as_sql:
+ return self._start_from_rev
+ else:
+ if self._start_from_rev:
+ raise util.CommandError(
+ "Can't specify current_rev to context "
+ "when using a database connection")
+ _version.create(self.connection, checkfirst=True)
+ return self.connection.scalar(_version.select())
+
+ def _update_current_rev(self, old, new):
+ if old == new:
+ return
+ if new is None:
+ self.impl._exec(_version.delete())
+ elif old is None:
+ self.impl._exec(_version.insert().
+ values(version_num=literal_column("'%s'" % new))
+ )
+ else:
+ self.impl._exec(_version.update().
+ values(version_num=literal_column("'%s'" % new))
+ )
+
+ def run_migrations(self, **kw):
+
+ current_rev = rev = False
+ self.impl.start_migrations()
+ for change, prev_rev, rev in self._migrations_fn(
+ self._current_rev()):
+ if current_rev is False:
+ current_rev = prev_rev
+ if self.as_sql and not current_rev:
+ _version.create(self.connection)
+ log.info("Running %s %s -> %s", change.__name__, prev_rev, rev)
+ if self.as_sql:
+ self.impl.static_output(
+ "-- Running %s %s -> %s" %
+ (change.__name__, prev_rev, rev)
+ )
+ change(**kw)
+ if not self.impl.transactional_ddl:
+ self._update_current_rev(prev_rev, rev)
+ prev_rev = rev
+
+ if rev is not False:
+ if self.impl.transactional_ddl:
+ self._update_current_rev(current_rev, rev)
+
+ if self.as_sql and not rev:
+ _version.drop(self.connection)
+
+ def execute(self, sql):
+ self.impl._exec(sql)
+
+ def _stdout_connection(self, connection):
+ def dump(construct, *multiparams, **params):
+ self.impl._exec(construct)
+
+ return create_engine("%s://" % self.dialect.name,
+ strategy="mock", executor=dump)
+
+ @property
+ def bind(self):
+ """Return the current "bind".
+
+ In online mode, this is an instance of
+ :class:`sqlalchemy.engine.base.Connection`, and is suitable
+ for ad-hoc execution of any kind of usage described
+ in :ref:`sqlexpression_toplevel` as well as
+ for usage with the :meth:`sqlalchemy.schema.Table.create`
+ and :meth:`sqlalchemy.schema.MetaData.create_all` methods
+ of :class:`.Table`, :class:`.MetaData`.
+
+ Note that when "standard output" mode is enabled,
+ this bind will be a "mock" connection handler that cannot
+ return results and is only appropriate for a very limited
+ subset of commands.
+
+ """
+ return self.connection
+
+ def compare_type(self, inspector_column, metadata_column):
+ if self._user_compare_type is False:
+ return False
+
+ if callable(self._user_compare_type):
+ user_value = self._user_compare_type(
+ self,
+ inspector_column,
+ metadata_column,
+ inspector_column['type'],
+ metadata_column.type
+ )
+ if user_value is not None:
+ return user_value
+
+ return self.impl.compare_type(
+ inspector_column,
+ metadata_column)
+
+ def compare_server_default(self, inspector_column,
+ metadata_column,
+ rendered_metadata_default):
+
+ if self._user_compare_server_default is False:
+ return False
+
+ if callable(self._user_compare_server_default):
+ user_value = self._user_compare_server_default(
+ self,
+ inspector_column,
+ metadata_column,
+ inspector_column['default'],
+ metadata_column.server_default,
+ rendered_metadata_default
+ )
+ if user_value is not None:
+ return user_value
+
+ return self.impl.compare_server_default(
+ inspector_column,
+ metadata_column,
+ rendered_metadata_default)
+
diff --git a/alembic/op.py b/alembic/op.py
deleted file mode 100644
index 13b057d..0000000
--- a/alembic/op.py
+++ /dev/null
@@ -1,603 +0,0 @@
-from alembic import util
-from alembic.ddl import impl
-from alembic.context import get_impl, get_context
-from sqlalchemy.types import NULLTYPE, Integer
-from sqlalchemy import schema, sql
-
-__all__ = sorted([
- 'alter_column',
- 'add_column',
- 'drop_column',
- 'drop_constraint',
- 'create_foreign_key',
- 'create_table',
- 'drop_table',
- 'drop_index',
- 'create_index',
- 'inline_literal',
- 'bulk_insert',
- 'rename_table',
- 'create_unique_constraint',
- 'create_check_constraint',
- 'get_context',
- 'get_bind',
- 'execute'])
-
-def _foreign_key_constraint(name, source, referent, local_cols, remote_cols):
- m = schema.MetaData()
- t1 = schema.Table(source, m,
- *[schema.Column(n, NULLTYPE) for n in local_cols])
- t2 = schema.Table(referent, m,
- *[schema.Column(n, NULLTYPE) for n in remote_cols])
-
- f = schema.ForeignKeyConstraint(local_cols,
- ["%s.%s" % (referent, n)
- for n in remote_cols],
- name=name
- )
- t1.append_constraint(f)
-
- return f
-
-def _unique_constraint(name, source, local_cols, **kw):
- t = schema.Table(source, schema.MetaData(),
- *[schema.Column(n, NULLTYPE) for n in local_cols])
- kw['name'] = name
- uq = schema.UniqueConstraint(*t.c, **kw)
- # TODO: need event tests to ensure the event
- # is fired off here
- t.append_constraint(uq)
- return uq
-
-def _check_constraint(name, source, condition, **kw):
- t = schema.Table(source, schema.MetaData(),
- schema.Column('x', Integer))
- ck = schema.CheckConstraint(condition, name=name, **kw)
- t.append_constraint(ck)
- return ck
-
-def _table(name, *columns, **kw):
- m = schema.MetaData()
- t = schema.Table(name, m, *columns, **kw)
- for f in t.foreign_keys:
- _ensure_table_for_fk(m, f)
- return t
-
-def _column(name, type_, **kw):
- return schema.Column(name, type_, **kw)
-
-def _index(name, tablename, columns, **kw):
- t = schema.Table(tablename, schema.MetaData(),
- *[schema.Column(n, NULLTYPE) for n in columns]
- )
- return schema.Index(name, *list(t.c), **kw)
-
-def _ensure_table_for_fk(metadata, fk):
- """create a placeholder Table object for the referent of a
- ForeignKey.
-
- """
- if isinstance(fk._colspec, basestring):
- table_key, cname = fk._colspec.split('.')
- if '.' in table_key:
- tokens = tname.split('.')
- sname = ".".join(tokens[0:-1])
- tname = tokens[-1]
- else:
- tname = table_key
- sname = None
- if table_key not in metadata.tables:
- rel_t = schema.Table(tname, metadata, schema=sname)
- else:
- rel_t = metadata.tables[table_key]
- if cname not in rel_t.c:
- rel_t.append_column(schema.Column(cname, NULLTYPE))
-
-def rename_table(old_table_name, new_table_name, schema=None):
- """Emit an ALTER TABLE to rename a table.
-
- :param old_table_name: old name.
- :param new_table_name: new name.
- :param schema: Optional, name of schema to operate within.
-
- """
- get_impl().rename_table(
- old_table_name,
- new_table_name,
- schema=schema
- )
-
-def alter_column(table_name, column_name,
- nullable=None,
- server_default=False,
- name=None,
- type_=None,
- existing_type=None,
- existing_server_default=False,
- existing_nullable=None,
-):
- """Issue an "alter column" instruction using the
- current change context.
-
- Generally, only that aspect of the column which
- is being changed, i.e. name, type, nullability,
- default, needs to be specified. Multiple changes
- can also be specified at once and the backend should
- "do the right thing", emitting each change either
- separately or together as the backend allows.
-
- MySQL has special requirements here, since MySQL
- cannot ALTER a column without a full specification.
- When producing MySQL-compatible migration files,
- it is recommended that the ``existing_type``,
- ``existing_server_default``, and ``existing_nullable``
- parameters be present, if not being altered.
-
- Type changes which are against the SQLAlchemy
- "schema" types :class:`~sqlalchemy.types.Boolean`
- and :class:`~sqlalchemy.types.Enum` may also
- add or drop constraints which accompany those
- types on backends that don't support them natively.
- The ``existing_server_default`` argument is
- used in this case as well to remove a previous
- constraint.
-
- :param table_name: string name of the target table.
- :param column_name: string name of the target column,
- as it exists before the operation begins.
- :param nullable: Optional; specify ``True`` or ``False``
- to alter the column's nullability.
- :param server_default: Optional; specify a string
- SQL expression, :func:`~sqlalchemy.sql.expression.text`,
- or :class:`~sqlalchemy.schema.DefaultClause` to indicate
- an alteration to the column's default value.
- Set to ``None`` to have the default removed.
- :param name: Optional; specify a string name here to
- indicate the new name within a column rename operation.
- :param type_: Optional; a :class:`~sqlalchemy.types.TypeEngine`
- type object to specify a change to the column's type.
- For SQLAlchemy types that also indicate a constraint (i.e.
- :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
- the constraint is also generated.
- :param existing_type: Optional; a :class:`~sqlalchemy.types.TypeEngine`
- type object to specify the previous type. This
- is required for all MySQL column alter operations that
- don't otherwise specify a new type, as well as for
- when nullability is being changed on a SQL Server
- column. It is also used if the type is a so-called
- SQLlchemy "schema" type which
- may define a constraint (i.e.
- :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
- so that the constraint can be dropped.
- :param existing_server_default: Optional; The existing
- default value of the column. Required on MySQL if
- an existing default is not being changed; else MySQL
- removes the default.
- :param existing_nullable: Optional; the existing nullability
- of the column. Required on MySQL if the existing nullability
- is not being changed; else MySQL sets this to NULL.
- """
-
- if existing_type:
- t = _table(table_name, schema.Column(column_name, existing_type))
- for constraint in t.constraints:
- if not isinstance(constraint, schema.PrimaryKeyConstraint):
- get_impl().drop_constraint(constraint)
-
- get_impl().alter_column(table_name, column_name,
- nullable=nullable,
- server_default=server_default,
- name=name,
- type_=type_,
- existing_type=existing_type,
- existing_server_default=existing_server_default,
- existing_nullable=existing_nullable,
- )
-
- if type_:
- t = _table(table_name, schema.Column(column_name, type_))
- for constraint in t.constraints:
- if not isinstance(constraint, schema.PrimaryKeyConstraint):
- get_impl().add_constraint(constraint)
-
-def add_column(table_name, column):
- """Issue an "add column" instruction using the current change context.
-
- e.g.::
-
- from alembic.op import add_column
- from sqlalchemy import Column, String
-
- add_column('organization',
- Column('name', String())
- )
-
- The provided :class:`~sqlalchemy.schema.Column` object can also
- specify a :class:`~sqlalchemy.schema.ForeignKey`, referencing
- a remote table name. Alembic will automatically generate a stub
- "referenced" table and emit a second ALTER statement in order
- to add the constraint separately::
-
- from alembic.op import add_column
- from sqlalchemy import Column, INTEGER, ForeignKey
-
- add_column('organization',
- Column('account_id', INTEGER, ForeignKey('accounts.id'))
- )
-
- :param table_name: String name of the parent table.
- :param column: a :class:`sqlalchemy.schema.Column` object
- representing the new column.
-
- """
-
- t = _table(table_name, column)
- get_impl().add_column(
- table_name,
- column
- )
- for constraint in t.constraints:
- if not isinstance(constraint, schema.PrimaryKeyConstraint):
- get_impl().add_constraint(constraint)
-
-def drop_column(table_name, column_name, **kw):
- """Issue a "drop column" instruction using the current change context.
-
- e.g.::
-
- drop_column('organization', 'account_id')
-
- :param table_name: name of table
- :param column_name: name of column
- :param mssql_drop_check: Optional boolean. When ``True``, on
- Microsoft SQL Server only, first
- drop the CHECK constraint on the column using a SQL-script-compatible
- block that selects into a @variable from sys.check_constraints,
- then exec's a separate DROP CONSTRAINT for that constraint.
- :param mssql_drop_default: Optional boolean. When ``True``, on
- Microsoft SQL Server only, first
- drop the DEFAULT constraint on the column using a SQL-script-compatible
- block that selects into a @variable from sys.default_constraints,
- then exec's a separate DROP CONSTRAINT for that default.
-
- """
-
- get_impl().drop_column(
- table_name,
- _column(column_name, NULLTYPE),
- **kw
- )
-
-
-def create_foreign_key(name, source, referent, local_cols, remote_cols):
- """Issue a "create foreign key" instruction using the
- current change context.
-
- e.g.::
-
- from alembic.op import create_foreign_key
- create_foreign_key("fk_user_address", "address", "user", ["user_id"], ["id"])
-
- This internally generates a :class:`~sqlalchemy.schema.Table` object
- containing the necessary columns, then generates a new
- :class:`~sqlalchemy.schema.ForeignKeyConstraint`
- object which it then associates with the :class:`~sqlalchemy.schema.Table`.
- Any event listeners associated with this action will be fired
- off normally. The :class:`~sqlalchemy.schema.AddConstraint`
- construct is ultimately used to generate the ALTER statement.
-
- :param name: Name of the foreign key constraint. The name is necessary
- so that an ALTER statement can be emitted. For setups that
- use an automated naming scheme such as that described at
- `NamingConventions <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/NamingConventions>`_,
- ``name`` here can be ``None``, as the event listener will
- apply the name to the constraint object when it is associated
- with the table.
- :param source: String name of the source table. Currently
- there is no support for dotted schema names.
- :param referent: String name of the destination table. Currently
- there is no support for dotted schema names.
- :param local_cols: a list of string column names in the
- source table.
- :param remote_cols: a list of string column names in the
- remote table.
-
- """
-
- get_impl().add_constraint(
- _foreign_key_constraint(name, source, referent,
- local_cols, remote_cols)
- )
-
-def create_unique_constraint(name, source, local_cols, **kw):
- """Issue a "create unique constraint" instruction using the current change context.
-
- e.g.::
-
- from alembic.op import create_unique_constraint
- create_unique_constraint("uq_user_name", "user", ["name"])
-
- This internally generates a :class:`~sqlalchemy.schema.Table` object
- containing the necessary columns, then generates a new
- :class:`~sqlalchemy.schema.UniqueConstraint`
- object which it then associates with the :class:`~sqlalchemy.schema.Table`.
- Any event listeners associated with this action will be fired
- off normally. The :class:`~sqlalchemy.schema.AddConstraint`
- construct is ultimately used to generate the ALTER statement.
-
- :param name: Name of the unique constraint. The name is necessary
- so that an ALTER statement can be emitted. For setups that
- use an automated naming scheme such as that described at
- `NamingConventions <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/NamingConventions>`_,
- ``name`` here can be ``None``, as the event listener will
- apply the name to the constraint object when it is associated
- with the table.
- :param source: String name of the source table. Currently
- there is no support for dotted schema names.
- :param local_cols: a list of string column names in the
- source table.
- :param deferrable: optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
- issuing DDL for this constraint.
- :param initially: optional string. If set, emit INITIALLY <value> when issuing DDL
- for this constraint.
-
- """
-
- get_impl().add_constraint(
- _unique_constraint(name, source, local_cols,
- **kw)
- )
-
-def create_check_constraint(name, source, condition, **kw):
- """Issue a "create check constraint" instruction using the current change context.
-
- e.g.::
-
- from alembic.op import create_check_constraint
- from sqlalchemy.sql import column, func
-
- create_check_constraint(
- "ck_user_name_len",
- "user",
- func.len(column('name')) > 5
- )
-
- CHECK constraints are usually against a SQL expression, so ad-hoc
- table metadata is usually needed. The function will convert the given
- arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound
- to an anonymous table in order to emit the CREATE statement.
-
- :param name: Name of the check constraint. The name is necessary
- so that an ALTER statement can be emitted. For setups that
- use an automated naming scheme such as that described at
- `NamingConventions <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/NamingConventions>`_,
- ``name`` here can be ``None``, as the event listener will
- apply the name to the constraint object when it is associated
- with the table.
- :param source: String name of the source table. Currently
- there is no support for dotted schema names.
- :param condition: SQL expression that's the condition of the constraint.
- Can be a string or SQLAlchemy expression language structure.
- :param deferrable: optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
- issuing DDL for this constraint.
- :param initially: optional string. If set, emit INITIALLY <value> when issuing DDL
- for this constraint.
-
- """
- get_impl().add_constraint(
- _check_constraint(name, source, condition, **kw)
- )
-
-def create_table(name, *columns, **kw):
- """Issue a "create table" instruction using the current change context.
-
- This directive receives an argument list similar to that of the
- traditional :class:`sqlalchemy.schema.Table` construct, but without the
- metadata::
-
- from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
- from alembic.op import create_table
-
- create_table(
- 'accounts',
- Column('id', INTEGER, primary_key=True),
- Column('name', VARCHAR(50), nullable=False),
- Column('description', NVARCHAR(200))
- )
-
- :param name: Name of the table
- :param \*columns: collection of :class:`~sqlalchemy.schema.Column` objects within
- the table, as well as optional :class:`~sqlalchemy.schema.Constraint` objects
- and :class:`~.sqlalchemy.schema.Index` objects.
- :param emit_events: if ``True``, emit ``before_create`` and ``after_create``
- events when the table is being created. In particular, the Postgresql ENUM
- type will emit a CREATE TYPE within these events.
- :param \**kw: Other keyword arguments are passed to the underlying
- :class:`.Table` object created for the command.
-
- """
- get_impl().create_table(
- _table(name, *columns, **kw)
- )
-
-def drop_table(name):
- """Issue a "drop table" instruction using the current change context.
-
-
- e.g.::
-
- drop_table("accounts")
-
- """
- get_impl().drop_table(
- _table(name)
- )
-
-def create_index(name, tablename, *columns, **kw):
- """Issue a "create index" instruction using the current change context.
-
- e.g.::
-
- from alembic.op import create_index
- create_index('ik_test', 't1', ['foo', 'bar'])
-
- """
-
- get_impl().create_index(
- _index(name, tablename, *columns, **kw)
- )
-
-def drop_index(name):
- """Issue a "drop index" instruction using the current change context.
-
-
- e.g.::
-
- drop_index("accounts")
-
- """
- get_impl().drop_index(_index(name, 'foo', []))
-
-def drop_constraint(name, tablename):
- """Drop a constraint of the given name"""
- t = _table(tablename)
- const = schema.Constraint(name=name)
- t.append_constraint(const)
- get_impl().drop_constraint(const)
-
-def bulk_insert(table, rows):
- """Issue a "bulk insert" operation using the current change context.
-
- This provides a means of representing an INSERT of multiple rows
- which works equally well in the context of executing on a live
- connection as well as that of generating a SQL script. In the
- case of a SQL script, the values are rendered inline into the
- statement.
-
- e.g.::
-
- from datetime import date
- from sqlalchemy.sql import table, column
- from sqlalchemy import String, Integer, Date
-
- # Create an ad-hoc table to use for the insert statement.
- accounts_table = table('account',
- column('id', Integer),
- column('name', String),
- column('create_date', Date)
- )
-
- bulk_insert(accounts_table,
- [
- {'id':1, 'name':'John Smith', 'create_date':date(2010, 10, 5)},
- {'id':2, 'name':'Ed Williams', 'create_date':date(2007, 5, 27)},
- {'id':3, 'name':'Wendy Jones', 'create_date':date(2008, 8, 15)},
- ]
- )
- """
- get_impl().bulk_insert(table, rows)
-
-def inline_literal(value, type_=None):
- """Produce an 'inline literal' expression, suitable for
- using in an INSERT, UPDATE, or DELETE statement.
-
- When using Alembic in "offline" mode, CRUD operations
- aren't compatible with SQLAlchemy's default behavior surrounding
- literal values,
- which is that they are converted into bound values and passed
- separately into the ``execute()`` method of the DBAPI cursor.
- An offline SQL
- script needs to have these rendered inline. While it should
- always be noted that inline literal values are an **enormous**
- security hole in an application that handles untrusted input,
- a schema migration is not run in this context, so
- literals are safe to render inline, with the caveat that
- advanced types like dates may not be supported directly
- by SQLAlchemy.
-
- See :func:`.op.execute` for an example usage of
- :func:`.inline_literal`.
-
- :param value: The value to render. Strings, integers, and simple
- numerics should be supported. Other types like boolean,
- dates, etc. may or may not be supported yet by various
- backends.
- :param type_: optional - a :class:`sqlalchemy.types.TypeEngine`
- subclass stating the type of this value. In SQLAlchemy
- expressions, this is usually derived automatically
- from the Python type of the value itself, as well as
- based on the context in which the value is used.
-
- """
- return impl._literal_bindparam(None, value, type_=type_)
-
-def execute(sql):
- """Execute the given SQL using the current change context.
-
- In a SQL script context, the statement is emitted directly to the
- output stream. There is *no* return result, however, as this
- function is oriented towards generating a change script
- that can run in "offline" mode. For full interaction
- with a connected database, use the "bind" available
- from the context::
-
- from alembic.op import get_bind
- connection = get_bind()
-
- Also note that any parameterized statement here *will not work*
- in offline mode - INSERT, UPDATE and DELETE statements which refer
- to literal values would need to render
- inline expressions. For simple use cases, the :func:`.inline_literal`
- function can be used for **rudimentary** quoting of string values.
- For "bulk" inserts, consider using :func:`~alembic.op.bulk_insert`.
-
- For example, to emit an UPDATE statement which is equally
- compatible with both online and offline mode::
-
- from sqlalchemy.sql import table, column
- from sqlalchemy import String
- from alembic.op import execute, inline_literal
-
- account = table('account',
- column('name', String)
- )
- execute(
- account.update().\\
- where(account.c.name==inline_literal('account 1')).\\
- values({'name':inline_literal('account 2')})
- )
-
- Note above we also used the SQLAlchemy :func:`sqlalchemy.sql.expression.table`
- and :func:`sqlalchemy.sql.expression.column` constructs to make a brief,
- ad-hoc table construct just for our UPDATE statement. A full
- :class:`~sqlalchemy.schema.Table` construct of course works perfectly
- fine as well, though note it's a recommended practice to at least ensure
- the definition of a table is self-contained within the migration script,
- rather than imported from a module that may break compatibility with
- older migrations.
-
- :param sql: Any legal SQLAlchemy expression, including:
-
- * a string
- * a :func:`sqlalchemy.sql.expression.text` construct.
- * a :func:`sqlalchemy.sql.expression.insert` construct.
- * a :func:`sqlalchemy.sql.expression.update`, :func:`sqlalchemy.sql.expression.insert`,
- or :func:`sqlalchemy.sql.expression.delete` construct.
- * Pretty much anything that's "executable" as described
- in :ref:`sqlexpression_toplevel`.
-
-
- """
- get_impl().execute(sql)
-
-def get_bind():
- """Return the current 'bind'.
-
- Under normal circumstances, this is the
- :class:`sqlalchemy.engine.Connection` currently being used
- to emit SQL to the database.
-
- In a SQL script context, this value is ``None``. [TODO: verify this]
-
- """
- return get_impl().bind \ No newline at end of file
diff --git a/alembic/operations.py b/alembic/operations.py
new file mode 100644
index 0000000..cc2ef48
--- /dev/null
+++ b/alembic/operations.py
@@ -0,0 +1,616 @@
+from alembic import util
+from alembic.ddl import impl
+from sqlalchemy.types import NULLTYPE, Integer
+from sqlalchemy import schema, sql
+
+__all__ = sorted([
+ 'alter_column',
+ 'add_column',
+ 'drop_column',
+ 'drop_constraint',
+ 'create_foreign_key',
+ 'create_table',
+ 'drop_table',
+ 'drop_index',
+ 'create_index',
+ 'inline_literal',
+ 'bulk_insert',
+ 'rename_table',
+ 'create_unique_constraint',
+ 'create_check_constraint',
+ 'get_context',
+ 'get_bind',
+ 'execute'])
+
+class Operations(object):
+ """Define high level migration operations.
+
+ Each operation corresponds to some schema migration operation,
+ executed against a particular :class:`.MigrationContext`.
+
+ """
+ def __init__(self, migration_context):
+ """Construct a new :class:`.Operations`"""
+ self.migration_context = migration_context
+ self.impl = migration_context.impl
+
+ def _foreign_key_constraint(self, name, source, referent, local_cols, remote_cols):
+ m = schema.MetaData()
+ t1 = schema.Table(source, m,
+ *[schema.Column(n, NULLTYPE) for n in local_cols])
+ t2 = schema.Table(referent, m,
+ *[schema.Column(n, NULLTYPE) for n in remote_cols])
+
+ f = schema.ForeignKeyConstraint(local_cols,
+ ["%s.%s" % (referent, n)
+ for n in remote_cols],
+ name=name
+ )
+ t1.append_constraint(f)
+
+ return f
+
+ def _unique_constraint(self, name, source, local_cols, **kw):
+ t = schema.Table(source, schema.MetaData(),
+ *[schema.Column(n, NULLTYPE) for n in local_cols])
+ kw['name'] = name
+ uq = schema.UniqueConstraint(*t.c, **kw)
+ # TODO: need event tests to ensure the event
+ # is fired off here
+ t.append_constraint(uq)
+ return uq
+
+ def _check_constraint(self, name, source, condition, **kw):
+ t = schema.Table(source, schema.MetaData(),
+ schema.Column('x', Integer))
+ ck = schema.CheckConstraint(condition, name=name, **kw)
+ t.append_constraint(ck)
+ return ck
+
+ def _table(self, name, *columns, **kw):
+ m = schema.MetaData()
+ t = schema.Table(name, m, *columns, **kw)
+ for f in t.foreign_keys:
+ self._ensure_table_for_fk(m, f)
+ return t
+
+ def _column(self, name, type_, **kw):
+ return schema.Column(name, type_, **kw)
+
+ def _index(self, name, tablename, columns, **kw):
+ t = schema.Table(tablename, schema.MetaData(),
+ *[schema.Column(n, NULLTYPE) for n in columns]
+ )
+ return schema.Index(name, *list(t.c), **kw)
+
+ def _ensure_table_for_fk(self, metadata, fk):
+ """create a placeholder Table object for the referent of a
+ ForeignKey.
+
+ """
+ if isinstance(fk._colspec, basestring):
+ table_key, cname = fk._colspec.split('.')
+ if '.' in table_key:
+ tokens = table_key.split('.')
+ sname = ".".join(tokens[0:-1])
+ tname = tokens[-1]
+ else:
+ tname = table_key
+ sname = None
+ if table_key not in metadata.tables:
+ rel_t = schema.Table(tname, metadata, schema=sname)
+ else:
+ rel_t = metadata.tables[table_key]
+ if cname not in rel_t.c:
+ rel_t.append_column(schema.Column(cname, NULLTYPE))
+
+ def rename_table(self, old_table_name, new_table_name, schema=None):
+ """Emit an ALTER TABLE to rename a table.
+
+ :param old_table_name: old name.
+ :param new_table_name: new name.
+ :param schema: Optional, name of schema to operate within.
+
+ """
+ self.impl.rename_table(
+ old_table_name,
+ new_table_name,
+ schema=schema
+ )
+
+ def alter_column(self, table_name, column_name,
+ nullable=None,
+ server_default=False,
+ name=None,
+ type_=None,
+ existing_type=None,
+ existing_server_default=False,
+ existing_nullable=None,
+ ):
+ """Issue an "alter column" instruction using the
+ current migration context.
+
+ Generally, only that aspect of the column which
+ is being changed, i.e. name, type, nullability,
+ default, needs to be specified. Multiple changes
+ can also be specified at once and the backend should
+ "do the right thing", emitting each change either
+ separately or together as the backend allows.
+
+ MySQL has special requirements here, since MySQL
+ cannot ALTER a column without a full specification.
+ When producing MySQL-compatible migration files,
+ it is recommended that the ``existing_type``,
+ ``existing_server_default``, and ``existing_nullable``
+ parameters be present, if not being altered.
+
+ Type changes which are against the SQLAlchemy
+ "schema" types :class:`~sqlalchemy.types.Boolean`
+ and :class:`~sqlalchemy.types.Enum` may also
+ add or drop constraints which accompany those
+ types on backends that don't support them natively.
+ The ``existing_server_default`` argument is
+ used in this case as well to remove a previous
+ constraint.
+
+ :param table_name: string name of the target table.
+ :param column_name: string name of the target column,
+ as it exists before the operation begins.
+ :param nullable: Optional; specify ``True`` or ``False``
+ to alter the column's nullability.
+ :param server_default: Optional; specify a string
+ SQL expression, :func:`~sqlalchemy.sql.expression.text`,
+ or :class:`~sqlalchemy.schema.DefaultClause` to indicate
+ an alteration to the column's default value.
+ Set to ``None`` to have the default removed.
+ :param name: Optional; specify a string name here to
+ indicate the new name within a column rename operation.
+ :param type_: Optional; a :class:`~sqlalchemy.types.TypeEngine`
+ type object to specify a change to the column's type.
+ For SQLAlchemy types that also indicate a constraint (i.e.
+ :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
+ the constraint is also generated.
+ :param existing_type: Optional; a :class:`~sqlalchemy.types.TypeEngine`
+ type object to specify the previous type. This
+ is required for all MySQL column alter operations that
+ don't otherwise specify a new type, as well as for
+ when nullability is being changed on a SQL Server
+ column. It is also used if the type is a so-called
+ SQLAlchemy "schema" type which
+ may define a constraint (i.e.
+ :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
+ so that the constraint can be dropped.
+ :param existing_server_default: Optional; The existing
+ default value of the column. Required on MySQL if
+ an existing default is not being changed; else MySQL
+ removes the default.
+ :param existing_nullable: Optional; the existing nullability
+ of the column. Required on MySQL if the existing nullability
+ is not being changed; else MySQL sets this to NULL.
+ """
+
+ if existing_type:
+ t = self._table(table_name, schema.Column(column_name, existing_type))
+ for constraint in t.constraints:
+ if not isinstance(constraint, schema.PrimaryKeyConstraint):
+ self.impl.drop_constraint(constraint)
+
+ self.impl.alter_column(table_name, column_name,
+ nullable=nullable,
+ server_default=server_default,
+ name=name,
+ type_=type_,
+ existing_type=existing_type,
+ existing_server_default=existing_server_default,
+ existing_nullable=existing_nullable,
+ )
+
+ if type_:
+ t = self._table(table_name, schema.Column(column_name, type_))
+ for constraint in t.constraints:
+ if not isinstance(constraint, schema.PrimaryKeyConstraint):
+ self.impl.add_constraint(constraint)
+
+ def add_column(self, table_name, column):
+ """Issue an "add column" instruction using the current migration context.
+
+ e.g.::
+
+ from alembic.op import add_column
+ from sqlalchemy import Column, String
+
+ add_column('organization',
+ Column('name', String())
+ )
+
+ The provided :class:`~sqlalchemy.schema.Column` object can also
+ specify a :class:`~sqlalchemy.schema.ForeignKey`, referencing
+ a remote table name. Alembic will automatically generate a stub
+ "referenced" table and emit a second ALTER statement in order
+ to add the constraint separately::
+
+ from alembic.op import add_column
+ from sqlalchemy import Column, INTEGER, ForeignKey
+
+ add_column('organization',
+ Column('account_id', INTEGER, ForeignKey('accounts.id'))
+ )
+
+ :param table_name: String name of the parent table.
+ :param column: a :class:`sqlalchemy.schema.Column` object
+ representing the new column.
+
+ """
+
+ t = self._table(table_name, column)
+ self.impl.add_column(
+ table_name,
+ column
+ )
+ for constraint in t.constraints:
+ if not isinstance(constraint, schema.PrimaryKeyConstraint):
+ self.impl.add_constraint(constraint)
+
+ def drop_column(self, table_name, column_name, **kw):
+ """Issue a "drop column" instruction using the current migration context.
+
+ e.g.::
+
+ drop_column('organization', 'account_id')
+
+ :param table_name: name of table
+ :param column_name: name of column
+ :param mssql_drop_check: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop the CHECK constraint on the column using a SQL-script-compatible
+ block that selects into a @variable from sys.check_constraints,
+ then exec's a separate DROP CONSTRAINT for that constraint.
+ :param mssql_drop_default: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop the DEFAULT constraint on the column using a SQL-script-compatible
+ block that selects into a @variable from sys.default_constraints,
+ then exec's a separate DROP CONSTRAINT for that default.
+
+ """
+
+ self.impl.drop_column(
+ table_name,
+ self._column(column_name, NULLTYPE),
+ **kw
+ )
+
+
+ def create_foreign_key(self, name, source, referent, local_cols, remote_cols):
+ """Issue a "create foreign key" instruction using the
+ current migration context.
+
+ e.g.::
+
+ from alembic.op import create_foreign_key
+ create_foreign_key("fk_user_address", "address", "user", ["user_id"], ["id"])
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.ForeignKeyConstraint`
+ object which it then associates with the :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+ :param name: Name of the foreign key constraint. The name is necessary
+ so that an ALTER statement can be emitted. For setups that
+ use an automated naming scheme such as that described at
+ `NamingConventions <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/NamingConventions>`_,
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param source: String name of the source table. Currently
+ there is no support for dotted schema names.
+ :param referent: String name of the destination table. Currently
+ there is no support for dotted schema names.
+ :param local_cols: a list of string column names in the
+ source table.
+ :param remote_cols: a list of string column names in the
+ remote table.
+
+ """
+
+ self.impl.add_constraint(
+ self._foreign_key_constraint(name, source, referent,
+ local_cols, remote_cols)
+ )
+
+ def create_unique_constraint(self, name, source, local_cols, **kw):
+ """Issue a "create unique constraint" instruction using the current migration context.
+
+ e.g.::
+
+ from alembic.op import create_unique_constraint
+ create_unique_constraint("uq_user_name", "user", ["name"])
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.UniqueConstraint`
+ object which it then associates with the :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+ :param name: Name of the unique constraint. The name is necessary
+ so that an ALTER statement can be emitted. For setups that
+ use an automated naming scheme such as that described at
+ `NamingConventions <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/NamingConventions>`_,
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param source: String name of the source table. Currently
+ there is no support for dotted schema names.
+ :param local_cols: a list of string column names in the
+ source table.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
+ issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY <value> when issuing DDL
+ for this constraint.
+
+ """
+
+ self.impl.add_constraint(
+ self._unique_constraint(name, source, local_cols,
+ **kw)
+ )
+
+ def create_check_constraint(self, name, source, condition, **kw):
+ """Issue a "create check constraint" instruction using the current migration context.
+
+ e.g.::
+
+ from alembic.op import create_check_constraint
+ from sqlalchemy.sql import column, func
+
+ create_check_constraint(
+ "ck_user_name_len",
+ "user",
+ func.len(column('name')) > 5
+ )
+
+ CHECK constraints are usually against a SQL expression, so ad-hoc
+ table metadata is usually needed. The function will convert the given
+ arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound
+ to an anonymous table in order to emit the CREATE statement.
+
+ :param name: Name of the check constraint. The name is necessary
+ so that an ALTER statement can be emitted. For setups that
+ use an automated naming scheme such as that described at
+ `NamingConventions <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/NamingConventions>`_,
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param source: String name of the source table. Currently
+ there is no support for dotted schema names.
+ :param condition: SQL expression that's the condition of the constraint.
+ Can be a string or SQLAlchemy expression language structure.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
+ issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY <value> when issuing DDL
+ for this constraint.
+
+ """
+ self.impl.add_constraint(
+ self._check_constraint(name, source, condition, **kw)
+ )
+
+ def create_table(self, name, *columns, **kw):
+ """Issue a "create table" instruction using the current migration context.
+
+ This directive receives an argument list similar to that of the
+ traditional :class:`sqlalchemy.schema.Table` construct, but without the
+ metadata::
+
+ from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
+ from alembic.op import create_table
+
+ create_table(
+ 'accounts',
+ Column('id', INTEGER, primary_key=True),
+ Column('name', VARCHAR(50), nullable=False),
+ Column('description', NVARCHAR(200))
+ )
+
+ :param name: Name of the table
+ :param \*columns: collection of :class:`~sqlalchemy.schema.Column` objects within
+ the table, as well as optional :class:`~sqlalchemy.schema.Constraint` objects
+ and :class:`~sqlalchemy.schema.Index` objects.
+ :param emit_events: if ``True``, emit ``before_create`` and ``after_create``
+ events when the table is being created. In particular, the Postgresql ENUM
+ type will emit a CREATE TYPE within these events.
+ :param \**kw: Other keyword arguments are passed to the underlying
+ :class:`.Table` object created for the command.
+
+ """
+ self.impl.create_table(
+ self._table(name, *columns, **kw)
+ )
+
+ def drop_table(self, name):
+ """Issue a "drop table" instruction using the current migration context.
+
+
+ e.g.::
+
+ drop_table("accounts")
+
+ """
+ self.impl.drop_table(
+ self._table(name)
+ )
+
+ def create_index(self, name, tablename, *columns, **kw):
+ """Issue a "create index" instruction using the current migration context.
+
+ e.g.::
+
+ from alembic.op import create_index
+ create_index('ik_test', 't1', ['foo', 'bar'])
+
+ """
+
+ self.impl.create_index(
+ self._index(name, tablename, *columns, **kw)
+ )
+
+ def drop_index(self, name):
+ """Issue a "drop index" instruction using the current migration context.
+
+
+ e.g.::
+
+ drop_index("accounts")
+
+ """
+ self.impl.drop_index(self._index(name, 'foo', []))
+
+ def drop_constraint(self, name, tablename):
+ """Drop a constraint of the given name"""
+ t = self._table(tablename)
+ const = schema.Constraint(name=name)
+ t.append_constraint(const)
+ self.impl.drop_constraint(const)
+
+ def bulk_insert(self, table, rows):
+ """Issue a "bulk insert" operation using the current migration context.
+
+ This provides a means of representing an INSERT of multiple rows
+ which works equally well in the context of executing on a live
+ connection as well as that of generating a SQL script. In the
+ case of a SQL script, the values are rendered inline into the
+ statement.
+
+ e.g.::
+
+ from datetime import date
+ from sqlalchemy.sql import table, column
+ from sqlalchemy import String, Integer, Date
+
+ # Create an ad-hoc table to use for the insert statement.
+ accounts_table = table('account',
+ column('id', Integer),
+ column('name', String),
+ column('create_date', Date)
+ )
+
+ bulk_insert(accounts_table,
+ [
+ {'id':1, 'name':'John Smith', 'create_date':date(2010, 10, 5)},
+ {'id':2, 'name':'Ed Williams', 'create_date':date(2007, 5, 27)},
+ {'id':3, 'name':'Wendy Jones', 'create_date':date(2008, 8, 15)},
+ ]
+ )
+ """
+ self.impl.bulk_insert(table, rows)
+
+ def inline_literal(self, value, type_=None):
+ """Produce an 'inline literal' expression, suitable for
+ using in an INSERT, UPDATE, or DELETE statement.
+
+ When using Alembic in "offline" mode, CRUD operations
+ aren't compatible with SQLAlchemy's default behavior surrounding
+ literal values,
+ which is that they are converted into bound values and passed
+ separately into the ``execute()`` method of the DBAPI cursor.
+ An offline SQL
+ script needs to have these rendered inline. While it should
+ always be noted that inline literal values are an **enormous**
+ security hole in an application that handles untrusted input,
+ a schema migration is not run in this context, so
+ literals are safe to render inline, with the caveat that
+ advanced types like dates may not be supported directly
+ by SQLAlchemy.
+
+ See :func:`.op.execute` for an example usage of
+ :func:`.inline_literal`.
+
+ :param value: The value to render. Strings, integers, and simple
+ numerics should be supported. Other types like boolean,
+ dates, etc. may or may not be supported yet by various
+ backends.
+ :param type_: optional - a :class:`sqlalchemy.types.TypeEngine`
+ subclass stating the type of this value. In SQLAlchemy
+ expressions, this is usually derived automatically
+ from the Python type of the value itself, as well as
+ based on the context in which the value is used.
+
+ """
+ return impl._literal_bindparam(None, value, type_=type_)
+
+ def execute(self, sql):
+ """Execute the given SQL using the current migration context.
+
+ In a SQL script context, the statement is emitted directly to the
+ output stream. There is *no* return result, however, as this
+ function is oriented towards generating a change script
+ that can run in "offline" mode. For full interaction
+ with a connected database, use the "bind" available
+ from the context::
+
+ from alembic.op import get_bind
+ connection = get_bind()
+
+ Also note that any parameterized statement here *will not work*
+ in offline mode - INSERT, UPDATE and DELETE statements which refer
+ to literal values would need to render
+ inline expressions. For simple use cases, the :func:`.inline_literal`
+ function can be used for **rudimentary** quoting of string values.
+ For "bulk" inserts, consider using :func:`~alembic.op.bulk_insert`.
+
+ For example, to emit an UPDATE statement which is equally
+ compatible with both online and offline mode::
+
+ from sqlalchemy.sql import table, column
+ from sqlalchemy import String
+ from alembic.op import execute, inline_literal
+
+ account = table('account',
+ column('name', String)
+ )
+ execute(
+ account.update().\\
+ where(account.c.name==inline_literal('account 1')).\\
+ values({'name':inline_literal('account 2')})
+ )
+
+ Note above we also used the SQLAlchemy :func:`sqlalchemy.sql.expression.table`
+ and :func:`sqlalchemy.sql.expression.column` constructs to make a brief,
+ ad-hoc table construct just for our UPDATE statement. A full
+ :class:`~sqlalchemy.schema.Table` construct of course works perfectly
+ fine as well, though note it's a recommended practice to at least ensure
+ the definition of a table is self-contained within the migration script,
+ rather than imported from a module that may break compatibility with
+ older migrations.
+
+ :param sql: Any legal SQLAlchemy expression, including:
+
+ * a string
+ * a :func:`sqlalchemy.sql.expression.text` construct.
+ * a :func:`sqlalchemy.sql.expression.insert` construct.
+ * a :func:`sqlalchemy.sql.expression.update`, :func:`sqlalchemy.sql.expression.insert`,
+ or :func:`sqlalchemy.sql.expression.delete` construct.
+ * Pretty much anything that's "executable" as described
+ in :ref:`sqlexpression_toplevel`.
+
+
+ """
+ self.migration_context.impl.execute(sql)
+
+ def get_bind(self):
+ """Return the current 'bind'.
+
+ Under normal circumstances, this is the
+ :class:`sqlalchemy.engine.Connection` currently being used
+ to emit SQL to the database.
+
+ In a SQL script context, this value is ``None``. [TODO: verify this]
+
+ """
+ return self.migration_context.impl.bind
+
+configure = Operations \ No newline at end of file
diff --git a/tests/__init__.py b/tests/__init__.py
index 4d84331..328040a 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -5,10 +5,12 @@ import shutil
import os
import itertools
from sqlalchemy import create_engine, text, MetaData
-from alembic import context, util
+from alembic import util
+from alembic.migration import MigrationContext
import re
+import alembic
+from alembic.operations import Operations
from alembic.script import ScriptDirectory
-from alembic.context import Context
from alembic import ddl
import StringIO
from alembic.ddl.impl import _impls
@@ -140,12 +142,11 @@ def op_fixture(dialect='default', as_sql=False):
)
- class ctx(Context):
+ class ctx(MigrationContext):
def __init__(self, dialect='default', as_sql=False):
self.dialect = _get_dialect(dialect)
self.impl = Impl(self.dialect, as_sql)
- context._context = self
self.as_sql = as_sql
def assert_(self, *sql):
@@ -162,7 +163,9 @@ def op_fixture(dialect='default', as_sql=False):
sql,
self.impl.assertion
)
- return ctx(dialect, as_sql)
+ context = ctx(dialect, as_sql)
+ alembic.op._proxy = Operations(context)
+ return context
def env_file_fixture(txt):
dir_ = os.path.join(staging_directory, 'scripts')
@@ -269,12 +272,10 @@ def staging_env(create=True, template="generic"):
shutil.rmtree(path)
command.init(cfg, path)
sc = script.ScriptDirectory.from_config(cfg)
- context._opts(cfg,sc, fn=lambda:None)
return sc
def clear_staging_env():
shutil.rmtree(staging_directory, True)
- context._clear()
def three_rev_fixture(cfg):
a = util.rev_id()
diff --git a/tests/test_versioning.py b/tests/test_versioning.py
index 4ba09e5..f9e49c7 100644
--- a/tests/test_versioning.py
+++ b/tests/test_versioning.py
@@ -15,13 +15,13 @@ def test_001_revisions():
script.write(a, """
down_revision = None
-from alembic.op import *
+from alembic import op
def upgrade():
- execute("CREATE TABLE foo(id integer)")
+ op.execute("CREATE TABLE foo(id integer)")
def downgrade():
- execute("DROP TABLE foo")
+ op.execute("DROP TABLE foo")
""")
@@ -29,13 +29,13 @@ def downgrade():
script.write(b, """
down_revision = '%s'
-from alembic.op import *
+from alembic import op
def upgrade():
- execute("CREATE TABLE bar(id integer)")
+ op.execute("CREATE TABLE bar(id integer)")
def downgrade():
- execute("DROP TABLE bar")
+ op.execute("DROP TABLE bar")
""" % a)
@@ -43,13 +43,13 @@ def downgrade():
script.write(c, """
down_revision = '%s'
-from alembic.op import *
+from alembic import op
def upgrade():
- execute("CREATE TABLE bat(id integer)")
+ op.execute("CREATE TABLE bat(id integer)")
def downgrade():
- execute("DROP TABLE bat")
+ op.execute("DROP TABLE bat")
""" % b)