author     jvanasco <jonathan@2xlp.com>    2014-10-17 19:37:47 -0400
committer  jvanasco <jonathan@2xlp.com>    2014-10-17 19:37:47 -0400
commit     efca4af93603faa7abfeacbab264cad85ee4105c (patch)
tree       c98b87e0a489c668acd119800c8a946dc7fdf9d4 /lib
parent     4da020dae324cb871074e302f4840e8731988be0 (diff)
parent     61a4a89d993eda1d3168b501ba9ed8d94ea9b5f8 (diff)
download   sqlalchemy-efca4af93603faa7abfeacbab264cad85ee4105c.tar.gz
Merged zzzeek/sqlalchemy into master
Diffstat (limited to 'lib')
-rw-r--r--  lib/sqlalchemy/__init__.py                        |   1
-rw-r--r--  lib/sqlalchemy/dialects/mssql/base.py             |   2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/base.py             |  30
-rw-r--r--  lib/sqlalchemy/dialects/mysql/mysqlconnector.py   |  48
-rw-r--r--  lib/sqlalchemy/dialects/oracle/base.py            |   3
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/base.py        |  62
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/psycopg2.py    |  66
-rw-r--r--  lib/sqlalchemy/engine/base.py                     |   2
-rw-r--r--  lib/sqlalchemy/engine/reflection.py               |  47
-rw-r--r--  lib/sqlalchemy/engine/strategies.py               |   1
-rw-r--r--  lib/sqlalchemy/exc.py                             |   6
-rw-r--r--  lib/sqlalchemy/ext/declarative/api.py             |   2
-rw-r--r--  lib/sqlalchemy/orm/mapper.py                      |   2
-rw-r--r--  lib/sqlalchemy/orm/persistence.py                 |  43
-rw-r--r--  lib/sqlalchemy/orm/query.py                       |  26
-rw-r--r--  lib/sqlalchemy/orm/session.py                     |  96
-rw-r--r--  lib/sqlalchemy/orm/state.py                       |   4
-rw-r--r--  lib/sqlalchemy/orm/util.py                        |  10
-rw-r--r--  lib/sqlalchemy/pool.py                            |  20
-rw-r--r--  lib/sqlalchemy/sql/__init__.py                    |   1
-rw-r--r--  lib/sqlalchemy/sql/compiler.py                    |   8
-rw-r--r--  lib/sqlalchemy/sql/crud.py                        |  97
-rw-r--r--  lib/sqlalchemy/sql/dml.py                         |  26
-rw-r--r--  lib/sqlalchemy/sql/elements.py                    | 119
-rw-r--r--  lib/sqlalchemy/sql/expression.py                  |   4
-rw-r--r--  lib/sqlalchemy/sql/functions.py                   |  31
-rw-r--r--  lib/sqlalchemy/sql/schema.py                      |   8
-rw-r--r--  lib/sqlalchemy/sql/selectable.py                  |   5
-rw-r--r--  lib/sqlalchemy/testing/engines.py                 |   4
-rw-r--r--  lib/sqlalchemy/testing/exclusions.py              |   3
-rw-r--r--  lib/sqlalchemy/testing/provision.py               |  10
-rw-r--r--  lib/sqlalchemy/testing/suite/test_insert.py       |  37
-rw-r--r--  lib/sqlalchemy/testing/suite/test_reflection.py   |  13
-rw-r--r--  lib/sqlalchemy/util/langhelpers.py                |  10
34 files changed, 672 insertions, 175 deletions
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 853566172..d184e1fbf 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -25,6 +25,7 @@ from .sql import (
extract,
false,
func,
+ funcfilter,
insert,
intersect,
intersect_all,
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index ba3050ae5..dad02ee0f 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -846,7 +846,7 @@ class MSExecutionContext(default.DefaultExecutionContext):
"SET IDENTITY_INSERT %s OFF" %
self.dialect.identifier_preparer. format_table(
self.compiled.statement.table)))
- except:
+ except Exception:
pass
def get_result_proxy(self):
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index 7ccd59abb..2fb054d0c 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -341,6 +341,29 @@ reflection will not include foreign keys. For these tables, you may supply a
:ref:`mysql_storage_engines`
+.. _mysql_unique_constraints:
+
+MySQL Unique Constraints and Reflection
+---------------------------------------
+
+SQLAlchemy supports both the :class:`.Index` construct with the
+flag ``unique=True``, indicating a UNIQUE index, as well as the
+:class:`.UniqueConstraint` construct, representing a UNIQUE constraint.
+Both objects/syntaxes are supported by MySQL when emitting DDL to create
+these constraints. However, MySQL does not have a unique constraint
+construct that is separate from a unique index; that is, the "UNIQUE"
+constraint on MySQL is equivalent to creating a "UNIQUE INDEX".
+
+When reflecting these constructs, the :meth:`.Inspector.get_indexes`
+and the :meth:`.Inspector.get_unique_constraints` methods will **both**
+return an entry for a UNIQUE index in MySQL. However, when performing
+full table reflection using ``Table(..., autoload=True)``,
+the :class:`.UniqueConstraint` construct is
+**not** part of the fully reflected :class:`.Table` construct under any
+circumstances; this construct is always represented by a :class:`.Index`
+with the ``unique=True`` setting present in the :attr:`.Table.indexes`
+collection.
+
.. _mysql_timestamp_null:
@@ -2317,7 +2340,7 @@ class MySQLDialect(default.DefaultDialect):
# basic operations via autocommit fail.
try:
dbapi_connection.commit()
- except:
+ except Exception:
if self.server_version_info < (3, 23, 15):
args = sys.exc_info()[1].args
if args and args[0] == 1064:
@@ -2329,7 +2352,7 @@ class MySQLDialect(default.DefaultDialect):
try:
dbapi_connection.rollback()
- except:
+ except Exception:
if self.server_version_info < (3, 23, 15):
args = sys.exc_info()[1].args
if args and args[0] == 1064:
@@ -2590,7 +2613,8 @@ class MySQLDialect(default.DefaultDialect):
return [
{
'name': key['name'],
- 'column_names': [col[0] for col in key['columns']]
+ 'column_names': [col[0] for col in key['columns']],
+ 'duplicates_index': key['name'],
}
for key in parsed_state.keys
if key['type'] == 'UNIQUE'
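
A minimal inspection sketch of the behavior documented above, assuming a hypothetical MySQL database URL and table name; on MySQL the same UNIQUE index is reported by both methods, and the unique-constraint entry now carries the ``duplicates_index`` key added in this hunk::

    from sqlalchemy import create_engine, inspect

    engine = create_engine("mysql+mysqlconnector://user:pass@localhost/test")
    insp = inspect(engine)

    # the unique-constraint entry points back at the same-named UNIQUE index
    for uc in insp.get_unique_constraints("some_table"):
        print(uc["name"], uc["column_names"], uc.get("duplicates_index"))

    # the index entry for the same construct, with unique=True
    for ix in insp.get_indexes("some_table"):
        print(ix["name"], ix["column_names"], ix["unique"])
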
diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
index e51e80005..417e1ad6f 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
@@ -21,6 +21,7 @@ from .base import (MySQLDialect, MySQLExecutionContext,
BIT)
from ... import util
+import re
class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
@@ -31,18 +32,34 @@ class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
class MySQLCompiler_mysqlconnector(MySQLCompiler):
def visit_mod_binary(self, binary, operator, **kw):
- return self.process(binary.left, **kw) + " %% " + \
- self.process(binary.right, **kw)
+ if self.dialect._mysqlconnector_double_percents:
+ return self.process(binary.left, **kw) + " %% " + \
+ self.process(binary.right, **kw)
+ else:
+ return self.process(binary.left, **kw) + " % " + \
+ self.process(binary.right, **kw)
def post_process_text(self, text):
- return text.replace('%', '%%')
+ if self.dialect._mysqlconnector_double_percents:
+ return text.replace('%', '%%')
+ else:
+ return text
+
+ def escape_literal_column(self, text):
+ if self.dialect._mysqlconnector_double_percents:
+ return text.replace('%', '%%')
+ else:
+ return text
class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):
def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
- return value.replace("%", "%%")
+ if self.dialect._mysqlconnector_double_percents:
+ return value.replace("%", "%%")
+ else:
+ return value
class _myconnpyBIT(BIT):
@@ -55,8 +72,6 @@ class _myconnpyBIT(BIT):
class MySQLDialect_mysqlconnector(MySQLDialect):
driver = 'mysqlconnector'
- if util.py2k:
- supports_unicode_statements = False
supports_unicode_binds = True
supports_sane_rowcount = True
@@ -77,6 +92,10 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
}
)
+ @util.memoized_property
+ def supports_unicode_statements(self):
+ return util.py3k or self._mysqlconnector_version_info > (2, 0)
+
@classmethod
def dbapi(cls):
from mysql import connector
@@ -103,10 +122,25 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
'client_flags', ClientFlag.get_default())
client_flags |= ClientFlag.FOUND_ROWS
opts['client_flags'] = client_flags
- except:
+ except Exception:
pass
return [[], opts]
+ @util.memoized_property
+ def _mysqlconnector_version_info(self):
+ if self.dbapi and hasattr(self.dbapi, '__version__'):
+ m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
+ self.dbapi.__version__)
+ if m:
+ return tuple(
+ int(x)
+ for x in m.group(1, 2, 3)
+ if x is not None)
+
+ @util.memoized_property
+ def _mysqlconnector_double_percents(self):
+ return not util.py3k and self._mysqlconnector_version_info < (2, 0)
+
def _get_server_version_info(self, connection):
dbapi_con = connection.connection
version = dbapi_con.get_server_version()
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 837a498fb..6df38e57e 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -813,7 +813,8 @@ class OracleDDLCompiler(compiler.DDLCompiler):
class OracleIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = set([x.lower() for x in RESERVED_WORDS])
- illegal_initial_characters = set(range(0, 10)).union(["_", "$"])
+ illegal_initial_characters = set(
+ (str(dig) for dig in range(0, 10))).union(["_", "$"])
def _bindparam_requires_quotes(self, value):
"""Return True if the given identifier requires quoting."""
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index b9a0d461b..baa640eaa 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -402,6 +402,28 @@ underlying CREATE INDEX command, so it *must* be a valid index type for your
version of PostgreSQL.
+.. _postgresql_index_reflection:
+
+Postgresql Index Reflection
+---------------------------
+
+The Postgresql database creates a UNIQUE INDEX implicitly whenever the
+UNIQUE CONSTRAINT construct is used. When inspecting a table using
+:class:`.Inspector`, the :meth:`.Inspector.get_indexes`
+and the :meth:`.Inspector.get_unique_constraints` will report on these
+two constructs distinctly; in the case of the index, the key
+``duplicates_constraint`` will be present in the index entry if it is
+detected as mirroring a constraint. When performing reflection using
+``Table(..., autoload=True)``, the UNIQUE INDEX is **not** returned
+in :attr:`.Table.indexes` when it is detected as mirroring a
+:class:`.UniqueConstraint` in the :attr:`.Table.constraints` collection.
+
+.. versionchanged:: 1.0.0 - :class:`.Table` reflection now includes
+ :class:`.UniqueConstraint` objects present in the :attr:`.Table.constraints`
+ collection; the Postgresql backend will no longer include a "mirrored"
+ :class:`.Index` construct in :attr:`.Table.indexes` if it is detected
+ as corresponding to a unique constraint.
+
Special Reflection Options
--------------------------
@@ -2471,14 +2493,19 @@ class PGDialect(default.DefaultDialect):
SELECT
i.relname as relname,
ix.indisunique, ix.indexprs, ix.indpred,
- a.attname, a.attnum, ix.indkey%s
+ a.attname, a.attnum, c.conrelid, ix.indkey%s
FROM
pg_class t
join pg_index ix on t.oid = ix.indrelid
- join pg_class i on i.oid=ix.indexrelid
+ join pg_class i on i.oid = ix.indexrelid
left outer join
pg_attribute a
- on t.oid=a.attrelid and %s
+ on t.oid = a.attrelid and %s
+ left outer join
+ pg_constraint c
+ on (ix.indrelid = c.conrelid and
+ ix.indexrelid = c.conindid and
+ c.contype in ('p', 'u', 'x'))
WHERE
t.relkind IN ('r', 'v', 'f', 'm')
and t.oid = :table_oid
@@ -2501,7 +2528,7 @@ class PGDialect(default.DefaultDialect):
sv_idx_name = None
for row in c.fetchall():
- idx_name, unique, expr, prd, col, col_num, idx_key = row
+ idx_name, unique, expr, prd, col, col_num, conrelid, idx_key = row
if expr:
if idx_name != sv_idx_name:
@@ -2518,18 +2545,27 @@ class PGDialect(default.DefaultDialect):
% idx_name)
sv_idx_name = idx_name
+ has_idx = idx_name in indexes
index = indexes[idx_name]
if col is not None:
index['cols'][col_num] = col
- index['key'] = [int(k.strip()) for k in idx_key.split()]
- index['unique'] = unique
-
- return [
- {'name': name,
- 'unique': idx['unique'],
- 'column_names': [idx['cols'][i] for i in idx['key']]}
- for name, idx in indexes.items()
- ]
+ if not has_idx:
+ index['key'] = [int(k.strip()) for k in idx_key.split()]
+ index['unique'] = unique
+ if conrelid is not None:
+ index['duplicates_constraint'] = idx_name
+
+ result = []
+ for name, idx in indexes.items():
+ entry = {
+ 'name': name,
+ 'unique': idx['unique'],
+ 'column_names': [idx['cols'][i] for i in idx['key']]
+ }
+ if 'duplicates_constraint' in idx:
+ entry['duplicates_constraint'] = idx['duplicates_constraint']
+ result.append(entry)
+ return result
@reflection.cache
def get_unique_constraints(self, connection, table_name,
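
A corresponding sketch for Postgresql, where an index that mirrors a UNIQUE constraint is flagged with ``duplicates_constraint`` in :meth:`.Inspector.get_indexes` and omitted from :attr:`.Table.indexes` during full reflection; the URL and table name are placeholders::

    from sqlalchemy import create_engine, inspect, MetaData, Table

    engine = create_engine("postgresql+psycopg2://user:pass@localhost/test")
    insp = inspect(engine)

    # index entries mirroring a UNIQUE constraint carry the new key
    for ix in insp.get_indexes("some_table"):
        print(ix["name"], ix["unique"], ix.get("duplicates_constraint"))

    # full reflection keeps the UniqueConstraint and skips the mirrored Index
    meta = MetaData()
    tbl = Table("some_table", meta, autoload=True, autoload_with=engine)
    print([type(c).__name__ for c in tbl.constraints])
    print([ix.name for ix in tbl.indexes])
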
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 9dfd53e22..1a2a1ffe4 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -32,10 +32,25 @@ psycopg2-specific keyword arguments which are accepted by
way of enabling this mode on a per-execution basis.
* ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode
per connection. True by default.
+
+ .. seealso::
+
+ :ref:`psycopg2_disable_native_unicode`
+
* ``isolation_level``: This option, available for all PostgreSQL dialects,
includes the ``AUTOCOMMIT`` isolation level when using the psycopg2
- dialect. See :ref:`psycopg2_isolation_level`.
+ dialect.
+
+ .. seealso::
+
+ :ref:`psycopg2_isolation_level`
+
+* ``client_encoding``: sets the client encoding in a libpq-agnostic way,
+ using psycopg2's ``set_client_encoding()`` method.
+
+ .. seealso::
+ :ref:`psycopg2_unicode`
Unix Domain Connections
------------------------
@@ -75,8 +90,10 @@ The following DBAPI-specific options are respected when used with
If ``None`` or not set, the ``server_side_cursors`` option of the
:class:`.Engine` is used.
-Unicode
--------
+.. _psycopg2_unicode:
+
+Unicode with Psycopg2
+----------------------
By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
extension, such that the DBAPI receives and returns all strings as Python
@@ -84,27 +101,51 @@ Unicode objects directly - SQLAlchemy passes these values through without
change. Psycopg2 here will encode/decode string values based on the
current "client encoding" setting; by default this is the value in
the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
-Typically, this can be changed to ``utf-8``, as a more useful default::
+Typically, this can be changed to ``utf8``, as a more useful default::
+
+ # postgresql.conf file
- #client_encoding = sql_ascii # actually, defaults to database
+ # client_encoding = sql_ascii # actually, defaults to database
# encoding
client_encoding = utf8
A second way to affect the client encoding is to set it within Psycopg2
-locally. SQLAlchemy will call psycopg2's ``set_client_encoding()``
-method (see:
-http://initd.org/psycopg/docs/connection.html#connection.set_client_encoding)
+locally. SQLAlchemy will call psycopg2's
+:meth:`psycopg2:connection.set_client_encoding` method
on all new connections based on the value passed to
:func:`.create_engine` using the ``client_encoding`` parameter::
+ # set_client_encoding() setting;
+ # works for *all* Postgresql versions
engine = create_engine("postgresql://user:pass@host/dbname",
client_encoding='utf8')
This overrides the encoding specified in the Postgresql client configuration.
+When using the parameter in this way, the psycopg2 driver emits
+``SET client_encoding TO 'utf8'`` on the connection explicitly, and works
+in all Postgresql versions.
+
+Note that the ``client_encoding`` setting as passed to :func:`.create_engine`
+is **not the same** as the more recently added ``client_encoding`` parameter
+now supported by libpq directly. This is enabled when ``client_encoding``
+is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed
+using the :paramref:`.create_engine.connect_args` parameter::
+
+ # libpq direct parameter setting;
+ # only works for Postgresql **9.1 and above**
+ engine = create_engine("postgresql://user:pass@host/dbname",
+ connect_args={'client_encoding': 'utf8'})
+
+ # using the query string is equivalent
+ engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8")
+
+The above parameter was only added to libpq as of version 9.1 of Postgresql,
+so using the previous method is better for cross-version support.
+
+.. _psycopg2_disable_native_unicode:
-.. versionadded:: 0.7.3
- The psycopg2-specific ``client_encoding`` parameter to
- :func:`.create_engine`.
+Disabling Native Unicode
+^^^^^^^^^^^^^^^^^^^^^^^^
SQLAlchemy can also be instructed to skip the usage of the psycopg2
``UNICODE`` extension and to instead utilize its own unicode encode/decode
@@ -116,8 +157,7 @@ in and coerce from bytes on the way back,
using the value of the :func:`.create_engine` ``encoding`` parameter, which
defaults to ``utf-8``.
SQLAlchemy's own unicode encode/decode functionality is steadily becoming
-obsolete as more DBAPIs support unicode fully along with the approach of
-Python 3; in modern usage psycopg2 should be relied upon to handle unicode.
+obsolete as most DBAPIs now support unicode fully.
Transactions
------------
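
A short hedged sketch of the "Disabling Native Unicode" option described in the unicode section above, using a placeholder URL; with the extension disabled, SQLAlchemy performs its own encode/decode using the engine-wide ``encoding`` parameter::

    from sqlalchemy import create_engine

    # skip psycopg2's UNICODE extension; SQLAlchemy handles unicode itself
    engine = create_engine(
        "postgresql+psycopg2://user:pass@host/dbname",
        use_native_unicode=False,
        encoding="utf-8")
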
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index e5feda138..dd82be1d1 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1126,8 +1126,6 @@ class Connection(Connectable):
"""
try:
cursor.close()
- except (SystemExit, KeyboardInterrupt):
- raise
except Exception:
# log the error through the connection pool's logger.
self.engine.pool.logger.error(
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 838a5bdd2..2a1def86a 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -508,6 +508,10 @@ class Inspector(object):
table_name, schema, table, cols_by_orig_name,
include_columns, exclude_columns, reflection_options)
+ self._reflect_unique_constraints(
+ table_name, schema, table, cols_by_orig_name,
+ include_columns, exclude_columns, reflection_options)
+
def _reflect_column(
self, table, col_d, include_columns,
exclude_columns, cols_by_orig_name):
@@ -638,12 +642,15 @@ class Inspector(object):
columns = index_d['column_names']
unique = index_d['unique']
flavor = index_d.get('type', 'index')
+ duplicates = index_d.get('duplicates_constraint')
if include_columns and \
not set(columns).issubset(include_columns):
util.warn(
"Omitting %s key for (%s), key covers omitted columns." %
(flavor, ', '.join(columns)))
continue
+ if duplicates:
+ continue
# look for columns by orig name in cols_by_orig_name,
# but support columns that are in-Python only as fallback
idx_cols = []
@@ -661,3 +668,43 @@ class Inspector(object):
idx_cols.append(idx_col)
sa_schema.Index(name, *idx_cols, **dict(unique=unique))
+
+ def _reflect_unique_constraints(
+ self, table_name, schema, table, cols_by_orig_name,
+ include_columns, exclude_columns, reflection_options):
+
+ # Unique Constraints
+ try:
+ constraints = self.get_unique_constraints(table_name, schema)
+ except NotImplementedError:
+ # optional dialect feature
+ return
+
+ for const_d in constraints:
+ conname = const_d['name']
+ columns = const_d['column_names']
+ duplicates = const_d.get('duplicates_index')
+ if include_columns and \
+ not set(columns).issubset(include_columns):
+ util.warn(
+ "Omitting unique constraint key for (%s), "
+ "key covers omitted columns." %
+ ', '.join(columns))
+ continue
+ if duplicates:
+ continue
+ # look for columns by orig name in cols_by_orig_name,
+ # but support columns that are in-Python only as fallback
+ constrained_cols = []
+ for c in columns:
+ try:
+ constrained_col = cols_by_orig_name[c] \
+ if c in cols_by_orig_name else table.c[c]
+ except KeyError:
+ util.warn(
+ "unique constraint key '%s' was not located in "
+ "columns for table '%s'" % (c, table_name))
+ else:
+ constrained_cols.append(constrained_col)
+ table.append_constraint(
+ sa_schema.UniqueConstraint(*constrained_cols, name=conname))
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index 38206be89..398ef8df6 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -162,6 +162,7 @@ class DefaultEngineStrategy(EngineStrategy):
def first_connect(dbapi_connection, connection_record):
c = base.Connection(engine, connection=dbapi_connection,
_has_events=False)
+ c._execution_options = util.immutabledict()
dialect.initialize(c)
event.listen(pool, 'first_connect', first_connect, once=True)
diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py
index a82bae33f..5d35dc2e7 100644
--- a/lib/sqlalchemy/exc.py
+++ b/lib/sqlalchemy/exc.py
@@ -280,7 +280,9 @@ class DBAPIError(StatementError):
connection_invalidated=False):
# Don't ever wrap these, just return them directly as if
# DBAPIError didn't exist.
- if isinstance(orig, (KeyboardInterrupt, SystemExit, DontWrapMixin)):
+ if (isinstance(orig, BaseException) and
+ not isinstance(orig, Exception)) or \
+ isinstance(orig, DontWrapMixin):
return orig
if orig is not None:
@@ -310,8 +312,6 @@ class DBAPIError(StatementError):
def __init__(self, statement, params, orig, connection_invalidated=False):
try:
text = str(orig)
- except (KeyboardInterrupt, SystemExit):
- raise
except Exception as e:
text = 'Error in str() of DB-API-generated exception: ' + str(e)
StatementError.__init__(
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
index e84b21ad2..66fe05fd0 100644
--- a/lib/sqlalchemy/ext/declarative/api.py
+++ b/lib/sqlalchemy/ext/declarative/api.py
@@ -547,7 +547,7 @@ class AbstractConcreteBase(ConcreteBase):
for scls in cls.__subclasses__():
sm = _mapper_or_none(scls)
- if sm.concrete and cls in scls.__bases__:
+ if sm and sm.concrete and cls in scls.__bases__:
sm._set_concrete_base(m)
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 2ab239f86..7e88ba161 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -2654,7 +2654,7 @@ def configure_mappers():
mapper._expire_memoizations()
mapper.dispatch.mapper_configured(
mapper, mapper.class_)
- except:
+ except Exception:
exc = sys.exc_info()[1]
if not hasattr(exc, '_configure_failed'):
mapper._configure_failed = exc
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index 74e69e44c..114b79ea5 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -18,7 +18,7 @@ import operator
from itertools import groupby
from .. import sql, util, exc as sa_exc, schema
from . import attributes, sync, exc as orm_exc, evaluator
-from .base import state_str, _attr_as_key
+from .base import state_str, _attr_as_key, _entity_descriptor
from ..sql import expression
from . import loading
@@ -987,6 +987,7 @@ class BulkUpdate(BulkUD):
super(BulkUpdate, self).__init__(query)
self.query._no_select_modifiers("update")
self.values = values
+ self.mapper = self.query._mapper_zero_or_none()
@classmethod
def factory(cls, query, synchronize_session, values):
@@ -996,9 +997,40 @@ class BulkUpdate(BulkUD):
False: BulkUpdate
}, synchronize_session, query, values)
+ def _resolve_string_to_expr(self, key):
+ if self.mapper and isinstance(key, util.string_types):
+ attr = _entity_descriptor(self.mapper, key)
+ return attr.__clause_element__()
+ else:
+ return key
+
+ def _resolve_key_to_attrname(self, key):
+ if self.mapper and isinstance(key, util.string_types):
+ attr = _entity_descriptor(self.mapper, key)
+ return attr.property.key
+ elif isinstance(key, attributes.InstrumentedAttribute):
+ return key.key
+ elif hasattr(key, '__clause_element__'):
+ key = key.__clause_element__()
+
+ if self.mapper and isinstance(key, expression.ColumnElement):
+ try:
+ attr = self.mapper._columntoproperty[key]
+ except orm_exc.UnmappedColumnError:
+ return None
+ else:
+ return attr.key
+ else:
+ raise sa_exc.InvalidRequestError(
+ "Invalid expression type: %r" % key)
+
def _do_exec(self):
+ values = dict(
+ (self._resolve_string_to_expr(k), v)
+ for k, v in self.values.items()
+ )
update_stmt = sql.update(self.primary_table,
- self.context.whereclause, self.values)
+ self.context.whereclause, values)
self.result = self.query.session.execute(
update_stmt, params=self.query._params)
@@ -1044,9 +1076,10 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
def _additional_evaluators(self, evaluator_compiler):
self.value_evaluators = {}
for key, value in self.values.items():
- key = _attr_as_key(key)
- self.value_evaluators[key] = evaluator_compiler.process(
- expression._literal_as_binds(value))
+ key = self._resolve_key_to_attrname(key)
+ if key is not None:
+ self.value_evaluators[key] = evaluator_compiler.process(
+ expression._literal_as_binds(value))
def _do_post_synchronize(self):
session = self.query.session
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 7b2ea7977..fce7a3665 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -2756,9 +2756,25 @@ class Query(object):
Updates rows matched by this query in the database.
- :param values: a dictionary with attributes names as keys and literal
+ E.g.::
+
+ sess.query(User).filter(User.age == 25).\
+ update({User.age: User.age - 10}, synchronize_session='fetch')
+
+
+ sess.query(User).filter(User.age == 25).\
+ update({"age": User.age - 10}, synchronize_session='evaluate')
+
+
+ :param values: a dictionary with attribute names, or alternatively
+ mapped attributes or SQL expressions, as keys, and literal
values or sql expressions as values.
+ .. versionchanged:: 1.0.0 - string names in the values dictionary
+ are now resolved against the mapped entity; previously, these
+ strings were passed as literal column names with no mapper-level
+ translation.
+
:param synchronize_session: chooses the strategy to update the
attributes on objects in the session. Valid values are:
@@ -2796,7 +2812,7 @@ class Query(object):
which normally occurs upon :meth:`.Session.commit` or can be forced
by using :meth:`.Session.expire_all`.
- * As of 0.8, this method will support multiple table updates, as
+ * The method supports multiple table updates, as
detailed in :ref:`multi_table_updates`, and this behavior does
extend to support updates of joined-inheritance and other multiple
table mappings. However, the **join condition of an inheritance
@@ -2827,12 +2843,6 @@ class Query(object):
"""
- # TODO: value keys need to be mapped to corresponding sql cols and
- # instr.attr.s to string keys
- # TODO: updates of manytoone relationships need to be converted to
- # fk assignments
- # TODO: cascades need handling.
-
update_op = persistence.BulkUpdate.factory(
self, synchronize_session, values)
update_op.exec_()
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 13afcb357..db9d3a51d 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -641,14 +641,8 @@ class Session(_SessionClassMethods):
SessionExtension._adapt_listener(self, ext)
if binds is not None:
- for mapperortable, bind in binds.items():
- insp = inspect(mapperortable)
- if insp.is_selectable:
- self.bind_table(mapperortable, bind)
- elif insp.is_mapper:
- self.bind_mapper(mapperortable, bind)
- else:
- assert False
+ for key, bind in binds.items():
+ self._add_bind(key, bind)
if not self.autocommit:
self.begin()
@@ -1026,40 +1020,47 @@ class Session(_SessionClassMethods):
# TODO: + crystallize + document resolution order
# vis. bind_mapper/bind_table
- def bind_mapper(self, mapper, bind):
- """Bind operations for a mapper to a Connectable.
-
- mapper
- A mapper instance or mapped class
+ def _add_bind(self, key, bind):
+ try:
+ insp = inspect(key)
+ except sa_exc.NoInspectionAvailable:
+ if not isinstance(key, type):
+ raise exc.ArgumentError(
+ "Not acceptable bind target: %s" %
+ key)
+ else:
+ self.__binds[key] = bind
+ else:
+ if insp.is_selectable:
+ self.__binds[insp] = bind
+ elif insp.is_mapper:
+ self.__binds[insp.class_] = bind
+ for selectable in insp._all_tables:
+ self.__binds[selectable] = bind
+ else:
+ raise exc.ArgumentError(
+ "Not acceptable bind target: %s" %
+ key)
- bind
- Any Connectable: a :class:`.Engine` or :class:`.Connection`.
+ def bind_mapper(self, mapper, bind):
+ """Associate a :class:`.Mapper` with a "bind", e.g. a :class:`.Engine`
+ or :class:`.Connection`.
- All subsequent operations involving this mapper will use the given
- `bind`.
+ The given mapper is added to a lookup used by the
+ :meth:`.Session.get_bind` method.
"""
- if isinstance(mapper, type):
- mapper = class_mapper(mapper)
-
- self.__binds[mapper.base_mapper] = bind
- for t in mapper._all_tables:
- self.__binds[t] = bind
+ self._add_bind(mapper, bind)
def bind_table(self, table, bind):
- """Bind operations on a Table to a Connectable.
-
- table
- A :class:`.Table` instance
+ """Associate a :class:`.Table` with a "bind", e.g. a :class:`.Engine`
+ or :class:`.Connection`.
- bind
- Any Connectable: a :class:`.Engine` or :class:`.Connection`.
-
- All subsequent operations involving this :class:`.Table` will use the
- given `bind`.
+ The given table is added to a lookup used by the
+ :meth:`.Session.get_bind` method.
"""
- self.__binds[table] = bind
+ self._add_bind(table, bind)
def get_bind(self, mapper=None, clause=None):
"""Return a "bind" to which this :class:`.Session` is bound.
@@ -1113,6 +1114,7 @@ class Session(_SessionClassMethods):
bound :class:`.MetaData`.
"""
+
if mapper is clause is None:
if self.bind:
return self.bind
@@ -1122,15 +1124,23 @@ class Session(_SessionClassMethods):
"Connection, and no context was provided to locate "
"a binding.")
- c_mapper = mapper is not None and _class_to_mapper(mapper) or None
+ if mapper is not None:
+ try:
+ mapper = inspect(mapper)
+ except sa_exc.NoInspectionAvailable:
+ if isinstance(mapper, type):
+ raise exc.UnmappedClassError(mapper)
+ else:
+ raise
- # manually bound?
if self.__binds:
- if c_mapper:
- if c_mapper.base_mapper in self.__binds:
- return self.__binds[c_mapper.base_mapper]
- elif c_mapper.mapped_table in self.__binds:
- return self.__binds[c_mapper.mapped_table]
+ if mapper:
+ for cls in mapper.class_.__mro__:
+ if cls in self.__binds:
+ return self.__binds[cls]
+ if clause is None:
+ clause = mapper.mapped_table
+
if clause is not None:
for t in sql_util.find_tables(clause, include_crud=True):
if t in self.__binds:
@@ -1142,12 +1152,12 @@ class Session(_SessionClassMethods):
if isinstance(clause, sql.expression.ClauseElement) and clause.bind:
return clause.bind
- if c_mapper and c_mapper.mapped_table.bind:
- return c_mapper.mapped_table.bind
+ if mapper and mapper.mapped_table.bind:
+ return mapper.mapped_table.bind
context = []
if mapper is not None:
- context.append('mapper %s' % c_mapper)
+ context.append('mapper %s' % mapper)
if clause is not None:
context.append('SQL expression')
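
A sketch of the consolidated bind lookup as exercised from the public API; ``User``, ``addresses_table``, ``engine_one`` and ``engine_two`` are hypothetical names standing in for a mapped class, a :class:`.Table`, and two engines::

    from sqlalchemy.orm import Session

    # declarative form: keys may be mapped classes or selectables
    session = Session(binds={
        User: engine_one,
        addresses_table: engine_two,
    })

    # equivalent imperative form, routed through the same _add_bind() logic
    session = Session()
    session.bind_mapper(User, engine_one)
    session.bind_table(addresses_table, engine_two)
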
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index 3c12fda1a..4756f1707 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -258,8 +258,8 @@ class InstanceState(interfaces.InspectionAttr):
try:
return manager.original_init(*mixed[1:], **kwargs)
except:
- manager.dispatch.init_failure(self, args, kwargs)
- raise
+ with util.safe_reraise():
+ manager.dispatch.init_failure(self, args, kwargs)
def get_history(self, key, passive):
return self.manager[key].impl.get_history(self, self.dict, passive)
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index 734f9d5e6..8d40ae21c 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -804,6 +804,16 @@ class _ORMJoin(expression.Join):
expression.Join.__init__(self, left, right, onclause, isouter)
+ if not prop and getattr(right_info, 'mapper', None) \
+ and right_info.mapper.single:
+ # if single inheritance target and we are using a manual
+ # or implicit ON clause, augment it the same way we'd augment the
+ # WHERE.
+ single_crit = right_info.mapper._single_table_criterion
+ if right_info.is_aliased_class:
+ single_crit = right_info._adapter.traverse(single_crit)
+ self.onclause = self.onclause & single_crit
+
def join(self, right, onclause=None, isouter=False, join_to_left=None):
return _ORMJoin(self, right, onclause, isouter)
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index bc9affe4a..a174df784 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -248,9 +248,7 @@ class Pool(log.Identified):
self.logger.debug("Closing connection %r", connection)
try:
self._dialect.do_close(connection)
- except (SystemExit, KeyboardInterrupt):
- raise
- except:
+ except Exception:
self.logger.error("Exception closing connection %r",
connection, exc_info=True)
@@ -441,8 +439,8 @@ class _ConnectionRecord(object):
try:
dbapi_connection = rec.get_connection()
except:
- rec.checkin()
- raise
+ with util.safe_reraise():
+ rec.checkin()
echo = pool._should_log_debug()
fairy = _ConnectionFairy(dbapi_connection, rec, echo)
rec.fairy_ref = weakref.ref(
@@ -569,12 +567,12 @@ def _finalize_fairy(connection, connection_record,
# Immediately close detached instances
if not connection_record:
pool._close_connection(connection)
- except Exception as e:
+ except BaseException as e:
pool.logger.error(
"Exception during reset or similar", exc_info=True)
if connection_record:
connection_record.invalidate(e=e)
- if isinstance(e, (SystemExit, KeyboardInterrupt)):
+ if not isinstance(e, Exception):
raise
if connection_record:
@@ -842,9 +840,7 @@ class SingletonThreadPool(Pool):
for conn in self._all_conns:
try:
conn.close()
- except (SystemExit, KeyboardInterrupt):
- raise
- except:
+ except Exception:
# pysqlite won't even let you close a conn from a thread
# that didn't create it
pass
@@ -962,8 +958,8 @@ class QueuePool(Pool):
try:
return self._create_connection()
except:
- self._dec_overflow()
- raise
+ with util.safe_reraise():
+ self._dec_overflow()
else:
return self._do_get()
diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py
index 4d013859c..351e08d0b 100644
--- a/lib/sqlalchemy/sql/__init__.py
+++ b/lib/sqlalchemy/sql/__init__.py
@@ -38,6 +38,7 @@ from .expression import (
false,
False_,
func,
+ funcfilter,
insert,
intersect,
intersect_all,
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index 18b4d4cfc..a6c30b7dc 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -746,6 +746,12 @@ class SQLCompiler(Compiled):
)
)
+ def visit_funcfilter(self, funcfilter, **kwargs):
+ return "%s FILTER (WHERE %s)" % (
+ funcfilter.func._compiler_dispatch(self, **kwargs),
+ funcfilter.criterion._compiler_dispatch(self, **kwargs)
+ )
+
def visit_extract(self, extract, **kwargs):
field = self.extract_map.get(extract.field, extract.field)
return "EXTRACT(%s FROM %s)" % (
@@ -1787,7 +1793,7 @@ class SQLCompiler(Compiled):
text += " " + returning_clause
if insert_stmt.select is not None:
- text += " %s" % self.process(insert_stmt.select, **kw)
+ text += " %s" % self.process(self._insert_from_select, **kw)
elif not crud_params and supports_default_values:
text += " DEFAULT VALUES"
elif insert_stmt._has_multi_parameters:
diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py
index 1c1f661d2..831d05be1 100644
--- a/lib/sqlalchemy/sql/crud.py
+++ b/lib/sqlalchemy/sql/crud.py
@@ -89,18 +89,15 @@ def _get_crud_params(compiler, stmt, **kw):
_col_bind_name, _getattr_col_key, values, kw)
if compiler.isinsert and stmt.select_names:
- # for an insert from select, we can only use names that
- # are given, so only select for those names.
- cols = (stmt.table.c[_column_as_key(name)]
- for name in stmt.select_names)
+ _scan_insert_from_select_cols(
+ compiler, stmt, parameters,
+ _getattr_col_key, _column_as_key,
+ _col_bind_name, check_columns, values, kw)
else:
- # iterate through all table columns to maintain
- # ordering, even for those cols that aren't included
- cols = stmt.table.columns
-
- _scan_cols(
- compiler, stmt, cols, parameters,
- _getattr_col_key, _col_bind_name, check_columns, values, kw)
+ _scan_cols(
+ compiler, stmt, parameters,
+ _getattr_col_key, _column_as_key,
+ _col_bind_name, check_columns, values, kw)
if parameters and stmt_parameters:
check = set(parameters).intersection(
@@ -118,13 +115,17 @@ def _get_crud_params(compiler, stmt, **kw):
return values
-def _create_bind_param(compiler, col, value, required=False, name=None):
+def _create_bind_param(
+ compiler, col, value, process=True, required=False, name=None):
if name is None:
name = col.key
bindparam = elements.BindParameter(name, value,
type_=col.type, required=required)
bindparam._is_crud = True
- return bindparam._compiler_dispatch(compiler)
+ if process:
+ bindparam = bindparam._compiler_dispatch(compiler)
+ return bindparam
+
def _key_getters_for_crud_column(compiler):
if compiler.isupdate and compiler.statement._extra_froms:
@@ -162,14 +163,52 @@ def _key_getters_for_crud_column(compiler):
return _column_as_key, _getattr_col_key, _col_bind_name
+def _scan_insert_from_select_cols(
+ compiler, stmt, parameters, _getattr_col_key,
+ _column_as_key, _col_bind_name, check_columns, values, kw):
+
+ need_pks, implicit_returning, \
+ implicit_return_defaults, postfetch_lastrowid = \
+ _get_returning_modifiers(compiler, stmt)
+
+ cols = [stmt.table.c[_column_as_key(name)]
+ for name in stmt.select_names]
+
+ compiler._insert_from_select = stmt.select
+
+ add_select_cols = []
+ if stmt.include_insert_from_select_defaults:
+ col_set = set(cols)
+ for col in stmt.table.columns:
+ if col not in col_set and col.default:
+ cols.append(col)
+
+ for c in cols:
+ col_key = _getattr_col_key(c)
+ if col_key in parameters and col_key not in check_columns:
+ parameters.pop(col_key)
+ values.append((c, None))
+ else:
+ _append_param_insert_select_hasdefault(
+ compiler, stmt, c, add_select_cols, kw)
+
+ if add_select_cols:
+ values.extend(add_select_cols)
+ compiler._insert_from_select = compiler._insert_from_select._generate()
+ compiler._insert_from_select._raw_columns += tuple(
+ expr for col, expr in add_select_cols)
+
+
def _scan_cols(
- compiler, stmt, cols, parameters, _getattr_col_key,
- _col_bind_name, check_columns, values, kw):
+ compiler, stmt, parameters, _getattr_col_key,
+ _column_as_key, _col_bind_name, check_columns, values, kw):
need_pks, implicit_returning, \
implicit_return_defaults, postfetch_lastrowid = \
_get_returning_modifiers(compiler, stmt)
+ cols = stmt.table.columns
+
for c in cols:
col_key = _getattr_col_key(c)
if col_key in parameters and col_key not in check_columns:
@@ -196,7 +235,8 @@ def _scan_cols(
elif c.default is not None:
_append_param_insert_hasdefault(
- compiler, stmt, c, implicit_return_defaults, values, kw)
+ compiler, stmt, c, implicit_return_defaults,
+ values, kw)
elif c.server_default is not None:
if implicit_return_defaults and \
@@ -299,10 +339,8 @@ def _append_param_insert_hasdefault(
elif not c.primary_key:
compiler.postfetch.append(c)
elif c.default.is_clause_element:
- values.append(
- (c, compiler.process(
- c.default.arg.self_group(), **kw))
- )
+ proc = compiler.process(c.default.arg.self_group(), **kw)
+ values.append((c, proc))
if implicit_return_defaults and \
c in implicit_return_defaults:
@@ -317,6 +355,25 @@ def _append_param_insert_hasdefault(
compiler.prefetch.append(c)
+def _append_param_insert_select_hasdefault(
+ compiler, stmt, c, values, kw):
+
+ if c.default.is_sequence:
+ if compiler.dialect.supports_sequences and \
+ (not c.default.optional or
+ not compiler.dialect.sequences_optional):
+ proc = c.default
+ values.append((c, proc))
+ elif c.default.is_clause_element:
+ proc = c.default.arg.self_group()
+ values.append((c, proc))
+ else:
+ values.append(
+ (c, _create_bind_param(compiler, c, None, process=False))
+ )
+ compiler.prefetch.append(c)
+
+
def _append_param_update(
compiler, stmt, c, implicit_return_defaults, values, kw):
diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py
index 1934d0776..9f2ce7ce3 100644
--- a/lib/sqlalchemy/sql/dml.py
+++ b/lib/sqlalchemy/sql/dml.py
@@ -475,6 +475,7 @@ class Insert(ValuesBase):
ValuesBase.__init__(self, table, values, prefixes)
self._bind = bind
self.select = self.select_names = None
+ self.include_insert_from_select_defaults = False
self.inline = inline
self._returning = returning
self._validate_dialect_kwargs(dialect_kw)
@@ -487,7 +488,7 @@ class Insert(ValuesBase):
return ()
@_generative
- def from_select(self, names, select):
+ def from_select(self, names, select, include_defaults=True):
"""Return a new :class:`.Insert` construct which represents
an ``INSERT...FROM SELECT`` statement.
@@ -506,6 +507,21 @@ class Insert(ValuesBase):
is not checked before passing along to the database, the database
would normally raise an exception if these column lists don't
correspond.
+ :param include_defaults: if True, non-server default values and
+ SQL expressions as specified on :class:`.Column` objects
+ (as documented in :ref:`metadata_defaults_toplevel`) not
+ otherwise specified in the list of names will be rendered
+ into the INSERT and SELECT statements, so that these values are also
+ included in the data to be inserted.
+
+ .. note:: A Python-side default that uses a Python callable function
+ will only be invoked **once** for the whole statement, and **not
+ per row**.
+
+ .. versionadded:: 1.0.0 - :meth:`.Insert.from_select` now renders
+ Python-side and SQL expression column defaults into the
+ SELECT statement for columns otherwise not included in the
+ list of column names.
.. versionchanged:: 1.0.0 an INSERT that uses FROM SELECT
implies that the :paramref:`.insert.inline` flag is set to
@@ -514,13 +530,6 @@ class Insert(ValuesBase):
deals with an arbitrary number of rows, so the
:attr:`.ResultProxy.inserted_primary_key` accessor does not apply.
- .. note::
-
- A SELECT..INSERT construct in SQL has no VALUES clause. Therefore
- :class:`.Column` objects which utilize Python-side defaults
- (e.g. as described at :ref:`metadata_defaults_toplevel`)
- will **not** take effect when using :meth:`.Insert.from_select`.
-
.. versionadded:: 0.8.3
"""
@@ -533,6 +542,7 @@ class Insert(ValuesBase):
self.select_names = names
self.inline = True
+ self.include_insert_from_select_defaults = include_defaults
self.select = _interpret_as_select(select)
def _copy_internals(self, clone=_clone, **kw):
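
A sketch of the new ``include_defaults`` flag on :meth:`.Insert.from_select`, using hypothetical source and destination tables where the destination has a Python-side column default::

    from sqlalchemy import MetaData, Table, Column, Integer, String, select

    meta = MetaData()
    dest = Table("dest", meta,
                 Column("id", Integer, primary_key=True),
                 Column("data", String(50)),
                 Column("x", Integer, default=5))
    src = Table("src", meta,
                Column("id", Integer, primary_key=True),
                Column("data", String(50)))

    # include_defaults=True (the default) renders the default for "x"
    # into both the INSERT column list and the SELECT; a callable or SQL
    # default is evaluated once per statement, not per row
    stmt = dest.insert().from_select(
        ["id", "data"], select([src.c.id, src.c.data]))

    # pass include_defaults=False to restore the previous behavior
    stmt_plain = dest.insert().from_select(
        ["id", "data"], select([src.c.id, src.c.data]),
        include_defaults=False)
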
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py
index 8ec0aa700..444273e67 100644
--- a/lib/sqlalchemy/sql/elements.py
+++ b/lib/sqlalchemy/sql/elements.py
@@ -228,6 +228,7 @@ class ClauseElement(Visitable):
is_selectable = False
is_clause_element = True
+ description = None
_order_by_label_element = None
_is_from_container = False
@@ -540,7 +541,7 @@ class ClauseElement(Visitable):
__nonzero__ = __bool__
def __repr__(self):
- friendly = getattr(self, 'description', None)
+ friendly = self.description
if friendly is None:
return object.__repr__(self)
else:
@@ -2888,6 +2889,120 @@ class Over(ColumnElement):
))
+class FunctionFilter(ColumnElement):
+ """Represent a function FILTER clause.
+
+ This is a special operator against aggregate and window functions,
+ which controls which rows are passed to it.
+ It's supported only by certain database backends.
+
+ Invocation of :class:`.FunctionFilter` is via
+ :meth:`.FunctionElement.filter`::
+
+ func.count(1).filter(True)
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`.FunctionElement.filter`
+
+ """
+ __visit_name__ = 'funcfilter'
+
+ criterion = None
+
+ def __init__(self, func, *criterion):
+ """Produce a :class:`.FunctionFilter` object against a function.
+
+ Used against aggregate and window functions,
+ for database backends that support the "FILTER" clause.
+
+ E.g.::
+
+ from sqlalchemy import funcfilter
+ funcfilter(func.count(1), MyClass.name == 'some name')
+
+ Would produce "COUNT(1) FILTER (WHERE myclass.name = 'some name')".
+
+ This function is also available from the :data:`~.expression.func`
+ construct itself via the :meth:`.FunctionElement.filter` method.
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`.FunctionElement.filter`
+
+
+ """
+ self.func = func
+ self.filter(*criterion)
+
+ def filter(self, *criterion):
+ """Produce an additional FILTER against the function.
+
+ This method adds additional criteria to the initial criteria
+ set up by :meth:`.FunctionElement.filter`.
+
+ Multiple criteria are joined together at SQL render time
+ via ``AND``.
+
+
+ """
+
+ for criterion in list(criterion):
+ criterion = _expression_literal_as_text(criterion)
+
+ if self.criterion is not None:
+ self.criterion = self.criterion & criterion
+ else:
+ self.criterion = criterion
+
+ return self
+
+ def over(self, partition_by=None, order_by=None):
+ """Produce an OVER clause against this filtered function.
+
+ Used against aggregate or so-called "window" functions,
+ for database backends that support window functions.
+
+ The expression::
+
+ func.rank().filter(MyClass.y > 5).over(order_by='x')
+
+ is shorthand for::
+
+ from sqlalchemy import over, funcfilter
+ over(funcfilter(func.rank(), MyClass.y > 5), order_by='x')
+
+ See :func:`~.expression.over` for a full description.
+
+ """
+ return Over(self, partition_by=partition_by, order_by=order_by)
+
+ @util.memoized_property
+ def type(self):
+ return self.func.type
+
+ def get_children(self, **kwargs):
+ return [c for c in
+ (self.func, self.criterion)
+ if c is not None]
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.func = clone(self.func, **kw)
+ if self.criterion is not None:
+ self.criterion = clone(self.criterion, **kw)
+
+ @property
+ def _from_objects(self):
+ return list(itertools.chain(
+ *[c._from_objects for c in (self.func, self.criterion)
+ if c is not None]
+ ))
+
+
class Label(ColumnElement):
"""Represents a column label (AS).
@@ -3491,7 +3606,7 @@ def _string_or_unprintable(element):
else:
try:
return str(element)
- except:
+ except Exception:
return "unprintable element %r" % element
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index d96f048b9..2e10b7370 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -36,7 +36,7 @@ from .elements import ClauseElement, ColumnElement,\
True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \
Grouping, not_, \
collate, literal_column, between,\
- literal, outparam, type_coerce, ClauseList
+ literal, outparam, type_coerce, ClauseList, FunctionFilter
from .elements import SavepointClause, RollbackToSavepointClause, \
ReleaseSavepointClause
@@ -97,6 +97,8 @@ outerjoin = public_factory(Join._create_outerjoin, ".expression.outerjoin")
insert = public_factory(Insert, ".expression.insert")
update = public_factory(Update, ".expression.update")
delete = public_factory(Delete, ".expression.delete")
+funcfilter = public_factory(
+ FunctionFilter, ".expression.funcfilter")
# internal functions still being called from tests and the ORM,
diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py
index 7efb1e916..9280c7d60 100644
--- a/lib/sqlalchemy/sql/functions.py
+++ b/lib/sqlalchemy/sql/functions.py
@@ -12,7 +12,7 @@ from . import sqltypes, schema
from .base import Executable, ColumnCollection
from .elements import ClauseList, Cast, Extract, _literal_as_binds, \
literal_column, _type_from_args, ColumnElement, _clone,\
- Over, BindParameter
+ Over, BindParameter, FunctionFilter
from .selectable import FromClause, Select, Alias
from . import operators
@@ -116,6 +116,35 @@ class FunctionElement(Executable, ColumnElement, FromClause):
"""
return Over(self, partition_by=partition_by, order_by=order_by)
+ def filter(self, *criterion):
+ """Produce a FILTER clause against this function.
+
+ Used against aggregate and window functions,
+ for database backends that support the "FILTER" clause.
+
+ The expression::
+
+ func.count(1).filter(True)
+
+ is shorthand for::
+
+ from sqlalchemy import funcfilter
+ funcfilter(func.count(1), True)
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :class:`.FunctionFilter`
+
+ :func:`.funcfilter`
+
+
+ """
+ if not criterion:
+ return self
+ return FunctionFilter(self, *criterion)
+
@property
def _from_objects(self):
return self.clauses._from_objects
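
A companion sketch of the method-chained form added to :class:`.FunctionElement`, combining FILTER with an OVER clause; the table and columns are hypothetical::

    from sqlalchemy import func, select
    from sqlalchemy.sql import table, column

    myclass = table("myclass", column("x"), column("y"))

    # equivalent to over(funcfilter(func.rank(), myclass.c.y > 5),
    #                    order_by=myclass.c.x)
    expr = func.rank().filter(myclass.c.y > 5).over(order_by=myclass.c.x)
    print(select([expr]))
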
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py
index 26d7c428e..ef5d79a48 100644
--- a/lib/sqlalchemy/sql/schema.py
+++ b/lib/sqlalchemy/sql/schema.py
@@ -412,8 +412,8 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
table.dispatch.after_parent_attach(table, metadata)
return table
except:
- metadata._remove_table(name, schema)
- raise
+ with util.safe_reraise():
+ metadata._remove_table(name, schema)
@property
@util.deprecated('0.9', 'Use ``table.schema.quote``')
@@ -1061,8 +1061,8 @@ class Column(SchemaItem, ColumnClause):
conditionally rendered differently on different backends,
consider custom compilation rules for :class:`.CreateColumn`.
- ..versionadded:: 0.8.3 Added the ``system=True`` parameter to
- :class:`.Column`.
+ .. versionadded:: 0.8.3 Added the ``system=True`` parameter to
+ :class:`.Column`.
"""
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py
index b4df87e54..8198a6733 100644
--- a/lib/sqlalchemy/sql/selectable.py
+++ b/lib/sqlalchemy/sql/selectable.py
@@ -2572,7 +2572,7 @@ class Select(HasPrefixes, GenerativeSelect):
following::
select([mytable]).\\
- with_hint(mytable, "+ index(%(name)s ix_mytable)")
+ with_hint(mytable, "index(%(name)s ix_mytable)")
Would render SQL as::
@@ -2583,8 +2583,7 @@ class Select(HasPrefixes, GenerativeSelect):
and Sybase simultaneously::
select([mytable]).\\
- with_hint(
- mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\
+ with_hint(mytable, "index(%(name)s ix_mytable)", 'oracle').\\
with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
.. seealso::
diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py
index 67c13231e..1284f9c2a 100644
--- a/lib/sqlalchemy/testing/engines.py
+++ b/lib/sqlalchemy/testing/engines.py
@@ -37,8 +37,6 @@ class ConnectionKiller(object):
def _safe(self, fn):
try:
fn()
- except (SystemExit, KeyboardInterrupt):
- raise
except Exception as e:
warnings.warn(
"testing_reaper couldn't "
@@ -168,8 +166,6 @@ class ReconnectFixture(object):
def _safe(self, fn):
try:
fn()
- except (SystemExit, KeyboardInterrupt):
- raise
except Exception as e:
warnings.warn(
"ReconnectFixture couldn't "
diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py
index 49211f805..f94724608 100644
--- a/lib/sqlalchemy/testing/exclusions.py
+++ b/lib/sqlalchemy/testing/exclusions.py
@@ -178,8 +178,7 @@ class Predicate(object):
@classmethod
def as_predicate(cls, predicate, description=None):
if isinstance(predicate, compound):
- return cls.as_predicate(predicate.fails.union(predicate.skips))
-
+ return cls.as_predicate(predicate.enabled_for_config, description)
elif isinstance(predicate, Predicate):
if description and predicate.description is None:
predicate.description = description
diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py
index 0bcdad959..c8f7fdf30 100644
--- a/lib/sqlalchemy/testing/provision.py
+++ b/lib/sqlalchemy/testing/provision.py
@@ -120,7 +120,7 @@ def _pg_create_db(cfg, eng, ident):
isolation_level="AUTOCOMMIT") as conn:
try:
_pg_drop_db(cfg, conn, ident)
- except:
+ except Exception:
pass
currentdb = conn.scalar("select current_database()")
conn.execute("CREATE DATABASE %s TEMPLATE %s" % (ident, currentdb))
@@ -131,7 +131,7 @@ def _mysql_create_db(cfg, eng, ident):
with eng.connect() as conn:
try:
_mysql_drop_db(cfg, conn, ident)
- except:
+ except Exception:
pass
conn.execute("CREATE DATABASE %s" % ident)
conn.execute("CREATE DATABASE %s_test_schema" % ident)
@@ -173,15 +173,15 @@ def _mysql_drop_db(cfg, eng, ident):
with eng.connect() as conn:
try:
conn.execute("DROP DATABASE %s_test_schema" % ident)
- except:
+ except Exception:
pass
try:
conn.execute("DROP DATABASE %s_test_schema_2" % ident)
- except:
+ except Exception:
pass
try:
conn.execute("DROP DATABASE %s" % ident)
- except:
+ except Exception:
pass
diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py
index 92d3d93e5..38519dfb9 100644
--- a/lib/sqlalchemy/testing/suite/test_insert.py
+++ b/lib/sqlalchemy/testing/suite/test_insert.py
@@ -4,7 +4,7 @@ from .. import exclusions
from ..assertions import eq_
from .. import engines
-from sqlalchemy import Integer, String, select, util
+from sqlalchemy import Integer, String, select, literal_column, literal
from ..schema import Table, Column
@@ -90,6 +90,13 @@ class InsertBehaviorTest(fixtures.TablesTest):
Column('id', Integer, primary_key=True, autoincrement=False),
Column('data', String(50))
)
+ Table('includes_defaults', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)),
+ Column('x', Integer, default=5),
+ Column('y', Integer,
+ default=literal_column("2", type_=Integer) + literal(2)))
def test_autoclose_on_insert(self):
if requirements.returning.enabled:
@@ -158,6 +165,34 @@ class InsertBehaviorTest(fixtures.TablesTest):
("data3", ), ("data3", )]
)
+ @requirements.insert_from_select
+ def test_insert_from_select_with_defaults(self):
+ table = self.tables.includes_defaults
+ config.db.execute(
+ table.insert(),
+ [
+ dict(id=1, data="data1"),
+ dict(id=2, data="data2"),
+ dict(id=3, data="data3"),
+ ]
+ )
+
+ config.db.execute(
+ table.insert(inline=True).
+ from_select(("id", "data",),
+ select([table.c.id + 5, table.c.data]).
+ where(table.c.data.in_(["data2", "data3"]))
+ ),
+ )
+
+ eq_(
+ config.db.execute(
+ select([table]).order_by(table.c.data, table.c.id)
+ ).fetchall(),
+ [(1, 'data1', 5, 4), (2, 'data2', 5, 4),
+ (7, 'data2', 5, 4), (3, 'data3', 5, 4), (8, 'data3', 5, 4)]
+ )
+
class ReturningTest(fixtures.TablesTest):
run_create_tables = 'each'
diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py
index 60db9eb47..08b858b47 100644
--- a/lib/sqlalchemy/testing/suite/test_reflection.py
+++ b/lib/sqlalchemy/testing/suite/test_reflection.py
@@ -500,10 +500,12 @@ class ComponentReflectionTest(fixtures.TablesTest):
@testing.requires.unique_constraint_reflection
def test_get_temp_table_unique_constraints(self):
insp = inspect(self.metadata.bind)
- eq_(
- insp.get_unique_constraints('user_tmp'),
- [{'column_names': ['name'], 'name': 'user_tmp_uq'}]
- )
+ reflected = insp.get_unique_constraints('user_tmp')
+ for refl in reflected:
+ # Different dialects handle duplicate index and constraints
+ # differently, so ignore this flag
+ refl.pop('duplicates_index', None)
+ eq_(reflected, [{'column_names': ['name'], 'name': 'user_tmp_uq'}])
@testing.requires.temp_table_reflection
def test_get_temp_table_indexes(self):
@@ -556,6 +558,9 @@ class ComponentReflectionTest(fixtures.TablesTest):
)
for orig, refl in zip(uniques, reflected):
+ # Different dialects handle duplicate index and constraints
+ # differently, so ignore this flag
+ refl.pop('duplicates_index', None)
eq_(orig, refl)
@testing.provide_metadata
diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py
index 95369783d..5c17bea88 100644
--- a/lib/sqlalchemy/util/langhelpers.py
+++ b/lib/sqlalchemy/util/langhelpers.py
@@ -134,7 +134,8 @@ def public_factory(target, location):
fn = target.__init__
callable_ = target
doc = "Construct a new :class:`.%s` object. \n\n"\
- "This constructor is mirrored as a public API function; see :func:`~%s` "\
+ "This constructor is mirrored as a public API function; "\
+ "see :func:`~%s` "\
"for a full usage and argument description." % (
target.__name__, location, )
else:
@@ -155,6 +156,7 @@ def %(name)s(%(args)s):
exec(code, env)
decorated = env[location_name]
decorated.__doc__ = fn.__doc__
+ decorated.__module__ = "sqlalchemy" + location.rsplit(".", 1)[0]
if compat.py2k or hasattr(fn, '__func__'):
fn.__func__.__doc__ = doc
else:
@@ -490,7 +492,7 @@ def generic_repr(obj, additional_kw=(), to_inspect=None, omit_kwarg=()):
val = getattr(obj, arg, missing)
if val is not missing and val != defval:
output.append('%s=%r' % (arg, val))
- except:
+ except Exception:
pass
if additional_kw:
@@ -499,7 +501,7 @@ def generic_repr(obj, additional_kw=(), to_inspect=None, omit_kwarg=()):
val = getattr(obj, arg, missing)
if val is not missing and val != defval:
output.append('%s=%r' % (arg, val))
- except:
+ except Exception:
pass
return "%s(%s)" % (obj.__class__.__name__, ", ".join(output))
@@ -1198,7 +1200,7 @@ def warn_exception(func, *args, **kwargs):
"""
try:
return func(*args, **kwargs)
- except:
+ except Exception:
warn("%s('%s') ignored" % sys.exc_info()[0:2])