summaryrefslogtreecommitdiff
path: root/lib/sqlalchemy/engine
diff options
context:
space:
mode:
authorjonathan vanasco <jonathan@2xlp.com>2015-04-02 13:30:26 -0400
committerjonathan vanasco <jonathan@2xlp.com>2015-04-02 13:30:26 -0400
commit6de3d490a2adb0fff43f98e15a53407b46668b61 (patch)
treed5e0e2077dfe7dc69ce30e9d0a8c89ceff78e3fe /lib/sqlalchemy/engine
parentefca4af93603faa7abfeacbab264cad85ee4105c (diff)
parent5e04995a82c00e801a99765cde7726f5e73e18c2 (diff)
downloadsqlalchemy-6de3d490a2adb0fff43f98e15a53407b46668b61.tar.gz
Merge branch 'master' of bitbucket.org:zzzeek/sqlalchemy
Diffstat (limited to 'lib/sqlalchemy/engine')
-rw-r--r--lib/sqlalchemy/engine/__init__.py28
-rw-r--r--lib/sqlalchemy/engine/base.py329
-rw-r--r--lib/sqlalchemy/engine/default.py152
-rw-r--r--lib/sqlalchemy/engine/interfaces.py91
-rw-r--r--lib/sqlalchemy/engine/reflection.py84
-rw-r--r--lib/sqlalchemy/engine/result.py395
-rw-r--r--lib/sqlalchemy/engine/strategies.py13
-rw-r--r--lib/sqlalchemy/engine/threadlocal.py7
-rw-r--r--lib/sqlalchemy/engine/url.py2
-rw-r--r--lib/sqlalchemy/engine/util.py2
10 files changed, 831 insertions, 272 deletions
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 68145f5cd..0678dd201 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -1,5 +1,5 @@
# engine/__init__.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -72,6 +72,7 @@ from .base import (
)
from .result import (
+ BaseRowProxy,
BufferedColumnResultProxy,
BufferedColumnRow,
BufferedRowResultProxy,
@@ -256,14 +257,26 @@ def create_engine(*args, **kwargs):
Behavior here varies per backend, and
individual dialects should be consulted directly.
+ Note that the isolation level can also be set on a per-:class:`.Connection`
+ basis as well, using the
+ :paramref:`.Connection.execution_options.isolation_level`
+ feature.
+
.. seealso::
- :ref:`SQLite Concurrency <sqlite_concurrency>`
+ :attr:`.Connection.default_isolation_level` - view default level
+
+ :paramref:`.Connection.execution_options.isolation_level`
+ - set per :class:`.Connection` isolation level
+
+ :ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
:ref:`Postgresql Transaction Isolation <postgresql_isolation_level>`
:ref:`MySQL Transaction Isolation <mysql_isolation_level>`
+ :ref:`session_transaction_isolation` - for the ORM
+
:param label_length=None: optional integer value which limits
the size of dynamically generated column labels to that many
characters. If less than 6, labels are generated as
@@ -292,6 +305,17 @@ def create_engine(*args, **kwargs):
be used instead. Can be used for testing of DBAPIs as well as to
inject "mock" DBAPI implementations into the :class:`.Engine`.
+ :param paramstyle=None: The `paramstyle <http://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
+ to use when rendering bound parameters. This style defaults to the
+ one recommended by the DBAPI itself, which is retrieved from the
+ ``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept
+ more than one paramstyle, and in particular it may be desirable
+ to change a "named" paramstyle into a "positional" one, or vice versa.
+ When this attribute is passed, it should be one of the values
+ ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
+ ``"pyformat"``, and should correspond to a parameter style known
+ to be supported by the DBAPI in use.
+
:param pool=None: an already-constructed instance of
:class:`~sqlalchemy.pool.Pool`, such as a
:class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index dd82be1d1..5921ab9ba 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1,5 +1,5 @@
# engine/base.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -201,14 +201,19 @@ class Connection(Connectable):
used by the ORM internally supersedes a cache dictionary
specified here.
- :param isolation_level: Available on: Connection.
+ :param isolation_level: Available on: :class:`.Connection`.
Set the transaction isolation level for
- the lifespan of this connection. Valid values include
- those string values accepted by the ``isolation_level``
- parameter passed to :func:`.create_engine`, and are
- database specific, including those for :ref:`sqlite_toplevel`,
- :ref:`postgresql_toplevel` - see those dialect's documentation
- for further info.
+ the lifespan of this :class:`.Connection` object (*not* the
+ underlying DBAPI connection, for which the level is reset
+ to its original setting upon termination of this
+ :class:`.Connection` object).
+
+ Valid values include
+ those string values accepted by the
+ :paramref:`.create_engine.isolation_level`
+ parameter passed to :func:`.create_engine`. These levels are
+ semi-database specific; see individual dialect documentation for
+ valid levels.
Note that this option necessarily affects the underlying
DBAPI connection for the lifespan of the originating
@@ -217,6 +222,41 @@ class Connection(Connectable):
is returned to the connection pool, i.e.
the :meth:`.Connection.close` method is called.
+ .. warning:: The ``isolation_level`` execution option should
+ **not** be used when a transaction is already established, that
+ is, the :meth:`.Connection.begin` method or similar has been
+ called. A database cannot change the isolation level on a
+ transaction in progress, and different DBAPIs and/or
+ SQLAlchemy dialects may implicitly roll back or commit
+ the transaction, or not affect the connection at all.
+
+ .. versionchanged:: 0.9.9 A warning is emitted when the
+ ``isolation_level`` execution option is used after a
+ transaction has been started with :meth:`.Connection.begin`
+ or similar.
+
+ .. note:: The ``isolation_level`` execution option is implicitly
+ reset if the :class:`.Connection` is invalidated, e.g. via
+ the :meth:`.Connection.invalidate` method, or if a
+ disconnection error occurs. The new connection produced after
+ the invalidation will not have the isolation level re-applied
+ to it automatically.
+
+ .. seealso::
+
+ :paramref:`.create_engine.isolation_level`
+ - set per :class:`.Engine` isolation level
+
+ :meth:`.Connection.get_isolation_level` - view current level
+
+ :ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
+
+ :ref:`Postgresql Transaction Isolation <postgresql_isolation_level>`
+
+ :ref:`MySQL Transaction Isolation <mysql_isolation_level>`
+
+ :ref:`session_transaction_isolation` - for the ORM
+
:param no_parameters: When ``True``, if the final parameter
list or dictionary is totally empty, will invoke the
statement on the cursor as ``cursor.execute(statement)``,
@@ -260,23 +300,97 @@ class Connection(Connectable):
@property
def connection(self):
- "The underlying DB-API connection managed by this Connection."
+ """The underlying DB-API connection managed by this Connection.
+
+ .. seealso::
+
+
+ :ref:`dbapi_connections`
+
+ """
try:
return self.__connection
except AttributeError:
- return self._revalidate_connection()
+ try:
+ return self._revalidate_connection()
+ except Exception as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+
+ def get_isolation_level(self):
+ """Return the current isolation level assigned to this
+ :class:`.Connection`.
+
+ This will typically be the default isolation level as determined
+ by the dialect, unless the
+ :paramref:`.Connection.execution_options.isolation_level`
+ feature has been used to alter the isolation level on a
+ per-:class:`.Connection` basis.
+
+ This attribute will typically perform a live SQL operation in order
+ to procure the current isolation level, so the value returned is the
+ actual level on the underlying DBAPI connection regardless of how
+ this state was set. Compare to the
+ :attr:`.Connection.default_isolation_level` accessor
+ which returns the dialect-level setting without performing a SQL
+ query.
+
+ .. versionadded:: 0.9.9
+
+ .. seealso::
+
+ :attr:`.Connection.default_isolation_level` - view default level
+
+ :paramref:`.create_engine.isolation_level`
+ - set per :class:`.Engine` isolation level
+
+ :paramref:`.Connection.execution_options.isolation_level`
+ - set per :class:`.Connection` isolation level
+
+ """
+ try:
+ return self.dialect.get_isolation_level(self.connection)
+ except Exception as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+
+ @property
+ def default_isolation_level(self):
+ """The default isolation level assigned to this :class:`.Connection`.
+
+ This is the isolation level setting that the :class:`.Connection`
+ has when first procured via the :meth:`.Engine.connect` method.
+ This level stays in place until the
+ :paramref:`.Connection.execution_options.isolation_level` is used
+ to change the setting on a per-:class:`.Connection` basis.
+
+ Unlike :meth:`.Connection.get_isolation_level`, this attribute is set
+ ahead of time from the first connection procured by the dialect,
+ so a SQL query is not invoked when this accessor is called.
+
+ .. versionadded:: 0.9.9
+
+ .. seealso::
+
+ :meth:`.Connection.get_isolation_level` - view current level
+
+ :paramref:`.create_engine.isolation_level`
+ - set per :class:`.Engine` isolation level
+
+ :paramref:`.Connection.execution_options.isolation_level`
+ - set per :class:`.Connection` isolation level
+
+ """
+ return self.dialect.default_isolation_level
def _revalidate_connection(self):
if self.__branch_from:
return self.__branch_from._revalidate_connection()
-
if self.__can_reconnect and self.__invalid:
if self.__transaction is not None:
raise exc.InvalidRequestError(
"Can't reconnect until invalid "
"transaction is rolled back")
- self.__connection = self.engine.raw_connection()
+ self.__connection = self.engine.raw_connection(_connection=self)
self.__invalid = False
return self.__connection
raise exc.ResourceClosedError("This Connection is closed")
@@ -741,7 +855,7 @@ class Connection(Connectable):
a subclass of :class:`.Executable`, such as a
:func:`~.expression.select` construct
* a :class:`.FunctionElement`, such as that generated
- by :attr:`.func`, will be automatically wrapped in
+ by :data:`.func`, will be automatically wrapped in
a SELECT statement, which is then executed.
* a :class:`.DDLElement` object
* a :class:`.DefaultGenerator` object
@@ -877,9 +991,8 @@ class Connection(Connectable):
dialect = self.dialect
if 'compiled_cache' in self._execution_options:
key = dialect, elem, tuple(sorted(keys)), len(distilled_params) > 1
- if key in self._execution_options['compiled_cache']:
- compiled_sql = self._execution_options['compiled_cache'][key]
- else:
+ compiled_sql = self._execution_options['compiled_cache'].get(key)
+ if compiled_sql is None:
compiled_sql = elem.compile(
dialect=dialect, column_keys=keys,
inline=len(distilled_params) > 1)
@@ -959,9 +1072,10 @@ class Connection(Connectable):
context = constructor(dialect, self, conn, *args)
except Exception as e:
- self._handle_dbapi_exception(e,
- util.text_type(statement), parameters,
- None, None)
+ self._handle_dbapi_exception(
+ e,
+ util.text_type(statement), parameters,
+ None, None)
if context.compiled:
context.pre_exec()
@@ -985,36 +1099,39 @@ class Connection(Connectable):
"%r",
sql_util._repr_params(parameters, batches=10)
)
+
+ evt_handled = False
try:
if context.executemany:
- for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_executemany:
- if fn(cursor, statement, parameters, context):
- break
- else:
+ if self.dialect._has_events:
+ for fn in self.dialect.dispatch.do_executemany:
+ if fn(cursor, statement, parameters, context):
+ evt_handled = True
+ break
+ if not evt_handled:
self.dialect.do_executemany(
cursor,
statement,
parameters,
context)
-
elif not parameters and context.no_parameters:
- for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_execute_no_params:
- if fn(cursor, statement, context):
- break
- else:
+ if self.dialect._has_events:
+ for fn in self.dialect.dispatch.do_execute_no_params:
+ if fn(cursor, statement, context):
+ evt_handled = True
+ break
+ if not evt_handled:
self.dialect.do_execute_no_params(
cursor,
statement,
context)
-
else:
- for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_execute:
- if fn(cursor, statement, parameters, context):
- break
- else:
+ if self.dialect._has_events:
+ for fn in self.dialect.dispatch.do_execute:
+ if fn(cursor, statement, parameters, context):
+ evt_handled = True
+ break
+ if not evt_handled:
self.dialect.do_execute(
cursor,
statement,
@@ -1038,36 +1155,17 @@ class Connection(Connectable):
if context.compiled:
context.post_exec()
- if context.isinsert and not context.executemany:
- context.post_insert()
-
- # create a resultproxy, get rowcount/implicit RETURNING
- # rows, close cursor if no further results pending
- result = context.get_result_proxy()
- if context.isinsert:
- if context._is_implicit_returning:
- context._fetch_implicit_returning(result)
- result.close(_autoclose_connection=False)
- result._metadata = None
- elif not context._is_explicit_returning:
- result.close(_autoclose_connection=False)
- result._metadata = None
- elif context.isupdate and context._is_implicit_returning:
- context._fetch_implicit_update_returning(result)
- result.close(_autoclose_connection=False)
- result._metadata = None
-
- elif result._metadata is None:
- # no results, get rowcount
- # (which requires open cursor on some drivers
- # such as kintersbasdb, mxodbc),
- result.rowcount
- result.close(_autoclose_connection=False)
+ if context.is_crud:
+ result = context._setup_crud_result_proxy()
+ else:
+ result = context.get_result_proxy()
+ if result._metadata is None:
+ result._soft_close(_autoclose_connection=False)
if context.should_autocommit and self._root.__transaction is None:
self._root._commit_impl(autocommit=True)
- if result.closed and self.should_close_with_result:
+ if result._soft_closed and self.should_close_with_result:
self.close()
return result
@@ -1149,7 +1247,10 @@ class Connection(Connectable):
self._is_disconnect = \
isinstance(e, self.dialect.dbapi.Error) and \
not self.closed and \
- self.dialect.is_disconnect(e, self.__connection, cursor)
+ self.dialect.is_disconnect(
+ e,
+ self.__connection if not self.invalidated else None,
+ cursor)
if context:
context.is_disconnect = self._is_disconnect
@@ -1194,7 +1295,8 @@ class Connection(Connectable):
# new handle_error event
ctx = ExceptionContextImpl(
- e, sqlalchemy_exception, self, cursor, statement,
+ e, sqlalchemy_exception, self.engine,
+ self, cursor, statement,
parameters, context, self._is_disconnect)
for fn in self.dispatch.handle_error:
@@ -1236,12 +1338,65 @@ class Connection(Connectable):
del self._reentrant_error
if self._is_disconnect:
del self._is_disconnect
- dbapi_conn_wrapper = self.connection
- self.engine.pool._invalidate(dbapi_conn_wrapper, e)
- self.invalidate(e)
+ if not self.invalidated:
+ dbapi_conn_wrapper = self.__connection
+ self.engine.pool._invalidate(dbapi_conn_wrapper, e)
+ self.invalidate(e)
if self.should_close_with_result:
self.close()
+ @classmethod
+ def _handle_dbapi_exception_noconnection(cls, e, dialect, engine):
+
+ exc_info = sys.exc_info()
+
+ is_disconnect = dialect.is_disconnect(e, None, None)
+
+ should_wrap = isinstance(e, dialect.dbapi.Error)
+
+ if should_wrap:
+ sqlalchemy_exception = exc.DBAPIError.instance(
+ None,
+ None,
+ e,
+ dialect.dbapi.Error,
+ connection_invalidated=is_disconnect)
+ else:
+ sqlalchemy_exception = None
+
+ newraise = None
+
+ if engine._has_events:
+ ctx = ExceptionContextImpl(
+ e, sqlalchemy_exception, engine, None, None, None,
+ None, None, is_disconnect)
+ for fn in engine.dispatch.handle_error:
+ try:
+ # handler returns an exception;
+ # call next handler in a chain
+ per_fn = fn(ctx)
+ if per_fn is not None:
+ ctx.chained_exception = newraise = per_fn
+ except Exception as _raised:
+ # handler raises an exception - stop processing
+ newraise = _raised
+ break
+
+ if sqlalchemy_exception and \
+ is_disconnect != ctx.is_disconnect:
+ sqlalchemy_exception.connection_invalidated = \
+ is_disconnect = ctx.is_disconnect
+
+ if newraise:
+ util.raise_from_cause(newraise, exc_info)
+ elif should_wrap:
+ util.raise_from_cause(
+ sqlalchemy_exception,
+ exc_info
+ )
+ else:
+ util.reraise(*exc_info)
+
def default_schema_name(self):
return self.engine.dialect.get_default_schema_name(self)
@@ -1320,8 +1475,9 @@ class ExceptionContextImpl(ExceptionContext):
"""Implement the :class:`.ExceptionContext` interface."""
def __init__(self, exception, sqlalchemy_exception,
- connection, cursor, statement, parameters,
+ engine, connection, cursor, statement, parameters,
context, is_disconnect):
+ self.engine = engine
self.connection = connection
self.sqlalchemy_exception = sqlalchemy_exception
self.original_exception = exception
@@ -1865,10 +2021,11 @@ class Engine(Connectable, log.Identified):
"""
- return self._connection_cls(self,
- self.pool.connect(),
- close_with_result=close_with_result,
- **kwargs)
+ return self._connection_cls(
+ self,
+ self._wrap_pool_connect(self.pool.connect, None),
+ close_with_result=close_with_result,
+ **kwargs)
def table_names(self, schema=None, connection=None):
"""Return a list of all table names available in the database.
@@ -1898,7 +2055,18 @@ class Engine(Connectable, log.Identified):
"""
return self.run_callable(self.dialect.has_table, table_name, schema)
- def raw_connection(self):
+ def _wrap_pool_connect(self, fn, connection):
+ dialect = self.dialect
+ try:
+ return fn()
+ except dialect.dbapi.Error as e:
+ if connection is None:
+ Connection._handle_dbapi_exception_noconnection(
+ e, dialect, self)
+ else:
+ util.reraise(*sys.exc_info())
+
+ def raw_connection(self, _connection=None):
"""Return a "raw" DBAPI connection from the connection pool.
The returned object is a proxied version of the DBAPI
@@ -1909,13 +2077,18 @@ class Engine(Connectable, log.Identified):
for real.
This method provides direct DBAPI connection access for
- special situations. In most situations, the :class:`.Connection`
- object should be used, which is procured using the
- :meth:`.Engine.connect` method.
+ special situations when the API provided by :class:`.Connection`
+ is not needed. When a :class:`.Connection` object is already
+ present, the DBAPI connection is available using
+ the :attr:`.Connection.connection` accessor.
- """
+ .. seealso::
- return self.pool.unique_connection()
+ :ref:`dbapi_connections`
+
+ """
+ return self._wrap_pool_connect(
+ self.pool.unique_connection, _connection)
class OptionEngine(Engine):
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index a5af6ff19..3eebc6c06 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -1,5 +1,5 @@
# engine/default.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -395,6 +395,12 @@ class DefaultDialect(interfaces.Dialect):
self._set_connection_isolation(connection, opts['isolation_level'])
def _set_connection_isolation(self, connection, level):
+ if connection.in_transaction():
+ util.warn(
+ "Connection is already established with a Transaction; "
+ "setting isolation_level may implicitly rollback or commit "
+ "the existing transaction, or have no effect until "
+ "next transaction")
self.set_isolation_level(connection.connection, level)
connection.connection._connection_record.\
finalize_callback.append(self.reset_isolation_level)
@@ -452,14 +458,12 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
isinsert = False
isupdate = False
isdelete = False
+ is_crud = False
isddl = False
executemany = False
- result_map = None
compiled = None
statement = None
- postfetch_cols = None
- prefetch_cols = None
- returning_cols = None
+ result_column_struct = None
_is_implicit_returning = False
_is_explicit_returning = False
@@ -515,15 +519,11 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
if not compiled.can_execute:
raise exc.ArgumentError("Not an executable clause")
- self.execution_options = compiled.statement._execution_options
- if connection._execution_options:
- self.execution_options = dict(self.execution_options)
- self.execution_options.update(connection._execution_options)
-
- # compiled clauseelement. process bind params, process table defaults,
- # track collections used by ResultProxy to target and process results
+ self.execution_options = compiled.statement._execution_options.union(
+ connection._execution_options)
- self.result_map = compiled.result_map
+ self.result_column_struct = (
+ compiled._result_columns, compiled._ordered_columns)
self.unicode_statement = util.text_type(compiled)
if not dialect.supports_unicode_statements:
@@ -548,6 +548,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.cursor = self.create_cursor()
if self.isinsert or self.isupdate or self.isdelete:
+ self.is_crud = True
self._is_explicit_returning = bool(compiled.statement._returning)
self._is_implicit_returning = bool(
compiled.returning and not compiled.statement._returning)
@@ -681,10 +682,6 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
return self.execution_options.get("no_parameters", False)
@util.memoized_property
- def is_crud(self):
- return self.isinsert or self.isupdate or self.isdelete
-
- @util.memoized_property
def should_autocommit(self):
autocommit = self.execution_options.get('autocommit',
not self.compiled and
@@ -799,52 +796,84 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
def supports_sane_multi_rowcount(self):
return self.dialect.supports_sane_multi_rowcount
- def post_insert(self):
-
+ def _setup_crud_result_proxy(self):
+ if self.isinsert and \
+ not self.executemany:
+ if not self._is_implicit_returning and \
+ not self.compiled.inline and \
+ self.dialect.postfetch_lastrowid:
+
+ self._setup_ins_pk_from_lastrowid()
+
+ elif not self._is_implicit_returning:
+ self._setup_ins_pk_from_empty()
+
+ result = self.get_result_proxy()
+
+ if self.isinsert:
+ if self._is_implicit_returning:
+ row = result.fetchone()
+ self.returned_defaults = row
+ self._setup_ins_pk_from_implicit_returning(row)
+ result._soft_close(_autoclose_connection=False)
+ result._metadata = None
+ elif not self._is_explicit_returning:
+ result._soft_close(_autoclose_connection=False)
+ result._metadata = None
+ elif self.isupdate and self._is_implicit_returning:
+ row = result.fetchone()
+ self.returned_defaults = row
+ result._soft_close(_autoclose_connection=False)
+ result._metadata = None
+
+ elif result._metadata is None:
+ # no results, get rowcount
+ # (which requires open cursor on some drivers
+ # such as kintersbasdb, mxodbc)
+ result.rowcount
+ result._soft_close(_autoclose_connection=False)
+ return result
+
+ def _setup_ins_pk_from_lastrowid(self):
key_getter = self.compiled._key_getters_for_crud_column[2]
table = self.compiled.statement.table
+ compiled_params = self.compiled_parameters[0]
+
+ lastrowid = self.get_lastrowid()
+ autoinc_col = table._autoincrement_column
+ if autoinc_col is not None:
+ # apply type post processors to the lastrowid
+ proc = autoinc_col.type._cached_result_processor(
+ self.dialect, None)
+ if proc is not None:
+ lastrowid = proc(lastrowid)
+ self.inserted_primary_key = [
+ lastrowid if c is autoinc_col else
+ compiled_params.get(key_getter(c), None)
+ for c in table.primary_key
+ ]
- if not self._is_implicit_returning and \
- not self._is_explicit_returning and \
- not self.compiled.inline and \
- self.dialect.postfetch_lastrowid:
-
- lastrowid = self.get_lastrowid()
- autoinc_col = table._autoincrement_column
- if autoinc_col is not None:
- # apply type post processors to the lastrowid
- proc = autoinc_col.type._cached_result_processor(
- self.dialect, None)
- if proc is not None:
- lastrowid = proc(lastrowid)
- self.inserted_primary_key = [
- lastrowid if c is autoinc_col else
- self.compiled_parameters[0].get(key_getter(c), None)
- for c in table.primary_key
- ]
- else:
- self.inserted_primary_key = [
- self.compiled_parameters[0].get(key_getter(c), None)
- for c in table.primary_key
- ]
-
- def _fetch_implicit_returning(self, resultproxy):
+ def _setup_ins_pk_from_empty(self):
+ key_getter = self.compiled._key_getters_for_crud_column[2]
table = self.compiled.statement.table
- row = resultproxy.fetchone()
-
- ipk = []
- for c, v in zip(table.primary_key, self.inserted_primary_key):
- if v is not None:
- ipk.append(v)
- else:
- ipk.append(row[c])
+ compiled_params = self.compiled_parameters[0]
+ self.inserted_primary_key = [
+ compiled_params.get(key_getter(c), None)
+ for c in table.primary_key
+ ]
- self.inserted_primary_key = ipk
- self.returned_defaults = row
+ def _setup_ins_pk_from_implicit_returning(self, row):
+ key_getter = self.compiled._key_getters_for_crud_column[2]
+ table = self.compiled.statement.table
+ compiled_params = self.compiled_parameters[0]
- def _fetch_implicit_update_returning(self, resultproxy):
- row = resultproxy.fetchone()
- self.returned_defaults = row
+ self.inserted_primary_key = [
+ row[col] if value is None else value
+ for col, value in [
+ (col, compiled_params.get(key_getter(col), None))
+ for col in table.primary_key
+ ]
+ ]
def lastrow_has_defaults(self):
return (self.isinsert or self.isupdate) and \
@@ -956,14 +985,17 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
def _process_executesingle_defaults(self):
key_getter = self.compiled._key_getters_for_crud_column[2]
-
prefetch = self.compiled.prefetch
self.current_parameters = compiled_parameters = \
self.compiled_parameters[0]
for c in prefetch:
if self.isinsert:
- val = self.get_insert_default(c)
+ if c.default and \
+ not c.default.is_sequence and c.default.is_scalar:
+ val = c.default.arg
+ else:
+ val = self.get_insert_default(c)
else:
val = self.get_update_default(c)
@@ -972,6 +1004,4 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
del self.current_parameters
-
-
DefaultDialect.execution_ctx_cls = DefaultExecutionContext
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index 0ad2efae0..da8fa81eb 100644
--- a/lib/sqlalchemy/engine/interfaces.py
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -1,5 +1,5 @@
# engine/interfaces.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -654,17 +654,82 @@ class Dialect(object):
return None
def reset_isolation_level(self, dbapi_conn):
- """Given a DBAPI connection, revert its isolation to the default."""
+ """Given a DBAPI connection, revert its isolation to the default.
+
+ Note that this is a dialect-level method which is used as part
+ of the implementation of the :class:`.Connection` and
+ :class:`.Engine`
+ isolation level facilities; these APIs should be preferred for
+ most typical use cases.
+
+ .. seealso::
+
+ :meth:`.Connection.get_isolation_level` - view current level
+
+ :attr:`.Connection.default_isolation_level` - view default level
+
+ :paramref:`.Connection.execution_options.isolation_level` -
+ set per :class:`.Connection` isolation level
+
+ :paramref:`.create_engine.isolation_level` -
+ set per :class:`.Engine` isolation level
+
+ """
raise NotImplementedError()
def set_isolation_level(self, dbapi_conn, level):
- """Given a DBAPI connection, set its isolation level."""
+ """Given a DBAPI connection, set its isolation level.
+
+ Note that this is a dialect-level method which is used as part
+ of the implementation of the :class:`.Connection` and
+ :class:`.Engine`
+ isolation level facilities; these APIs should be preferred for
+ most typical use cases.
+
+ .. seealso::
+
+ :meth:`.Connection.get_isolation_level` - view current level
+
+ :attr:`.Connection.default_isolation_level` - view default level
+
+ :paramref:`.Connection.execution_options.isolation_level` -
+ set per :class:`.Connection` isolation level
+
+ :paramref:`.create_engine.isolation_level` -
+ set per :class:`.Engine` isolation level
+
+ """
raise NotImplementedError()
def get_isolation_level(self, dbapi_conn):
- """Given a DBAPI connection, return its isolation level."""
+ """Given a DBAPI connection, return its isolation level.
+
+ When working with a :class:`.Connection` object, the corresponding
+ DBAPI connection may be procured using the
+ :attr:`.Connection.connection` accessor.
+
+ Note that this is a dialect-level method which is used as part
+ of the implementation of the :class:`.Connection` and
+ :class:`.Engine` isolation level facilities;
+ these APIs should be preferred for most typical use cases.
+
+
+ .. seealso::
+
+ :meth:`.Connection.get_isolation_level` - view current level
+
+ :attr:`.Connection.default_isolation_level` - view default level
+
+ :paramref:`.Connection.execution_options.isolation_level` -
+ set per :class:`.Connection` isolation level
+
+ :paramref:`.create_engine.isolation_level` -
+ set per :class:`.Engine` isolation level
+
+
+ """
raise NotImplementedError()
@@ -917,7 +982,23 @@ class ExceptionContext(object):
connection = None
"""The :class:`.Connection` in use during the exception.
- This member is always present.
+ This member is present, except in the case of a failure when
+ first connecting.
+
+ .. seealso::
+
+ :attr:`.ExceptionContext.engine`
+
+
+ """
+
+ engine = None
+ """The :class:`.Engine` in use during the exception.
+
+ This member should always be present, even in the case of a failure
+ when first connecting.
+
+ .. versionadded:: 1.0.0
"""
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 2a1def86a..59eed51ec 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -1,5 +1,5 @@
# engine/reflection.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -173,7 +173,14 @@ class Inspector(object):
passed as ``None``. For special quoting, use :class:`.quoted_name`.
:param order_by: Optional, may be the string "foreign_key" to sort
- the result on foreign key dependencies.
+ the result on foreign key dependencies. Does not automatically
+ resolve cycles, and will raise :class:`.CircularDependencyError`
+ if cycles exist.
+
+ .. deprecated:: 1.0.0 - see
+ :meth:`.Inspector.get_sorted_table_and_fkc_names` for a version
+ of this which resolves foreign key cycles between tables
+ automatically.
.. versionchanged:: 0.8 the "foreign_key" sorting sorts tables
in order of dependee to dependent; that is, in creation
@@ -183,6 +190,8 @@ class Inspector(object):
.. seealso::
+ :meth:`.Inspector.get_sorted_table_and_fkc_names`
+
:attr:`.MetaData.sorted_tables`
"""
@@ -201,6 +210,64 @@ class Inspector(object):
tnames = list(topological.sort(tuples, tnames))
return tnames
+ def get_sorted_table_and_fkc_names(self, schema=None):
+ """Return dependency-sorted table and foreign key constraint names in
+ referred to within a particular schema.
+
+ This will yield 2-tuples of
+ ``(tablename, [(tname, fkname), (tname, fkname), ...])``
+ consisting of table names in CREATE order grouped with the foreign key
+ constraint names that are not detected as belonging to a cycle.
+ The final element
+ will be ``(None, [(tname, fkname), (tname, fkname), ..])``
+ which will consist of remaining
+ foreign key constraint names that would require a separate CREATE
+ step after-the-fact, based on dependencies between tables.
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`.Inspector.get_table_names`
+
+ :func:`.sort_tables_and_constraints` - similar method which works
+ with an already-given :class:`.MetaData`.
+
+ """
+ if hasattr(self.dialect, 'get_table_names'):
+ tnames = self.dialect.get_table_names(
+ self.bind, schema, info_cache=self.info_cache)
+ else:
+ tnames = self.engine.table_names(schema)
+
+ tuples = set()
+ remaining_fkcs = set()
+
+ fknames_for_table = {}
+ for tname in tnames:
+ fkeys = self.get_foreign_keys(tname, schema)
+ fknames_for_table[tname] = set(
+ [fk['name'] for fk in fkeys]
+ )
+ for fkey in fkeys:
+ if tname != fkey['referred_table']:
+ tuples.add((fkey['referred_table'], tname))
+ try:
+ candidate_sort = list(topological.sort(tuples, tnames))
+ except exc.CircularDependencyError as err:
+ for edge in err.edges:
+ tuples.remove(edge)
+ remaining_fkcs.update(
+ (edge[1], fkc)
+ for fkc in fknames_for_table[edge[1]]
+ )
+
+ candidate_sort = list(topological.sort(tuples, tnames))
+ return [
+ (tname, fknames_for_table[tname].difference(remaining_fkcs))
+ for tname in candidate_sort
+ ] + [(None, list(remaining_fkcs))]
+
def get_temp_table_names(self):
"""return a list of temporary table names for the current bind.
@@ -394,6 +461,12 @@ class Inspector(object):
unique
boolean
+ dialect_options
+ dict of dialect-specific index options. May not be present
+ for all dialects.
+
+ .. versionadded:: 1.0.0
+
:param table_name: string name of the table. For special quoting,
use :class:`.quoted_name`.
@@ -642,6 +715,8 @@ class Inspector(object):
columns = index_d['column_names']
unique = index_d['unique']
flavor = index_d.get('type', 'index')
+ dialect_options = index_d.get('dialect_options', {})
+
duplicates = index_d.get('duplicates_constraint')
if include_columns and \
not set(columns).issubset(include_columns):
@@ -667,7 +742,10 @@ class Inspector(object):
else:
idx_cols.append(idx_col)
- sa_schema.Index(name, *idx_cols, **dict(unique=unique))
+ sa_schema.Index(
+ name, *idx_cols,
+ **dict(list(dialect_options.items()) + [('unique', unique)])
+ )
def _reflect_unique_constraints(
self, table_name, schema, table, cols_by_orig_name,
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index 3995942ef..6d19cb6d0 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -1,5 +1,5 @@
# engine/result.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -187,86 +187,162 @@ class ResultMetaData(object):
context."""
def __init__(self, parent, metadata):
- self._processors = processors = []
-
- # We do not strictly need to store the processor in the key mapping,
- # though it is faster in the Python version (probably because of the
- # saved attribute lookup self._processors)
- self._keymap = keymap = {}
- self.keys = []
context = parent.context
dialect = context.dialect
typemap = dialect.dbapi_type_map
translate_colname = context._translate_colname
- self.case_sensitive = dialect.case_sensitive
-
- # high precedence key values.
- primary_keymap = {}
-
- for i, rec in enumerate(metadata):
- colname = rec[0]
- coltype = rec[1]
-
- if dialect.description_encoding:
- colname = dialect._description_decoder(colname)
+ self.case_sensitive = case_sensitive = dialect.case_sensitive
+ if context.result_column_struct:
+ result_columns, cols_are_ordered = context.result_column_struct
+ num_ctx_cols = len(result_columns)
+ else:
+ num_ctx_cols = None
+
+ if num_ctx_cols and \
+ cols_are_ordered and \
+ num_ctx_cols == len(metadata):
+ # case 1 - SQL expression statement, number of columns
+ # in result matches number of cols in compiled. This is the
+ # vast majority case for SQL expression constructs. In this
+ # case we don't bother trying to parse or match up to
+ # the colnames in the result description.
+ raw = [
+ (
+ idx,
+ key,
+ name.lower() if not case_sensitive else name,
+ context.get_result_processor(
+ type_, key, metadata[idx][1]
+ ),
+ obj,
+ None
+ ) for idx, (key, name, obj, type_)
+ in enumerate(result_columns)
+ ]
+ self.keys = [
+ elem[1] for elem in result_columns
+ ]
+ else:
+ # case 2 - raw string, or number of columns in result does
+ # not match number of cols in compiled. The raw string case
+ # is very common. The latter can happen
+ # when text() is used with only a partial typemap, or
+ # in the extremely unlikely cases where the compiled construct
+ # has a single element with multiple col expressions in it
+ # (e.g. has commas embedded) or there's some kind of statement
+ # that is adding extra columns.
+ # In all these cases we fall back to the "named" approach
+ # that SQLAlchemy has used up through 0.9.
+
+ if num_ctx_cols:
+ result_map = self._create_result_map(result_columns)
+
+ raw = []
+ self.keys = []
+ untranslated = None
+ for idx, rec in enumerate(metadata):
+ colname = rec[0]
+ coltype = rec[1]
+
+ if dialect.description_encoding:
+ colname = dialect._description_decoder(colname)
+
+ if translate_colname:
+ colname, untranslated = translate_colname(colname)
+
+ if dialect.requires_name_normalize:
+ colname = dialect.normalize_name(colname)
+
+ self.keys.append(colname)
+ if not case_sensitive:
+ colname = colname.lower()
+
+ if num_ctx_cols:
+ try:
+ ctx_rec = result_map[colname]
+ except KeyError:
+ mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
+ obj = None
+ else:
+ obj = ctx_rec[1]
+ mapped_type = ctx_rec[2]
+ else:
+ mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
+ obj = None
+ processor = context.get_result_processor(
+ mapped_type, colname, coltype)
+
+ raw.append(
+ (idx, colname, colname, processor, obj, untranslated)
+ )
+
+ # keymap indexes by integer index...
+ self._keymap = dict([
+ (elem[0], (elem[3], elem[4], elem[0]))
+ for elem in raw
+ ])
+
+ # processors in key order for certain per-row
+ # views like __iter__ and slices
+ self._processors = [elem[3] for elem in raw]
+
+ if num_ctx_cols:
+ # keymap by primary string...
+ by_key = dict([
+ (elem[2], (elem[3], elem[4], elem[0]))
+ for elem in raw
+ ])
+
+ # if by-primary-string dictionary smaller (or bigger?!) than
+ # number of columns, assume we have dupes, rewrite
+ # dupe records with "None" for index which results in
+ # ambiguous column exception when accessed.
+ if len(by_key) != num_ctx_cols:
+ seen = set()
+ for rec in raw:
+ key = rec[1]
+ if key in seen:
+ by_key[key] = (None, by_key[key][1], None)
+ seen.add(key)
+
+ # update keymap with secondary "object"-based keys
+ self._keymap.update([
+ (obj_elem, by_key[elem[2]])
+ for elem in raw if elem[4]
+ for obj_elem in elem[4]
+ ])
+
+ # update keymap with primary string names taking
+ # precedence
+ self._keymap.update(by_key)
+ else:
+ self._keymap.update([
+ (elem[2], (elem[3], elem[4], elem[0]))
+ for elem in raw
+ ])
+ # update keymap with "translated" names (sqlite-only thing)
if translate_colname:
- colname, untranslated = translate_colname(colname)
-
- if dialect.requires_name_normalize:
- colname = dialect.normalize_name(colname)
-
- if context.result_map:
- try:
- name, obj, type_ = context.result_map[
- colname if self.case_sensitive else colname.lower()]
- except KeyError:
- name, obj, type_ = \
- colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
+ self._keymap.update([
+ (elem[5], self._keymap[elem[2]])
+ for elem in raw if elem[5]
+ ])
+
+ @classmethod
+ def _create_result_map(cls, result_columns):
+ d = {}
+ for elem in result_columns:
+ key, rec = elem[0], elem[1:]
+ if key in d:
+ # conflicting keyname, just double up the list
+ # of objects. this will cause an "ambiguous name"
+ # error if an attempt is made by the result set to
+ # access.
+ e_name, e_obj, e_type = d[key]
+ d[key] = e_name, e_obj + rec[1], e_type
else:
- name, obj, type_ = \
- colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
-
- processor = context.get_result_processor(type_, colname, coltype)
-
- processors.append(processor)
- rec = (processor, obj, i)
-
- # indexes as keys. This is only needed for the Python version of
- # RowProxy (the C version uses a faster path for integer indexes).
- primary_keymap[i] = rec
-
- # populate primary keymap, looking for conflicts.
- if primary_keymap.setdefault(
- name if self.case_sensitive
- else name.lower(),
- rec) is not rec:
- # place a record that doesn't have the "index" - this
- # is interpreted later as an AmbiguousColumnError,
- # but only when actually accessed. Columns
- # colliding by name is not a problem if those names
- # aren't used; integer access is always
- # unambiguous.
- primary_keymap[name
- if self.case_sensitive
- else name.lower()] = rec = (None, obj, None)
-
- self.keys.append(colname)
- if obj:
- for o in obj:
- keymap[o] = rec
- # technically we should be doing this but we
- # are saving on callcounts by not doing so.
- # if keymap.setdefault(o, rec) is not rec:
- # keymap[o] = (None, obj, None)
-
- if translate_colname and \
- untranslated:
- keymap[untranslated] = rec
-
- # overwrite keymap values with those of the
- # high precedence keymap.
- keymap.update(primary_keymap)
+ d[key] = rec
+ return d
@util.pending_deprecation("0.8", "sqlite dialect uses "
"_translate_colname() now")
@@ -403,11 +479,12 @@ class ResultProxy(object):
out_parameters = None
_can_close_connection = False
_metadata = None
+ _soft_closed = False
+ closed = False
def __init__(self, context):
self.context = context
self.dialect = context.dialect
- self.closed = False
self.cursor = self._saved_cursor = context.cursor
self.connection = context.root_connection
self._echo = self.connection._echo and \
@@ -544,33 +621,79 @@ class ResultProxy(object):
return self._saved_cursor.description
- def close(self, _autoclose_connection=True):
- """Close this ResultProxy.
-
- Closes the underlying DBAPI cursor corresponding to the execution.
+ def _soft_close(self, _autoclose_connection=True):
+ """Soft close this :class:`.ResultProxy`.
- Note that any data cached within this ResultProxy is still available.
- For some types of results, this may include buffered rows.
-
- If this ResultProxy was generated from an implicit execution,
- the underlying Connection will also be closed (returns the
- underlying DBAPI connection to the connection pool.)
+ This releases all DBAPI cursor resources, but leaves the
+ ResultProxy "open" from a semantic perspective, meaning the
+ fetchXXX() methods will continue to return empty results.
This method is called automatically when:
* all result rows are exhausted using the fetchXXX() methods.
* cursor.description is None.
+ This method is **not public**, but is documented in order to clarify
+ the "autoclose" process used.
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`.ResultProxy.close`
+
+
+ """
+ if self._soft_closed:
+ return
+ self._soft_closed = True
+ cursor = self.cursor
+ self.connection._safe_close_cursor(cursor)
+ if _autoclose_connection and \
+ self.connection.should_close_with_result:
+ self.connection.close()
+ self.cursor = None
+
+ def close(self):
+ """Close this ResultProxy.
+
+ This closes out the underlying DBAPI cursor corresponding
+ to the statement execution, if one is still present. Note that the
+ DBAPI cursor is automatically released when the :class:`.ResultProxy`
+ exhausts all available rows. :meth:`.ResultProxy.close` is generally
+ an optional method except in the case when discarding a
+ :class:`.ResultProxy` that still has additional rows pending for fetch.
+
+ In the case of a result that is the product of
+ :ref:`connectionless execution <dbengine_implicit>`,
+ the underlying :class:`.Connection` object is also closed, which
+ :term:`releases` DBAPI connection resources.
+
+ After this method is called, it is no longer valid to call upon
+ the fetch methods, which will raise a :class:`.ResourceClosedError`
+ on subsequent use.
+
+ .. versionchanged:: 1.0.0 - the :meth:`.ResultProxy.close` method
+ has been separated out from the process that releases the underlying
+ DBAPI cursor resource. The "auto close" feature of the
+ :class:`.Connection` now performs a so-called "soft close", which
+ releases the underlying DBAPI cursor, but allows the
+ :class:`.ResultProxy` to still behave as an open-but-exhausted
+ result set; the actual :meth:`.ResultProxy.close` method is never
+ called. It is still safe to discard a :class:`.ResultProxy`
+ that has been fully exhausted without calling this method.
+
+ .. seealso::
+
+ :ref:`connections_toplevel`
+
+ :meth:`.ResultProxy._soft_close`
+
"""
if not self.closed:
+ self._soft_close()
self.closed = True
- self.connection._safe_close_cursor(self.cursor)
- if _autoclose_connection and \
- self.connection.should_close_with_result:
- self.connection.close()
- # allow consistent errors
- self.cursor = None
def __iter__(self):
while True:
@@ -761,7 +884,7 @@ class ResultProxy(object):
try:
return self.cursor.fetchone()
except AttributeError:
- self._non_result()
+ return self._non_result(None)
def _fetchmany_impl(self, size=None):
try:
@@ -770,22 +893,24 @@ class ResultProxy(object):
else:
return self.cursor.fetchmany(size)
except AttributeError:
- self._non_result()
+ return self._non_result([])
def _fetchall_impl(self):
try:
return self.cursor.fetchall()
except AttributeError:
- self._non_result()
+ return self._non_result([])
- def _non_result(self):
+ def _non_result(self, default):
if self._metadata is None:
raise exc.ResourceClosedError(
"This result object does not return rows. "
"It has been closed automatically.",
)
- else:
+ elif self.closed:
raise exc.ResourceClosedError("This result object is closed.")
+ else:
+ return default
def process_rows(self, rows):
process_row = self._process_row
@@ -804,11 +929,25 @@ class ResultProxy(object):
for row in rows]
def fetchall(self):
- """Fetch all rows, just like DB-API ``cursor.fetchall()``."""
+ """Fetch all rows, just like DB-API ``cursor.fetchall()``.
+
+ After all rows have been exhausted, the underlying DBAPI
+ cursor resource is released, and the object may be safely
+ discarded.
+
+ Subsequent calls to :meth:`.ResultProxy.fetchall` will return
+ an empty list. After the :meth:`.ResultProxy.close` method is
+ called, the method will raise :class:`.ResourceClosedError`.
+
+ .. versionchanged:: 1.0.0 - Added "soft close" behavior which
+ allows the result to be used in an "exhausted" state prior to
+ calling the :meth:`.ResultProxy.close` method.
+
+ """
try:
l = self.process_rows(self._fetchall_impl())
- self.close()
+ self._soft_close()
return l
except Exception as e:
self.connection._handle_dbapi_exception(
@@ -819,15 +958,25 @@ class ResultProxy(object):
"""Fetch many rows, just like DB-API
``cursor.fetchmany(size=cursor.arraysize)``.
- If rows are present, the cursor remains open after this is called.
- Else the cursor is automatically closed and an empty list is returned.
+ After all rows have been exhausted, the underlying DBAPI
+ cursor resource is released, and the object may be safely
+ discarded.
+
+ Calls to :meth:`.ResultProxy.fetchmany` after all rows have been
+ exhausted will return
+ an empty list. After the :meth:`.ResultProxy.close` method is
+ called, the method will raise :class:`.ResourceClosedError`.
+
+ .. versionchanged:: 1.0.0 - Added "soft close" behavior which
+ allows the result to be used in an "exhausted" state prior to
+ calling the :meth:`.ResultProxy.close` method.
"""
try:
l = self.process_rows(self._fetchmany_impl(size))
if len(l) == 0:
- self.close()
+ self._soft_close()
return l
except Exception as e:
self.connection._handle_dbapi_exception(
@@ -837,8 +986,18 @@ class ResultProxy(object):
def fetchone(self):
"""Fetch one row, just like DB-API ``cursor.fetchone()``.
- If a row is present, the cursor remains open after this is called.
- Else the cursor is automatically closed and None is returned.
+ After all rows have been exhausted, the underlying DBAPI
+ cursor resource is released, and the object may be safely
+ discarded.
+
+ Calls to :meth:`.ResultProxy.fetchone` after all rows have
+ been exhausted will return ``None``.
+ After the :meth:`.ResultProxy.close` method is
+ called, the method will raise :class:`.ResourceClosedError`.
+
+ .. versionchanged:: 1.0.0 - Added "soft close" behavior which
+ allows the result to be used in an "exhausted" state prior to
+ calling the :meth:`.ResultProxy.close` method.
"""
try:
@@ -846,7 +1005,7 @@ class ResultProxy(object):
if row is not None:
return self.process_rows([row])[0]
else:
- self.close()
+ self._soft_close()
return None
except Exception as e:
self.connection._handle_dbapi_exception(
@@ -858,9 +1017,12 @@ class ResultProxy(object):
Returns None if no row is present.
+ After calling this method, the object is fully closed,
+ e.g. the :meth:`.ResultProxy.close` method will have been called.
+
"""
if self._metadata is None:
- self._non_result()
+ return self._non_result(None)
try:
row = self._fetchone_impl()
@@ -882,6 +1044,9 @@ class ResultProxy(object):
Returns None if no row is present.
+ After calling this method, the object is fully closed,
+ e.g. the :meth:`.ResultProxy.close` method will have been called.
+
"""
row = self.first()
if row is not None:
@@ -925,13 +1090,19 @@ class BufferedRowResultProxy(ResultProxy):
}
def __buffer_rows(self):
+ if self.cursor is None:
+ return
size = getattr(self, '_bufsize', 1)
self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
self._bufsize = self.size_growth.get(size, size)
+ def _soft_close(self, **kw):
+ self.__rowbuffer.clear()
+ super(BufferedRowResultProxy, self)._soft_close(**kw)
+
def _fetchone_impl(self):
- if self.closed:
- return None
+ if self.cursor is None:
+ return self._non_result(None)
if not self.__rowbuffer:
self.__buffer_rows()
if not self.__rowbuffer:
@@ -950,6 +1121,8 @@ class BufferedRowResultProxy(ResultProxy):
return result
def _fetchall_impl(self):
+ if self.cursor is None:
+ return self._non_result([])
self.__rowbuffer.extend(self.cursor.fetchall())
ret = self.__rowbuffer
self.__rowbuffer = collections.deque()
@@ -972,11 +1145,15 @@ class FullyBufferedResultProxy(ResultProxy):
def _buffer_rows(self):
return collections.deque(self.cursor.fetchall())
+ def _soft_close(self, **kw):
+ self.__rowbuffer.clear()
+ super(FullyBufferedResultProxy, self)._soft_close(**kw)
+
def _fetchone_impl(self):
if self.__rowbuffer:
return self.__rowbuffer.popleft()
else:
- return None
+ return self._non_result(None)
def _fetchmany_impl(self, size=None):
if size is None:
@@ -990,6 +1167,8 @@ class FullyBufferedResultProxy(ResultProxy):
return result
def _fetchall_impl(self):
+ if not self.cursor:
+ return self._non_result([])
ret = self.__rowbuffer
self.__rowbuffer = collections.deque()
return ret
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index 398ef8df6..1fd105d67 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -1,5 +1,5 @@
# engine/strategies.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -86,16 +86,7 @@ class DefaultEngineStrategy(EngineStrategy):
pool = pop_kwarg('pool', None)
if pool is None:
def connect():
- try:
- return dialect.connect(*cargs, **cparams)
- except dialect.dbapi.Error as e:
- invalidated = dialect.is_disconnect(e, None, None)
- util.raise_from_cause(
- exc.DBAPIError.instance(
- None, None, e, dialect.dbapi.Error,
- connection_invalidated=invalidated
- )
- )
+ return dialect.connect(*cargs, **cparams)
creator = pop_kwarg('creator', connect)
diff --git a/lib/sqlalchemy/engine/threadlocal.py b/lib/sqlalchemy/engine/threadlocal.py
index 637523a0e..0d6e1c0f1 100644
--- a/lib/sqlalchemy/engine/threadlocal.py
+++ b/lib/sqlalchemy/engine/threadlocal.py
@@ -1,5 +1,5 @@
# engine/threadlocal.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -59,7 +59,10 @@ class TLEngine(base.Engine):
# guards against pool-level reapers, if desired.
# or not connection.connection.is_valid:
connection = self._tl_connection_cls(
- self, self.pool.connect(), **kw)
+ self,
+ self._wrap_pool_connect(
+ self.pool.connect, connection),
+ **kw)
self._connections.conn = weakref.ref(connection)
return connection._increment_connect()
diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py
index 6544cfbf3..d045961dd 100644
--- a/lib/sqlalchemy/engine/url.py
+++ b/lib/sqlalchemy/engine/url.py
@@ -1,5 +1,5 @@
# engine/url.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/engine/util.py b/lib/sqlalchemy/engine/util.py
index d9eb1df10..3734c9960 100644
--- a/lib/sqlalchemy/engine/util.py
+++ b/lib/sqlalchemy/engine/util.py
@@ -1,5 +1,5 @@
# engine/util.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under