Diffstat (limited to 'lib/sqlalchemy/engine')
-rw-r--r--  lib/sqlalchemy/engine/__init__.py    |   8
-rw-r--r--  lib/sqlalchemy/engine/base.py        | 228
-rw-r--r--  lib/sqlalchemy/engine/default.py     | 116
-rw-r--r--  lib/sqlalchemy/engine/interfaces.py  |  23
-rw-r--r--  lib/sqlalchemy/engine/reflection.py  |  72
-rw-r--r--  lib/sqlalchemy/engine/result.py      | 136
-rw-r--r--  lib/sqlalchemy/engine/strategies.py  |  12
-rw-r--r--  lib/sqlalchemy/engine/threadlocal.py |  10
-rw-r--r--  lib/sqlalchemy/engine/url.py         |  13
-rw-r--r--  lib/sqlalchemy/engine/util.py        |   3
10 files changed, 318 insertions, 303 deletions
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 6d6e7a354..9c9e03821 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -69,7 +69,7 @@ from .base import (
RootTransaction,
Transaction,
TwoPhaseTransaction,
- )
+)
from .result import (
BufferedColumnResultProxy,
@@ -78,11 +78,11 @@ from .result import (
FullyBufferedResultProxy,
ResultProxy,
RowProxy,
- )
+)
from .util import (
connection_memoize
- )
+)
from . import util, strategies
@@ -371,4 +371,4 @@ def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
__all__ = (
'create_engine',
'engine_from_config',
- )
+)
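
For context on the two entry points re-exported above, here is a minimal usage sketch; it is not part of this commit, and the URL and configuration keys are illustrative assumptions only:

    from sqlalchemy import create_engine, engine_from_config

    # direct construction from a database URL
    engine = create_engine("sqlite:///:memory:", echo=True)

    # construction from a flat config dict; only keys carrying the given
    # prefix are consumed, and the prefix is stripped before use
    config = {
        "sqlalchemy.url": "sqlite:///:memory:",
        "sqlalchemy.echo": "false",
    }
    engine2 = engine_from_config(config, prefix="sqlalchemy.")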
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 73c35c38f..cf0689626 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -72,7 +72,7 @@ class Connection(Connectable):
# want to handle any of the engine's events in that case.
self.dispatch = self.dispatch._join(engine.dispatch)
self._has_events = _has_events or (
- _has_events is None and engine._has_events)
+ _has_events is None and engine._has_events)
self._echo = self.engine._should_log_info()
if _execution_options:
@@ -94,11 +94,11 @@ class Connection(Connectable):
"""
return self.engine._connection_cls(
- self.engine,
- self.__connection,
- _branch=True,
- _has_events=self._has_events,
- _dispatch=self.dispatch)
+ self.engine,
+ self.__connection,
+ _branch=True,
+ _has_events=self._has_events,
+ _dispatch=self.dispatch)
def _clone(self):
"""Create a shallow copy of this Connection.
@@ -239,8 +239,8 @@ class Connection(Connectable):
if self.__can_reconnect and self.__invalid:
if self.__transaction is not None:
raise exc.InvalidRequestError(
- "Can't reconnect until invalid "
- "transaction is rolled back")
+ "Can't reconnect until invalid "
+ "transaction is rolled back")
self.__connection = self.engine.raw_connection()
self.__invalid = False
return self.__connection
@@ -324,10 +324,10 @@ class Connection(Connectable):
:meth:`.Connection.invalidate` method is called, at the DBAPI
level all state associated with this transaction is lost, as
the DBAPI connection is closed. The :class:`.Connection`
- will not allow a reconnection to proceed until the :class:`.Transaction`
- object is ended, by calling the :meth:`.Transaction.rollback`
- method; until that point, any attempt at continuing to use the
- :class:`.Connection` will raise an
+ will not allow a reconnection to proceed until the
+ :class:`.Transaction` object is ended, by calling the
+ :meth:`.Transaction.rollback` method; until that point, any attempt at
+ continuing to use the :class:`.Connection` will raise an
:class:`~sqlalchemy.exc.InvalidRequestError`.
This is to prevent applications from accidentally
continuing an ongoing transactional operations despite the
@@ -335,8 +335,8 @@ class Connection(Connectable):
invalidation.
The :meth:`.Connection.invalidate` method, just like auto-invalidation,
- will at the connection pool level invoke the :meth:`.PoolEvents.invalidate`
- event.
+ will at the connection pool level invoke the
+ :meth:`.PoolEvents.invalidate` event.
.. seealso::
@@ -585,7 +585,8 @@ class Connection(Connectable):
if self._still_open_and_connection_is_valid:
assert isinstance(self.__transaction, TwoPhaseTransaction)
try:
- self.engine.dialect.do_rollback_twophase(self, xid, is_prepared)
+ self.engine.dialect.do_rollback_twophase(
+ self, xid, is_prepared)
finally:
if self.connection._reset_agent is self.__transaction:
self.connection._reset_agent = None
@@ -722,8 +723,8 @@ class Connection(Connectable):
meth = object._execute_on_connection
except AttributeError:
raise exc.InvalidRequestError(
- "Unexecutable object type: %s" %
- type(object))
+ "Unexecutable object type: %s" %
+ type(object))
else:
return meth(self, multiparams, params)
@@ -731,7 +732,7 @@ class Connection(Connectable):
"""Execute a sql.FunctionElement object."""
return self._execute_clauseelement(func.select(),
- multiparams, params)
+ multiparams, params)
def _execute_default(self, default, multiparams, params):
"""Execute a schema.ColumnDefault object."""
@@ -749,7 +750,7 @@ class Connection(Connectable):
dialect = self.dialect
ctx = dialect.execution_ctx_cls._init_default(
- dialect, self, conn)
+ dialect, self, conn)
except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
@@ -759,7 +760,7 @@ class Connection(Connectable):
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(self,
- default, multiparams, params, ret)
+ default, multiparams, params, ret)
return ret
@@ -783,7 +784,7 @@ class Connection(Connectable):
)
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(self,
- ddl, multiparams, params, ret)
+ ddl, multiparams, params, ret)
return ret
def _execute_clauseelement(self, elem, multiparams, params):
@@ -809,13 +810,13 @@ class Connection(Connectable):
compiled_sql = self._execution_options['compiled_cache'][key]
else:
compiled_sql = elem.compile(
- dialect=dialect, column_keys=keys,
- inline=len(distilled_params) > 1)
+ dialect=dialect, column_keys=keys,
+ inline=len(distilled_params) > 1)
self._execution_options['compiled_cache'][key] = compiled_sql
else:
compiled_sql = elem.compile(
- dialect=dialect, column_keys=keys,
- inline=len(distilled_params) > 1)
+ dialect=dialect, column_keys=keys,
+ inline=len(distilled_params) > 1)
ret = self._execute_context(
dialect,
@@ -826,7 +827,7 @@ class Connection(Connectable):
)
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(self,
- elem, multiparams, params, ret)
+ elem, multiparams, params, ret)
return ret
def _execute_compiled(self, compiled, multiparams, params):
@@ -848,7 +849,7 @@ class Connection(Connectable):
)
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(self,
- compiled, multiparams, params, ret)
+ compiled, multiparams, params, ret)
return ret
def _execute_text(self, statement, multiparams, params):
@@ -870,12 +871,12 @@ class Connection(Connectable):
)
if self._has_events or self.engine._has_events:
self.dispatch.after_execute(self,
- statement, multiparams, params, ret)
+ statement, multiparams, params, ret)
return ret
def _execute_context(self, dialect, constructor,
- statement, parameters,
- *args):
+ statement, parameters,
+ *args):
"""Create an :class:`.ExecutionContext` and execute, returning
a :class:`.ResultProxy`."""
@@ -888,15 +889,15 @@ class Connection(Connectable):
context = constructor(dialect, self, conn, *args)
except Exception as e:
self._handle_dbapi_exception(e,
- util.text_type(statement), parameters,
- None, None)
+ util.text_type(statement), parameters,
+ None, None)
if context.compiled:
context.pre_exec()
cursor, statement, parameters = context.cursor, \
- context.statement, \
- context.parameters
+ context.statement, \
+ context.parameters
if not context.executemany:
parameters = parameters[0]
@@ -904,62 +905,64 @@ class Connection(Connectable):
if self._has_events or self.engine._has_events:
for fn in self.dispatch.before_cursor_execute:
statement, parameters = \
- fn(self, cursor, statement, parameters,
- context, context.executemany)
+ fn(self, cursor, statement, parameters,
+ context, context.executemany)
if self._echo:
self.engine.logger.info(statement)
- self.engine.logger.info("%r",
- sql_util._repr_params(parameters, batches=10))
+ self.engine.logger.info(
+ "%r",
+ sql_util._repr_params(parameters, batches=10)
+ )
try:
if context.executemany:
for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_executemany:
+ else self.dialect.dispatch.do_executemany:
if fn(cursor, statement, parameters, context):
break
else:
self.dialect.do_executemany(
- cursor,
- statement,
- parameters,
- context)
+ cursor,
+ statement,
+ parameters,
+ context)
elif not parameters and context.no_parameters:
for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_execute_no_params:
+ else self.dialect.dispatch.do_execute_no_params:
if fn(cursor, statement, context):
break
else:
self.dialect.do_execute_no_params(
- cursor,
- statement,
- context)
+ cursor,
+ statement,
+ context)
else:
for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_execute:
+ else self.dialect.dispatch.do_execute:
if fn(cursor, statement, parameters, context):
break
else:
self.dialect.do_execute(
- cursor,
- statement,
- parameters,
- context)
+ cursor,
+ statement,
+ parameters,
+ context)
except Exception as e:
self._handle_dbapi_exception(
- e,
- statement,
- parameters,
- cursor,
- context)
+ e,
+ statement,
+ parameters,
+ cursor,
+ context)
if self._has_events or self.engine._has_events:
self.dispatch.after_cursor_execute(self, cursor,
- statement,
- parameters,
- context,
- context.executemany)
+ statement,
+ parameters,
+ context,
+ context.executemany)
if context.compiled:
context.post_exec()
@@ -1012,38 +1015,38 @@ class Connection(Connectable):
if self._has_events or self.engine._has_events:
for fn in self.dispatch.before_cursor_execute:
statement, parameters = \
- fn(self, cursor, statement, parameters,
- context,
- False)
+ fn(self, cursor, statement, parameters,
+ context,
+ False)
if self._echo:
self.engine.logger.info(statement)
self.engine.logger.info("%r", parameters)
try:
for fn in () if not self.dialect._has_events \
- else self.dialect.dispatch.do_execute:
+ else self.dialect.dispatch.do_execute:
if fn(cursor, statement, parameters, context):
break
else:
self.dialect.do_execute(
- cursor,
- statement,
- parameters,
- context)
+ cursor,
+ statement,
+ parameters,
+ context)
except Exception as e:
self._handle_dbapi_exception(
- e,
- statement,
- parameters,
- cursor,
- context)
+ e,
+ statement,
+ parameters,
+ cursor,
+ context)
if self._has_events or self.engine._has_events:
self.dispatch.after_cursor_execute(self, cursor,
- statement,
- parameters,
- context,
- False)
+ statement,
+ parameters,
+ context,
+ False)
def _safe_close_cursor(self, cursor):
"""Close the given cursor, catching exceptions
@@ -1057,17 +1060,17 @@ class Connection(Connectable):
except Exception:
# log the error through the connection pool's logger.
self.engine.pool.logger.error(
- "Error closing cursor", exc_info=True)
+ "Error closing cursor", exc_info=True)
_reentrant_error = False
_is_disconnect = False
def _handle_dbapi_exception(self,
- e,
- statement,
- parameters,
- cursor,
- context):
+ e,
+ statement,
+ parameters,
+ cursor,
+ context):
exc_info = sys.exc_info()
@@ -1084,12 +1087,12 @@ class Connection(Connectable):
if self._reentrant_error:
util.raise_from_cause(
- exc.DBAPIError.instance(statement,
- parameters,
- e,
- self.dialect.dbapi.Error),
- exc_info
- )
+ exc.DBAPIError.instance(statement,
+ parameters,
+ e,
+ self.dialect.dbapi.Error),
+ exc_info
+ )
self._reentrant_error = True
try:
# non-DBAPI error - if we already got a context,
@@ -1113,11 +1116,11 @@ class Connection(Connectable):
# legacy dbapi_error event
if should_wrap and context:
self.dispatch.dbapi_error(self,
- cursor,
- statement,
- parameters,
- context,
- e)
+ cursor,
+ statement,
+ parameters,
+ context,
+ e)
# new handle_error event
ctx = ExceptionContextImpl(
@@ -1153,9 +1156,9 @@ class Connection(Connectable):
util.raise_from_cause(newraise, exc_info)
elif should_wrap:
util.raise_from_cause(
- sqlalchemy_exception,
- exc_info
- )
+ sqlalchemy_exception,
+ exc_info
+ )
else:
util.reraise(*exc_info)
@@ -1240,15 +1243,15 @@ class Connection(Connectable):
def _run_visitor(self, visitorcallable, element, **kwargs):
visitorcallable(self.dialect, self,
- **kwargs).traverse_single(element)
+ **kwargs).traverse_single(element)
class ExceptionContextImpl(ExceptionContext):
"""Implement the :class:`.ExceptionContext` interface."""
def __init__(self, exception, sqlalchemy_exception,
- connection, cursor, statement, parameters,
- context, is_disconnect):
+ connection, cursor, statement, parameters,
+ context, is_disconnect):
self.connection = connection
self.sqlalchemy_exception = sqlalchemy_exception
self.original_exception = exception
@@ -1371,6 +1374,7 @@ class NestedTransaction(Transaction):
The interface is the same as that of :class:`.Transaction`.
"""
+
def __init__(self, connection, parent):
super(NestedTransaction, self).__init__(connection, parent)
self._savepoint = self.connection._savepoint_impl()
@@ -1378,12 +1382,12 @@ class NestedTransaction(Transaction):
def _do_rollback(self):
if self.is_active:
self.connection._rollback_to_savepoint_impl(
- self._savepoint, self._parent)
+ self._savepoint, self._parent)
def _do_commit(self):
if self.is_active:
self.connection._release_savepoint_impl(
- self._savepoint, self._parent)
+ self._savepoint, self._parent)
class TwoPhaseTransaction(Transaction):
@@ -1396,6 +1400,7 @@ class TwoPhaseTransaction(Transaction):
with the addition of the :meth:`prepare` method.
"""
+
def __init__(self, connection, xid):
super(TwoPhaseTransaction, self).__init__(connection, None)
self._is_prepared = False
@@ -1442,9 +1447,9 @@ class Engine(Connectable, log.Identified):
_connection_cls = Connection
def __init__(self, pool, dialect, url,
- logging_name=None, echo=None, proxy=None,
- execution_options=None
- ):
+ logging_name=None, echo=None, proxy=None,
+ execution_options=None
+ ):
self.pool = pool
self.url = url
self.dialect = dialect
@@ -1477,7 +1482,7 @@ class Engine(Connectable, log.Identified):
"""
self._execution_options = \
- self._execution_options.union(opt)
+ self._execution_options.union(opt)
self.dispatch.set_engine_execution_options(self, opt)
self.dialect.set_engine_execution_options(self, opt)
@@ -1526,7 +1531,8 @@ class Engine(Connectable, log.Identified):
shards = {"default": "base", shard_1: "db1", "shard_2": "db2"}
@event.listens_for(Engine, "before_cursor_execute")
- def _switch_shard(conn, cursor, stmt, params, context, executemany):
+ def _switch_shard(conn, cursor, stmt,
+ params, context, executemany):
shard_id = conn._execution_options.get('shard_id', "default")
current_shard = conn.info.get("current_shard", None)
@@ -1606,7 +1612,7 @@ class Engine(Connectable, log.Identified):
yield connection
def _run_visitor(self, visitorcallable, element,
- connection=None, **kwargs):
+ connection=None, **kwargs):
with self._optional_conn_ctx_manager(connection) as conn:
conn._run_visitor(visitorcallable, element, **kwargs)
@@ -1813,8 +1819,8 @@ class Engine(Connectable, log.Identified):
.. seealso::
- :ref:`metadata_reflection_inspector` - detailed schema inspection using
- the :class:`.Inspector` interface.
+ :ref:`metadata_reflection_inspector` - detailed schema inspection
+ using the :class:`.Inspector` interface.
:class:`.quoted_name` - used to pass quoting information along
with a schema identifier.
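
The docstring reflowed in the hunk further above describes combining Connection.execution_options() with the before_cursor_execute event; below is a hedged, self-contained sketch of that same pattern, with the engine URL and shard name assumed for illustration:

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite:///:memory:")

    @event.listens_for(engine, "before_cursor_execute")
    def _switch_shard(conn, cursor, stmt, params, context, executemany):
        # values passed to execution_options() show up on the Connection;
        # conn.info is a scratch dict tied to the underlying DBAPI connection
        shard_id = conn._execution_options.get("shard_id", "default")
        if conn.info.get("current_shard") != shard_id:
            # a real listener would switch databases/schemas here
            conn.info["current_shard"] = shard_id

    with engine.connect() as conn:
        conn.execution_options(shard_id="shard_1").execute("select 1")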
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 58915fed2..2fece76b9 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -24,9 +24,8 @@ import weakref
from .. import event
AUTOCOMMIT_REGEXP = re.compile(
- r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)',
- re.I | re.UNICODE)
-
+ r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)',
+ re.I | re.UNICODE)
class DefaultDialect(interfaces.Dialect):
@@ -195,26 +194,24 @@ class DefaultDialect(interfaces.Dialect):
if label_length and label_length > self.max_identifier_length:
raise exc.ArgumentError(
- "Label length of %d is greater than this dialect's"
- " maximum identifier length of %d" %
- (label_length, self.max_identifier_length))
+ "Label length of %d is greater than this dialect's"
+ " maximum identifier length of %d" %
+ (label_length, self.max_identifier_length))
self.label_length = label_length
if self.description_encoding == 'use_encoding':
self._description_decoder = \
- processors.to_unicode_processor_factory(
- encoding
- )
+ processors.to_unicode_processor_factory(
+ encoding
+ )
elif self.description_encoding is not None:
self._description_decoder = \
- processors.to_unicode_processor_factory(
- self.description_encoding
- )
+ processors.to_unicode_processor_factory(
+ self.description_encoding
+ )
self._encoder = codecs.getencoder(self.encoding)
self._decoder = processors.to_unicode_processor_factory(self.encoding)
-
-
@util.memoized_property
def _type_memos(self):
return weakref.WeakKeyDictionary()
@@ -230,25 +227,25 @@ class DefaultDialect(interfaces.Dialect):
def initialize(self, connection):
try:
self.server_version_info = \
- self._get_server_version_info(connection)
+ self._get_server_version_info(connection)
except NotImplementedError:
self.server_version_info = None
try:
self.default_schema_name = \
- self._get_default_schema_name(connection)
+ self._get_default_schema_name(connection)
except NotImplementedError:
self.default_schema_name = None
try:
self.default_isolation_level = \
- self.get_isolation_level(connection.connection)
+ self.get_isolation_level(connection.connection)
except NotImplementedError:
self.default_isolation_level = None
self.returns_unicode_strings = self._check_unicode_returns(connection)
if self.description_encoding is not None and \
- self._check_unicode_description(connection):
+ self._check_unicode_description(connection):
self._description_decoder = self.description_encoding = None
self.do_rollback(connection.connection)
@@ -279,7 +276,8 @@ class DefaultDialect(interfaces.Dialect):
parameters = {}
def check_unicode(test):
- statement = cast_to(expression.select([test]).compile(dialect=self))
+ statement = cast_to(
+ expression.select([test]).compile(dialect=self))
try:
cursor = connection.connection.cursor()
connection._cursor_execute(cursor, statement, parameters)
@@ -289,7 +287,7 @@ class DefaultDialect(interfaces.Dialect):
# note that _cursor_execute() will have closed the cursor
# if an exception is thrown.
util.warn("Exception attempting to "
- "detect unicode returns: %r" % de)
+ "detect unicode returns: %r" % de)
return False
else:
return isinstance(row[0], util.text_type)
@@ -300,7 +298,8 @@ class DefaultDialect(interfaces.Dialect):
expression.literal_column("'test plain returns'"),
sqltypes.VARCHAR(60)
),
- # detect if there's an NVARCHAR type with different behavior available
+ # detect if there's an NVARCHAR type with different behavior
+ # available
expression.cast(
expression.literal_column("'test unicode returns'"),
sqltypes.Unicode(60)
@@ -351,7 +350,8 @@ class DefaultDialect(interfaces.Dialect):
"""
return sqltypes.adapt_type(typeobj, self.colspecs)
- def reflecttable(self, connection, table, include_columns, exclude_columns):
+ def reflecttable(
+ self, connection, table, include_columns, exclude_columns):
insp = reflection.Inspector.from_engine(connection)
return insp.reflecttable(table, include_columns, exclude_columns)
@@ -362,8 +362,8 @@ class DefaultDialect(interfaces.Dialect):
"""
return {
'constrained_columns':
- self.get_primary_keys(conn, table_name,
- schema=schema, **kw)
+ self.get_primary_keys(conn, table_name,
+ schema=schema, **kw)
}
def validate_identifier(self, ident):
@@ -384,6 +384,7 @@ class DefaultDialect(interfaces.Dialect):
def set_engine_execution_options(self, engine, opts):
if 'isolation_level' in opts:
isolation_level = opts['isolation_level']
+
@event.listens_for(engine, "engine_connect")
def set_isolation(connection, branch):
if not branch:
@@ -398,7 +399,6 @@ class DefaultDialect(interfaces.Dialect):
connection.connection._connection_record.\
finalize_callback.append(self.reset_isolation_level)
-
def do_begin(self, dbapi_connection):
pass
@@ -503,7 +503,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
@classmethod
def _init_compiled(cls, dialect, connection, dbapi_connection,
- compiled, parameters):
+ compiled, parameters):
"""Initialize execution context for a Compiled construct."""
self = cls.__new__(cls)
@@ -530,7 +530,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.unicode_statement = util.text_type(compiled)
if not dialect.supports_unicode_statements:
self.statement = self.unicode_statement.encode(
- self.dialect.encoding)
+ self.dialect.encoding)
else:
self.statement = self.unicode_statement
@@ -540,15 +540,15 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
if self.isinsert or self.isupdate or self.isdelete:
self._is_explicit_returning = bool(compiled.statement._returning)
- self._is_implicit_returning = bool(compiled.returning and \
- not compiled.statement._returning)
+ self._is_implicit_returning = bool(
+ compiled.returning and not compiled.statement._returning)
if not parameters:
self.compiled_parameters = [compiled.construct_params()]
else:
self.compiled_parameters = \
- [compiled.construct_params(m, _group_number=grp) for
- grp, m in enumerate(parameters)]
+ [compiled.construct_params(m, _group_number=grp) for
+ grp, m in enumerate(parameters)]
self.executemany = len(parameters) > 1
@@ -582,10 +582,10 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
for key in compiled_params:
if key in processors:
param[dialect._encoder(key)[0]] = \
- processors[key](compiled_params[key])
+ processors[key](compiled_params[key])
else:
param[dialect._encoder(key)[0]] = \
- compiled_params[key]
+ compiled_params[key]
else:
for key in compiled_params:
if key in processors:
@@ -599,7 +599,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
@classmethod
def _init_statement(cls, dialect, connection, dbapi_connection,
- statement, parameters):
+ statement, parameters):
"""Initialize execution context for a string SQL statement."""
self = cls.__new__(cls)
@@ -623,12 +623,12 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.parameters = parameters
else:
self.parameters = [
- dict((dialect._encoder(k)[0], d[k]) for k in d)
- for d in parameters
- ] or [{}]
+ dict((dialect._encoder(k)[0], d[k]) for k in d)
+ for d in parameters
+ ] or [{}]
else:
self.parameters = [dialect.execute_sequence_format(p)
- for p in parameters]
+ for p in parameters]
self.executemany = len(parameters) > 1
@@ -701,9 +701,9 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
if type_ is not None:
# apply type post processors to the result
proc = type_._cached_result_processor(
- self.dialect,
- self.cursor.description[0][1]
- )
+ self.dialect,
+ self.cursor.description[0][1]
+ )
if proc:
return proc(r)
return r
@@ -783,8 +783,8 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
not self._is_explicit_returning and \
not self.compiled.inline and \
self.dialect.postfetch_lastrowid and \
- (not self.inserted_primary_key or \
- None in self.inserted_primary_key):
+ (not self.inserted_primary_key or
+ None in self.inserted_primary_key):
table = self.compiled.statement.table
lastrowid = self.get_lastrowid()
@@ -792,15 +792,15 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
if autoinc_col is not None:
# apply type post processors to the lastrowid
proc = autoinc_col.type._cached_result_processor(
- self.dialect, None)
+ self.dialect, None)
if proc is not None:
lastrowid = proc(lastrowid)
self.inserted_primary_key = [
lastrowid if c is autoinc_col else v
for c, v in zip(
- table.primary_key,
- self.inserted_primary_key)
+ table.primary_key,
+ self.inserted_primary_key)
]
def _fetch_implicit_returning(self, resultproxy):
@@ -839,29 +839,29 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
return
types = dict(
- (self.compiled.bind_names[bindparam], bindparam.type)
- for bindparam in self.compiled.bind_names)
+ (self.compiled.bind_names[bindparam], bindparam.type)
+ for bindparam in self.compiled.bind_names)
if self.dialect.positional:
inputsizes = []
for key in self.compiled.positiontup:
typeengine = types[key]
dbtype = typeengine.dialect_impl(self.dialect).\
- get_dbapi_type(self.dialect.dbapi)
+ get_dbapi_type(self.dialect.dbapi)
if dbtype is not None and \
- (not exclude_types or dbtype not in exclude_types):
+ (not exclude_types or dbtype not in exclude_types):
inputsizes.append(dbtype)
try:
self.cursor.setinputsizes(*inputsizes)
except Exception as e:
self.root_connection._handle_dbapi_exception(
- e, None, None, None, self)
+ e, None, None, None, self)
else:
inputsizes = {}
for key in self.compiled.bind_names.values():
typeengine = types[key]
dbtype = typeengine.dialect_impl(self.dialect).\
- get_dbapi_type(self.dialect.dbapi)
+ get_dbapi_type(self.dialect.dbapi)
if dbtype is not None and \
(not exclude_types or dbtype not in exclude_types):
if translate:
@@ -873,7 +873,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.cursor.setinputsizes(**inputsizes)
except Exception as e:
self.root_connection._handle_dbapi_exception(
- e, None, None, None, self)
+ e, None, None, None, self)
def _exec_default(self, default, type_):
if default.is_sequence:
@@ -935,7 +935,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
del self.current_parameters
else:
self.current_parameters = compiled_parameters = \
- self.compiled_parameters[0]
+ self.compiled_parameters[0]
for c in self.compiled.prefetch:
if self.isinsert:
@@ -949,10 +949,10 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
if self.isinsert:
self.inserted_primary_key = [
- self.compiled_parameters[0].get(key_getter(c), None)
- for c in self.compiled.\
- statement.table.primary_key
- ]
+ self.compiled_parameters[0].get(key_getter(c), None)
+ for c in self.compiled.
+ statement.table.primary_key
+ ]
DefaultDialect.execution_ctx_cls = DefaultExecutionContext
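
The set_engine_execution_options() hunk above routes the isolation_level option through an engine_connect hook; on the consuming side that option is typically supplied as sketched below. Dialect support and the accepted level names vary, so treat this as an assumption-laden example rather than a reference:

    from sqlalchemy import create_engine

    # per engine: every checked-out connection gets this isolation level
    eng = create_engine("postgresql://scott:tiger@localhost/test",
                        isolation_level="READ COMMITTED")

    # per connection: applied for this checkout, reset when the connection
    # is returned to the pool (see the finalize_callback wiring above)
    with eng.connect() as conn:
        conn = conn.execution_options(isolation_level="SERIALIZABLE")
        conn.execute("select 1")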
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index e7f43d821..71df29cac 100644
--- a/lib/sqlalchemy/engine/interfaces.py
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -12,6 +12,7 @@ from .. import util, event
# backwards compat
from ..sql.compiler import Compiled, TypeCompiler
+
class Dialect(object):
"""Define the behavior of a specific database and DB-API combination.
@@ -153,7 +154,6 @@ class Dialect(object):
_has_events = False
-
def create_connect_args(self, url):
"""Build DB-API compatible connection arguments.
@@ -197,7 +197,8 @@ class Dialect(object):
pass
- def reflecttable(self, connection, table, include_columns, exclude_columns):
+ def reflecttable(
+ self, connection, table, include_columns, exclude_columns):
"""Load table description from the database.
Given a :class:`.Connection` and a
@@ -254,7 +255,8 @@ class Dialect(object):
Deprecated. This method is only called by the default
implementation of :meth:`.Dialect.get_pk_constraint`. Dialects should
- instead implement the :meth:`.Dialect.get_pk_constraint` method directly.
+ instead implement the :meth:`.Dialect.get_pk_constraint` method
+ directly.
"""
@@ -346,7 +348,8 @@ class Dialect(object):
raise NotImplementedError()
- def get_unique_constraints(self, connection, table_name, schema=None, **kw):
+ def get_unique_constraints(
+ self, connection, table_name, schema=None, **kw):
"""Return information about unique constraints in `table_name`.
Given a string `table_name` and an optional string `schema`, return
@@ -359,7 +362,8 @@ class Dialect(object):
list of column names in order
\**kw
- other options passed to the dialect's get_unique_constraints() method.
+ other options passed to the dialect's get_unique_constraints()
+ method.
.. versionadded:: 0.9.0
@@ -465,7 +469,6 @@ class Dialect(object):
raise NotImplementedError()
-
def do_commit(self, dbapi_connection):
"""Provide an implementation of ``connection.commit()``, given a
DB-API connection.
@@ -551,7 +554,7 @@ class Dialect(object):
raise NotImplementedError()
def do_rollback_twophase(self, connection, xid, is_prepared=True,
- recover=False):
+ recover=False):
"""Rollback a two phase transaction on the given connection.
:param connection: a :class:`.Connection`.
@@ -565,7 +568,7 @@ class Dialect(object):
raise NotImplementedError()
def do_commit_twophase(self, connection, xid, is_prepared=True,
- recover=False):
+ recover=False):
"""Commit a two phase transaction on the given connection.
@@ -742,7 +745,6 @@ class ExecutionContext(object):
"""
-
def create_cursor(self):
"""Return a new cursor generated from this ExecutionContext's
connection.
@@ -878,12 +880,13 @@ class Connectable(object):
raise NotImplementedError()
def _run_visitor(self, visitorcallable, element,
- **kwargs):
+ **kwargs):
raise NotImplementedError()
def _execute_clauseelement(self, elem, multiparams=None, params=None):
raise NotImplementedError()
+
class ExceptionContext(object):
"""Encapsulate information about an error condition in progress.
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 2fa5dc494..012d1d35d 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -41,14 +41,14 @@ def cache(fn, self, con, *args, **kw):
if info_cache is None:
return fn(self, con, *args, **kw)
key = (
- fn.__name__,
- tuple(a for a in args if isinstance(a, util.string_types)),
- tuple((k, v) for k, v in kw.items() if
- isinstance(v,
- util.string_types + util.int_types + (float, )
- )
- )
- )
+ fn.__name__,
+ tuple(a for a in args if isinstance(a, util.string_types)),
+ tuple((k, v) for k, v in kw.items() if
+ isinstance(v,
+ util.string_types + util.int_types + (float, )
+ )
+ )
+ )
ret = info_cache.get(key)
if ret is None:
ret = fn(self, con, *args, **kw)
@@ -155,7 +155,7 @@ class Inspector(object):
if hasattr(self.dialect, 'get_schema_names'):
return self.dialect.get_schema_names(self.bind,
- info_cache=self.info_cache)
+ info_cache=self.info_cache)
return []
def get_table_names(self, schema=None, order_by=None):
@@ -188,8 +188,8 @@ class Inspector(object):
"""
if hasattr(self.dialect, 'get_table_names'):
- tnames = self.dialect.get_table_names(self.bind,
- schema, info_cache=self.info_cache)
+ tnames = self.dialect.get_table_names(
+ self.bind, schema, info_cache=self.info_cache)
else:
tnames = self.engine.table_names(schema)
if order_by == 'foreign_key':
@@ -230,7 +230,7 @@ class Inspector(object):
"""
return self.dialect.get_view_names(self.bind, schema,
- info_cache=self.info_cache)
+ info_cache=self.info_cache)
def get_view_definition(self, view_name, schema=None):
"""Return definition for `view_name`.
@@ -293,8 +293,8 @@ class Inspector(object):
"""
return self.dialect.get_pk_constraint(self.bind, table_name, schema,
- info_cache=self.info_cache,
- **kw)['constrained_columns']
+ info_cache=self.info_cache,
+ **kw)['constrained_columns']
def get_pk_constraint(self, table_name, schema=None, **kw):
"""Return information about primary key constraint on `table_name`.
@@ -352,8 +352,8 @@ class Inspector(object):
"""
return self.dialect.get_foreign_keys(self.bind, table_name, schema,
- info_cache=self.info_cache,
- **kw)
+ info_cache=self.info_cache,
+ **kw)
def get_indexes(self, table_name, schema=None, **kw):
"""Return information about indexes in `table_name`.
@@ -380,8 +380,8 @@ class Inspector(object):
"""
return self.dialect.get_indexes(self.bind, table_name,
- schema,
- info_cache=self.info_cache, **kw)
+ schema,
+ info_cache=self.info_cache, **kw)
def get_unique_constraints(self, table_name, schema=None, **kw):
"""Return information about unique constraints in `table_name`.
@@ -446,7 +446,8 @@ class Inspector(object):
)
# reflect table options, like mysql_engine
- tbl_opts = self.get_table_options(table_name, schema, **table.dialect_kwargs)
+ tbl_opts = self.get_table_options(
+ table_name, schema, **table.dialect_kwargs)
if tbl_opts:
# add additional kwargs to the Table if the dialect
# returned them
@@ -461,7 +462,8 @@ class Inspector(object):
found_table = False
cols_by_orig_name = {}
- for col_d in self.get_columns(table_name, schema, **table.dialect_kwargs):
+ for col_d in self.get_columns(
+ table_name, schema, **table.dialect_kwargs):
found_table = True
orig_name = col_d['name']
@@ -503,7 +505,7 @@ class Inspector(object):
colargs.append(sequence)
cols_by_orig_name[orig_name] = col = \
- sa_schema.Column(name, coltype, *colargs, **col_kw)
+ sa_schema.Column(name, coltype, *colargs, **col_kw)
if col.key in table.primary_key:
col.primary_key = True
@@ -512,7 +514,8 @@ class Inspector(object):
if not found_table:
raise exc.NoSuchTableError(table.name)
- pk_cons = self.get_pk_constraint(table_name, schema, **table.dialect_kwargs)
+ pk_cons = self.get_pk_constraint(
+ table_name, schema, **table.dialect_kwargs)
if pk_cons:
pk_cols = [
cols_by_orig_name[pk]
@@ -527,18 +530,19 @@ class Inspector(object):
# its column collection
table.primary_key._reload(pk_cols)
- fkeys = self.get_foreign_keys(table_name, schema, **table.dialect_kwargs)
+ fkeys = self.get_foreign_keys(
+ table_name, schema, **table.dialect_kwargs)
for fkey_d in fkeys:
conname = fkey_d['name']
# look for columns by orig name in cols_by_orig_name,
# but support columns that are in-Python only as fallback
constrained_columns = [
- cols_by_orig_name[c].key
- if c in cols_by_orig_name else c
- for c in fkey_d['constrained_columns']
- ]
+ cols_by_orig_name[c].key
+ if c in cols_by_orig_name else c
+ for c in fkey_d['constrained_columns']
+ ]
if exclude_columns and set(constrained_columns).intersection(
- exclude_columns):
+ exclude_columns):
continue
referred_schema = fkey_d['referred_schema']
referred_table = fkey_d['referred_table']
@@ -576,7 +580,7 @@ class Inspector(object):
unique = index_d['unique']
flavor = index_d.get('type', 'unknown type')
if include_columns and \
- not set(columns).issubset(include_columns):
+ not set(columns).issubset(include_columns):
util.warn(
"Omitting %s KEY for (%s), key covers omitted columns." %
(flavor, ', '.join(columns)))
@@ -584,8 +588,8 @@ class Inspector(object):
# look for columns by orig name in cols_by_orig_name,
# but support columns that are in-Python only as fallback
sa_schema.Index(name, *[
- cols_by_orig_name[c] if c in cols_by_orig_name
- else table.c[c]
- for c in columns
- ],
- **dict(unique=unique))
+ cols_by_orig_name[c] if c in cols_by_orig_name
+ else table.c[c]
+ for c in columns
+ ],
+ **dict(unique=unique))
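
The Inspector methods reflowed above form the public reflection API; a short usage sketch (an empty in-memory SQLite database is assumed, so the loop simply yields nothing):

    from sqlalchemy import create_engine
    from sqlalchemy.engine import reflection

    engine = create_engine("sqlite:///:memory:")
    insp = reflection.Inspector.from_engine(engine)

    print(insp.get_schema_names())
    for name in insp.get_table_names():
        print(name, insp.get_pk_constraint(name)["constrained_columns"])
        for col in insp.get_columns(name):
            print("  ", col["name"], col["type"])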
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index d86322e51..06a81aa6c 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -9,7 +9,6 @@
and :class:`.RowProxy."""
-
from .. import exc, util
from ..sql import expression, sqltypes
import collections
@@ -75,7 +74,7 @@ except ImportError:
if isinstance(key, slice):
l = []
for processor, value in zip(self._processors[key],
- self._row[key]):
+ self._row[key]):
if processor is None:
l.append(value)
else:
@@ -85,8 +84,8 @@ except ImportError:
raise
if index is None:
raise exc.InvalidRequestError(
- "Ambiguous column name '%s' in result set! "
- "try 'use_labels' option on select statement." % key)
+ "Ambiguous column name '%s' in result set! "
+ "try 'use_labels' option on select statement." % key)
if processor is not None:
return processor(self._row[index])
else:
@@ -219,15 +218,14 @@ class ResultMetaData(object):
if context.result_map:
try:
- name, obj, type_ = context.result_map[colname
- if self.case_sensitive
- else colname.lower()]
+ name, obj, type_ = context.result_map[
+ colname if self.case_sensitive else colname.lower()]
except KeyError:
name, obj, type_ = \
colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
else:
name, obj, type_ = \
- colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
+ colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
processor = context.get_result_processor(type_, colname, coltype)
@@ -240,9 +238,9 @@ class ResultMetaData(object):
# populate primary keymap, looking for conflicts.
if primary_keymap.setdefault(
- name if self.case_sensitive
- else name.lower(),
- rec) is not rec:
+ name if self.case_sensitive
+ else name.lower(),
+ rec) is not rec:
# place a record that doesn't have the "index" - this
# is interpreted later as an AmbiguousColumnError,
# but only when actually accessed. Columns
@@ -250,8 +248,8 @@ class ResultMetaData(object):
# aren't used; integer access is always
# unambiguous.
primary_keymap[name
- if self.case_sensitive
- else name.lower()] = rec = (None, obj, None)
+ if self.case_sensitive
+ else name.lower()] = rec = (None, obj, None)
self.keys.append(colname)
if obj:
@@ -263,16 +261,15 @@ class ResultMetaData(object):
# keymap[o] = (None, obj, None)
if translate_colname and \
- untranslated:
+ untranslated:
keymap[untranslated] = rec
# overwrite keymap values with those of the
# high precedence keymap.
keymap.update(primary_keymap)
-
@util.pending_deprecation("0.8", "sqlite dialect uses "
- "_translate_colname() now")
+ "_translate_colname() now")
def _set_keymap_synonym(self, name, origname):
"""Set a synonym for the given name.
@@ -282,8 +279,8 @@ class ResultMetaData(object):
"""
rec = (processor, obj, i) = self._keymap[origname if
- self.case_sensitive
- else origname.lower()]
+ self.case_sensitive
+ else origname.lower()]
if self._keymap.setdefault(name, rec) is not rec:
self._keymap[name] = (processor, obj, None)
@@ -298,26 +295,26 @@ class ResultMetaData(object):
# pickle/unpickle roundtrip
elif isinstance(key, expression.ColumnElement):
if key._label and (
- key._label
- if self.case_sensitive
- else key._label.lower()) in map:
+ key._label
+ if self.case_sensitive
+ else key._label.lower()) in map:
result = map[key._label
- if self.case_sensitive
- else key._label.lower()]
+ if self.case_sensitive
+ else key._label.lower()]
elif hasattr(key, 'name') and (
- key.name
- if self.case_sensitive
- else key.name.lower()) in map:
+ key.name
+ if self.case_sensitive
+ else key.name.lower()) in map:
# match is only on name.
result = map[key.name
- if self.case_sensitive
- else key.name.lower()]
+ if self.case_sensitive
+ else key.name.lower()]
# search extra hard to make sure this
# isn't a column/label name overlap.
# this check isn't currently available if the row
# was unpickled.
if result is not None and \
- result[1] is not None:
+ result[1] is not None:
for obj in result[1]:
if key._compare_name_for_result(obj):
break
@@ -327,7 +324,7 @@ class ResultMetaData(object):
if raiseerr:
raise exc.NoSuchColumnError(
"Could not locate column in row for column '%s'" %
- expression._string_or_unprintable(key))
+ expression._string_or_unprintable(key))
else:
return None
else:
@@ -398,7 +395,7 @@ class ResultProxy(object):
self.cursor = self._saved_cursor = context.cursor
self.connection = context.root_connection
self._echo = self.connection._echo and \
- context.engine._should_log_debug()
+ context.engine._should_log_debug()
self._init_metadata()
def _init_metadata(self):
@@ -414,7 +411,7 @@ class ResultProxy(object):
else:
self._metadata = ResultMetaData(self, metadata)
if self._echo:
- self.context.engine.logger.debug(
+ self.context.engine.logger.debug(
"Col %r", tuple(x[0] for x in metadata))
def keys(self):
@@ -469,7 +466,7 @@ class ResultProxy(object):
return self.context.rowcount
except Exception as e:
self.connection._handle_dbapi_exception(
- e, None, None, self.cursor, self.context)
+ e, None, None, self.cursor, self.context)
@property
def lastrowid(self):
@@ -491,8 +488,8 @@ class ResultProxy(object):
return self._saved_cursor.lastrowid
except Exception as e:
self.connection._handle_dbapi_exception(
- e, None, None,
- self._saved_cursor, self.context)
+ e, None, None,
+ self._saved_cursor, self.context)
@property
def returns_rows(self):
@@ -548,7 +545,7 @@ class ResultProxy(object):
self.closed = True
self.connection._safe_close_cursor(self.cursor)
if _autoclose_connection and \
- self.connection.should_close_with_result:
+ self.connection.should_close_with_result:
self.connection.close()
# allow consistent errors
self.cursor = None
@@ -590,17 +587,17 @@ class ResultProxy(object):
if not self.context.compiled:
raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
+ "Statement is not a compiled "
+ "expression construct.")
elif not self.context.isinsert:
raise exc.InvalidRequestError(
- "Statement is not an insert() "
- "expression construct.")
+ "Statement is not an insert() "
+ "expression construct.")
elif self.context._is_explicit_returning:
raise exc.InvalidRequestError(
- "Can't call inserted_primary_key "
- "when returning() "
- "is used.")
+ "Can't call inserted_primary_key "
+ "when returning() "
+ "is used.")
return self.context.inserted_primary_key
@@ -615,12 +612,12 @@ class ResultProxy(object):
"""
if not self.context.compiled:
raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
+ "Statement is not a compiled "
+ "expression construct.")
elif not self.context.isupdate:
raise exc.InvalidRequestError(
- "Statement is not an update() "
- "expression construct.")
+ "Statement is not an update() "
+ "expression construct.")
elif self.context.executemany:
return self.context.compiled_parameters
else:
@@ -637,12 +634,12 @@ class ResultProxy(object):
"""
if not self.context.compiled:
raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
+ "Statement is not a compiled "
+ "expression construct.")
elif not self.context.isinsert:
raise exc.InvalidRequestError(
- "Statement is not an insert() "
- "expression construct.")
+ "Statement is not an insert() "
+ "expression construct.")
elif self.context.executemany:
return self.context.compiled_parameters
else:
@@ -690,12 +687,12 @@ class ResultProxy(object):
if not self.context.compiled:
raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
+ "Statement is not a compiled "
+ "expression construct.")
elif not self.context.isinsert and not self.context.isupdate:
raise exc.InvalidRequestError(
- "Statement is not an insert() or update() "
- "expression construct.")
+ "Statement is not an insert() or update() "
+ "expression construct.")
return self.context.postfetch_cols
def prefetch_cols(self):
@@ -712,12 +709,12 @@ class ResultProxy(object):
if not self.context.compiled:
raise exc.InvalidRequestError(
- "Statement is not a compiled "
- "expression construct.")
+ "Statement is not a compiled "
+ "expression construct.")
elif not self.context.isinsert and not self.context.isupdate:
raise exc.InvalidRequestError(
- "Statement is not an insert() or update() "
- "expression construct.")
+ "Statement is not an insert() or update() "
+ "expression construct.")
return self.context.prefetch_cols
def supports_sane_rowcount(self):
@@ -762,8 +759,8 @@ class ResultProxy(object):
def _non_result(self):
if self._metadata is None:
raise exc.ResourceClosedError(
- "This result object does not return rows. "
- "It has been closed automatically.",
+ "This result object does not return rows. "
+ "It has been closed automatically.",
)
else:
raise exc.ResourceClosedError("This result object is closed.")
@@ -793,8 +790,8 @@ class ResultProxy(object):
return l
except Exception as e:
self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
+ e, None, None,
+ self.cursor, self.context)
def fetchmany(self, size=None):
"""Fetch many rows, just like DB-API
@@ -812,8 +809,8 @@ class ResultProxy(object):
return l
except Exception as e:
self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
+ e, None, None,
+ self.cursor, self.context)
def fetchone(self):
"""Fetch one row, just like DB-API ``cursor.fetchone()``.
@@ -831,8 +828,8 @@ class ResultProxy(object):
return None
except Exception as e:
self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
+ e, None, None,
+ self.cursor, self.context)
def first(self):
"""Fetch the first row and then close the result set unconditionally.
@@ -847,8 +844,8 @@ class ResultProxy(object):
row = self._fetchone_impl()
except Exception as e:
self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
+ e, None, None,
+ self.cursor, self.context)
try:
if row is not None:
@@ -945,6 +942,7 @@ class FullyBufferedResultProxy(ResultProxy):
such as MSSQL INSERT...OUTPUT after an autocommit.
"""
+
def _init_metadata(self):
super(FullyBufferedResultProxy, self)._init_metadata()
self.__rowbuffer = self._buffer_rows()
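
For the ResultProxy and RowProxy accessors touched throughout this file, a hedged end-to-end sketch; the table and column names are assumptions:

    from sqlalchemy import create_engine

    engine = create_engine("sqlite:///:memory:")
    engine.execute("create table t (id integer primary key, x integer)")
    engine.execute("insert into t (x) values (7)")

    result = engine.execute("select id, x from t")   # a ResultProxy
    row = result.fetchone()                          # a RowProxy
    print(row[0], row["x"], row.x)    # positional, key and attribute access
    print(result.fetchone())          # None once rows are exhausted
    result.close()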
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index 23d24e979..38206be89 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -91,8 +91,8 @@ class DefaultEngineStrategy(EngineStrategy):
except dialect.dbapi.Error as e:
invalidated = dialect.is_disconnect(e, None, None)
util.raise_from_cause(
- exc.DBAPIError.instance(None, None,
- e, dialect.dbapi.Error,
+ exc.DBAPIError.instance(
+ None, None, e, dialect.dbapi.Error,
connection_invalidated=invalidated
)
)
@@ -161,7 +161,7 @@ class DefaultEngineStrategy(EngineStrategy):
def first_connect(dbapi_connection, connection_record):
c = base.Connection(engine, connection=dbapi_connection,
- _has_events=False)
+ _has_events=False)
dialect.initialize(c)
event.listen(pool, 'first_connect', first_connect, once=True)
@@ -246,11 +246,11 @@ class MockEngineStrategy(EngineStrategy):
self.dialect, self, **kwargs).traverse_single(entity)
def _run_visitor(self, visitorcallable, element,
- connection=None,
- **kwargs):
+ connection=None,
+ **kwargs):
kwargs['checkfirst'] = False
visitorcallable(self.dialect, self,
- **kwargs).traverse_single(element)
+ **kwargs).traverse_single(element)
def execute(self, object, *multiparams, **params):
raise NotImplementedError()
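
MockEngineStrategy, whose _run_visitor() is reformatted above, backs the documented recipe for dumping DDL as strings; a hedged sketch, with the target dialect URL chosen arbitrarily:

    from sqlalchemy import MetaData, Table, Column, Integer, create_engine

    metadata = MetaData()
    Table("t", metadata, Column("id", Integer, primary_key=True))

    def dump(sql, *multiparams, **params):
        # the mock engine hands every DDL element to this executor
        print(sql.compile(dialect=mock_engine.dialect))

    mock_engine = create_engine("postgresql://", strategy="mock", executor=dump)
    metadata.create_all(mock_engine, checkfirst=False)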
diff --git a/lib/sqlalchemy/engine/threadlocal.py b/lib/sqlalchemy/engine/threadlocal.py
index eb923e96b..637523a0e 100644
--- a/lib/sqlalchemy/engine/threadlocal.py
+++ b/lib/sqlalchemy/engine/threadlocal.py
@@ -95,20 +95,20 @@ class TLEngine(base.Engine):
def prepare(self):
if not hasattr(self._connections, 'trans') or \
- not self._connections.trans:
+ not self._connections.trans:
return
self._connections.trans[-1].prepare()
def commit(self):
if not hasattr(self._connections, 'trans') or \
- not self._connections.trans:
+ not self._connections.trans:
return
trans = self._connections.trans.pop(-1)
trans.commit()
def rollback(self):
if not hasattr(self._connections, 'trans') or \
- not self._connections.trans:
+ not self._connections.trans:
return
trans = self._connections.trans.pop(-1)
trans.rollback()
@@ -120,8 +120,8 @@ class TLEngine(base.Engine):
@property
def closed(self):
return not hasattr(self._connections, 'conn') or \
- self._connections.conn() is None or \
- self._connections.conn().closed
+ self._connections.conn() is None or \
+ self._connections.conn().closed
def close(self):
if not self.closed:
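
TLEngine is selected with strategy='threadlocal'; the commit() and rollback() methods reflowed above manage an implicit, thread-scoped transaction, used roughly as sketched here (the table is an assumption):

    from sqlalchemy import create_engine

    tl_engine = create_engine("sqlite:///:memory:", strategy="threadlocal")
    tl_engine.execute("create table t (x integer)")

    tl_engine.begin()                 # opens a thread-local transaction
    try:
        tl_engine.execute("insert into t (x) values (1)")
        tl_engine.commit()
    except Exception:
        tl_engine.rollback()
        raise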
diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py
index 7d61968b9..e3629613f 100644
--- a/lib/sqlalchemy/engine/url.py
+++ b/lib/sqlalchemy/engine/url.py
@@ -25,8 +25,8 @@ class URL(object):
Represent the components of a URL used to connect to a database.
This object is suitable to be passed directly to a
- :func:`~sqlalchemy.create_engine` call. The fields of the URL are parsed from a
- string by the :func:`.make_url` function. the string
+ :func:`~sqlalchemy.create_engine` call. The fields of the URL are parsed
+ from a string by the :func:`.make_url` function. the string
format of the URL is an RFC-1738-style string.
All initialization parameters are available as public attributes.
@@ -119,8 +119,8 @@ class URL(object):
# would return a module with 'dialect' as the
# actual class
if hasattr(cls, 'dialect') and \
- isinstance(cls.dialect, type) and \
- issubclass(cls.dialect, Dialect):
+ isinstance(cls.dialect, type) and \
+ issubclass(cls.dialect, Dialect):
return cls.dialect
else:
return cls
@@ -189,7 +189,8 @@ def _parse_rfc1738_args(name):
if components['database'] is not None:
tokens = components['database'].split('?', 2)
components['database'] = tokens[0]
- query = (len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None
+ query = (
+ len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None
if util.py2k and query is not None:
query = dict((k.encode('ascii'), query[k]) for k in query)
else:
@@ -215,9 +216,11 @@ def _parse_rfc1738_args(name):
def _rfc_1738_quote(text):
return re.sub(r'[:@/]', lambda m: "%%%X" % ord(m.group(0)), text)
+
def _rfc_1738_unquote(text):
return util.unquote(text)
+
def _parse_keyvalue_args(name):
m = re.match(r'(\w+)://(.*)', name)
if m is not None:
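
The URL class reflowed above is usually constructed through make_url(); a small sketch, with the credentials and query argument being placeholders:

    from sqlalchemy.engine.url import make_url

    url = make_url("postgresql://scott:tiger@localhost:5432/mydb?sslmode=require")
    print(url.drivername)     # 'postgresql'
    print(url.username, url.host, url.port, url.database)
    print(url.query)          # {'sslmode': 'require'}
    print(url.get_dialect())  # dialect class resolved from the drivername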
diff --git a/lib/sqlalchemy/engine/util.py b/lib/sqlalchemy/engine/util.py
index 6a6fe6ee1..d9eb1df10 100644
--- a/lib/sqlalchemy/engine/util.py
+++ b/lib/sqlalchemy/engine/util.py
@@ -7,6 +7,7 @@
from .. import util
+
def connection_memoize(key):
"""Decorator, memoize a function in a connection.info stash.
@@ -61,7 +62,7 @@ def py_fallback():
return [[zero]]
else:
if hasattr(multiparams[0], '__iter__') and \
- not hasattr(multiparams[0], 'strip'):
+ not hasattr(multiparams[0], 'strip'):
return multiparams
else:
return [multiparams]
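
connection_memoize() caches a method's return value in connection.info under the given key; a hedged sketch in which the class, the key and the query are all assumptions:

    from sqlalchemy import create_engine
    from sqlalchemy.engine.util import connection_memoize

    class VersionInfo(object):
        @connection_memoize("_assumed_version_key")
        def get(self, connection):
            # runs once per underlying connection, then served from
            # connection.info["_assumed_version_key"]
            return connection.scalar("select sqlite_version()")

    engine = create_engine("sqlite:///:memory:")
    with engine.connect() as conn:
        info = VersionInfo()
        print(info.get(conn))   # executes the query
        print(info.get(conn))   # returned from the memo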