Diffstat (limited to 'lib/sqlalchemy/engine/base.py')
-rw-r--r--  lib/sqlalchemy/engine/base.py  123
1 file changed, 61 insertions(+), 62 deletions(-)
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index bd0f15ecd..b810f5f67 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -305,9 +305,6 @@ class ExecutionContext(object):
should_autocommit
True if the statement is a "committable" statement
- returns_rows
- True if the statement should return result rows
-
postfetch_cols
a list of Column objects for which a server-side default
or inline SQL expression value was fired off. applies to inserts and updates.
@@ -835,7 +832,7 @@ class Connection(Connectable):
context = self.__create_execution_context(statement=statement, parameters=parameters)
self.__execute_raw(context)
self._autocommit(context)
- return context.result()
+ return context.get_result_proxy()
def __distill_params(self, multiparams, params):
"""given arguments from the calling form *multiparams, **params, return a list
@@ -889,7 +886,7 @@ class Connection(Connectable):
self.__execute_raw(context)
context.post_execution()
self._autocommit(context)
- return context.result()
+ return context.get_result_proxy()
def __execute_raw(self, context):
if context.executemany:
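[Note: both call sites now ask the execution context for its result via get_result_proxy() rather than result(); nothing changes from the caller's side, Connection.execute() still hands back a ResultProxy. A minimal usage sketch against an in-memory SQLite database; the table and data are illustrative only:

    from sqlalchemy import create_engine

    engine = create_engine("sqlite://")            # in-memory database for illustration
    conn = engine.connect()
    conn.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, name VARCHAR(50))")
    conn.execute("INSERT INTO users (name) VALUES ('ed')")

    # execute() hands back the ResultProxy produced by context.get_result_proxy()
    result = conn.execute("SELECT id, name FROM users")
    rows = result.fetchall()    # RowProxy objects; exhausting the rows also closes the proxy
    conn.close()
]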
@@ -1296,7 +1293,7 @@ class RowProxy(object):
self.__parent = parent
self.__row = row
- if self.__parent._ResultProxy__echo:
+ if self.__parent._echo:
self.__parent.context.engine.logger.debug("Row " + repr(row))
def close(self):
@@ -1392,11 +1389,8 @@ class ResultProxy(object):
self.closed = False
self.cursor = context.cursor
self.connection = context.root_connection
- self.__echo = context.engine._should_log_info
- if context.returns_rows:
- self._init_metadata()
- else:
- self.close()
+ self._echo = context.engine._should_log_info
+ self._init_metadata()
@property
def rowcount(self):
@@ -1411,51 +1405,55 @@ class ResultProxy(object):
return self.context.out_parameters
def _init_metadata(self):
+ metadata = self.cursor.description
+ if metadata is None:
+ # no results, close
+ self.close()
+ return
+
self._props = util.PopulateDict(None)
self._props.creator = self.__key_fallback()
self.keys = []
- metadata = self.cursor.description
- if metadata is not None:
- typemap = self.dialect.dbapi_type_map
+ typemap = self.dialect.dbapi_type_map
- for i, item in enumerate(metadata):
- colname = item[0].decode(self.dialect.encoding)
+ for i, item in enumerate(metadata):
+ colname = item[0].decode(self.dialect.encoding)
- if '.' in colname:
- # sqlite will in some circumstances prepend table name to colnames, so strip
- origname = colname
- colname = colname.split('.')[-1]
- else:
- origname = None
+ if '.' in colname:
+ # sqlite will in some circumstances prepend table name to colnames, so strip
+ origname = colname
+ colname = colname.split('.')[-1]
+ else:
+ origname = None
- if self.context.result_map:
- try:
- (name, obj, type_) = self.context.result_map[colname.lower()]
- except KeyError:
- (name, obj, type_) = (colname, None, typemap.get(item[1], types.NULLTYPE))
- else:
+ if self.context.result_map:
+ try:
+ (name, obj, type_) = self.context.result_map[colname.lower()]
+ except KeyError:
(name, obj, type_) = (colname, None, typemap.get(item[1], types.NULLTYPE))
+ else:
+ (name, obj, type_) = (colname, None, typemap.get(item[1], types.NULLTYPE))
- rec = (type_, type_.dialect_impl(self.dialect).result_processor(self.dialect), i)
+ rec = (type_, type_.dialect_impl(self.dialect).result_processor(self.dialect), i)
- if self._props.setdefault(name.lower(), rec) is not rec:
- self._props[name.lower()] = (type_, self.__ambiguous_processor(name), 0)
+ if self._props.setdefault(name.lower(), rec) is not rec:
+ self._props[name.lower()] = (type_, self.__ambiguous_processor(name), 0)
- # store the "origname" if we truncated (sqlite only)
- if origname:
- if self._props.setdefault(origname.lower(), rec) is not rec:
- self._props[origname.lower()] = (type_, self.__ambiguous_processor(origname), 0)
+ # store the "origname" if we truncated (sqlite only)
+ if origname:
+ if self._props.setdefault(origname.lower(), rec) is not rec:
+ self._props[origname.lower()] = (type_, self.__ambiguous_processor(origname), 0)
- self.keys.append(colname)
- self._props[i] = rec
- if obj:
- for o in obj:
- self._props[o] = rec
+ self.keys.append(colname)
+ self._props[i] = rec
+ if obj:
+ for o in obj:
+ self._props[o] = rec
- if self.__echo:
- self.context.engine.logger.debug(
- "Col " + repr(tuple(x[0] for x in metadata)))
+ if self._echo:
+ self.context.engine.logger.debug(
+ "Col " + repr(tuple(x[0] for x in metadata)))
def __key_fallback(self):
# create a closure without 'self' to avoid circular references
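[Note: the rewritten _init_metadata() leans on the DBAPI contract that cursor.description is None for statements that return no rows, so the method itself can decide to close the proxy and the old returns_rows flag becomes unnecessary. A small sketch of that contract using the raw sqlite3 DBAPI:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    cursor = conn.cursor()

    cursor.execute("CREATE TABLE t (x INTEGER)")
    assert cursor.description is None        # no result rows -> ResultProxy closes itself

    cursor.execute("SELECT x FROM t")
    assert cursor.description is not None    # one 7-item tuple per column -> metadata gets built
]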
@@ -1481,18 +1479,24 @@ class ResultProxy(object):
def __ambiguous_processor(self, colname):
def process(value):
- raise exc.InvalidRequestError("Ambiguous column name '%s' in result set! try 'use_labels' option on select statement." % colname)
+ raise exc.InvalidRequestError("Ambiguous column name '%s' in result set! "
+ "try 'use_labels' option on select statement." % colname)
return process
def close(self):
- """Close this ResultProxy, and the underlying DB-API cursor corresponding to the execution.
+ """Close this ResultProxy.
+
+ Closes the underlying DBAPI cursor corresponding to the execution.
If this ResultProxy was generated from an implicit execution,
the underlying Connection will also be closed (returns the
- underlying DB-API connection to the connection pool.)
+ underlying DBAPI connection to the connection pool.)
- This method is also called automatically when all result rows
- are exhausted.
+ This method is called automatically when:
+
+ * all result rows are exhausted using the fetchXXX() methods.
+ * cursor.description is None.
+
"""
if not self.closed:
self.closed = True
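[Note: the ambiguous-column processor fires when two selected columns collapse onto the same result-set key. A hedged sketch of the situation it guards against, written against the expression API of this era with hypothetical tables; use_labels is the remedy the error message suggests:

    from sqlalchemy import MetaData, Table, Column, Integer, String, select

    meta = MetaData()
    users = Table('users', meta,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(50)))
    addresses = Table('addresses', meta,
                      Column('id', Integer, primary_key=True),
                      Column('user_id', Integer),
                      Column('name', String(50)))

    # both tables contribute a column keyed 'name'; accessing row['name'] on a
    # result row would raise InvalidRequestError("Ambiguous column name 'name' ...")
    stmt = select([users, addresses], users.c.id == addresses.c.user_id)

    # use_labels=True keys the columns as users_name / addresses_name instead
    stmt = select([users, addresses], users.c.id == addresses.c.user_id, use_labels=True)
]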
@@ -1663,7 +1667,9 @@ class BufferedRowResultProxy(ResultProxy):
The pre-fetching behavior fetches only one row initially, and then
grows its buffer size by a fixed amount with each successive need
for additional rows up to a size of 100.
+
"""
+
def _init_metadata(self):
self.__buffer_rows()
super(BufferedRowResultProxy, self)._init_metadata()
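[Note: the growth policy described above can be pictured as a simple step rule; the numbers here are illustrative, only the "one row first, capped at 100" behavior comes from the docstring:

    def next_buffer_size(current, step=10, ceiling=100):
        # hypothetical growth rule: widen the fetchmany() chunk by a fixed step
        # on every refill, never exceeding the 100-row ceiling named above
        return min(current + step, ceiling)

    sizes = [1]
    while sizes[-1] < 100:
        sizes.append(next_buffer_size(sizes[-1]))
    # sizes -> [1, 11, 21, 31, 41, 51, 61, 71, 81, 91, 100]
]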
@@ -1716,7 +1722,9 @@ class BufferedColumnResultProxy(ResultProxy):
of scope unless explicitly fetched. Currently this includes just
cx_Oracle LOB objects, but this behavior is known to exist in
other DB-APIs as well (Pygresql, currently unsupported).
+
"""
+
_process_row = BufferedColumnRow
def _get_col(self, row, key):
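[Note: the point of the buffered-column variant is that some DBAPI column values are live handles rather than plain data. A hedged sketch of the eager per-row copy it implies; the .read() call is the cx_Oracle LOB idiom, and the surrounding cursor handling is assumed:

    def materialize_row(raw_row):
        # copy every column value out of the driver's row before the next
        # fetch invalidates it; LOB handles are read into plain strings
        processed = []
        for value in raw_row:
            if hasattr(value, 'read'):      # e.g. a cx_Oracle LOB handle
                processed.append(value.read())
            else:
                processed.append(value)
        return tuple(processed)
]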
@@ -1757,8 +1765,8 @@ class SchemaIterator(schema.SchemaVisitor):
"""A visitor that can gather text into a buffer and execute the contents of the buffer."""
def __init__(self, connection):
- """Construct a new SchemaIterator.
- """
+ """Construct a new SchemaIterator."""
+
self.connection = connection
self.buffer = StringIO.StringIO()
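[Note: SchemaIterator's gather-then-execute pattern, sketched as a hypothetical stand-in class for clarity; only the __init__ shown above is taken from the diff, the other method names are assumptions:

    import StringIO

    class BufferedDDL(object):
        def __init__(self, connection):
            self.connection = connection
            self.buffer = StringIO.StringIO()

        def append(self, text):
            self.buffer.write(text)                 # gather DDL text into the buffer

        def execute(self):
            stmt = self.buffer.getvalue()
            self.buffer = StringIO.StringIO()       # reset for the next statement
            if stmt.strip():
                return self.connection.execute(stmt)
]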
@@ -1783,6 +1791,7 @@ class DefaultRunner(schema.SchemaVisitor):
DefaultRunners are used internally by Engines and Dialects.
Specific database modules should provide their own subclasses of
DefaultRunner to allow database-specific behavior.
+
"""
def __init__(self, context):
@@ -1803,19 +1812,9 @@ class DefaultRunner(schema.SchemaVisitor):
return None
def visit_passive_default(self, default):
- """Do nothing.
-
- Passive defaults by definition return None on the app side,
- and are post-fetched to get the DB-side value.
- """
-
return None
def visit_sequence(self, seq):
- """Do nothing.
-
- """
-
return None
def exec_default_sql(self, default):
@@ -1824,8 +1823,8 @@ class DefaultRunner(schema.SchemaVisitor):
return conn._execute_compiled(c).scalar()
def execute_string(self, stmt, params=None):
- """execute a string statement, using the raw cursor,
- and return a scalar result."""
+ """execute a string statement, using the raw cursor, and return a scalar result."""
+
conn = self.context._connection
if isinstance(stmt, unicode) and not self.dialect.supports_unicode_statements:
stmt = stmt.encode(self.dialect.encoding)
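[Note: execute_string() does two small things: it encodes the statement for dialects that cannot take unicode directly, then runs it on the raw cursor and pulls a single scalar back. A hedged standalone sketch of the same flow, with the connection/cursor handling simplified and the helper name invented for illustration:

    def execute_scalar(connection, dialect, stmt, params=None):
        # encode unicode SQL for drivers that only accept byte strings
        if isinstance(stmt, unicode) and not dialect.supports_unicode_statements:
            stmt = stmt.encode(dialect.encoding)
        cursor = connection.connection.cursor()     # raw DBAPI cursor
        try:
            cursor.execute(stmt, params or ())
            row = cursor.fetchone()
            return row[0] if row is not None else None
        finally:
            cursor.close()
]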