Diffstat (limited to 'lib/sqlalchemy/engine/result.py')
-rw-r--r--  lib/sqlalchemy/engine/result.py  395
1 file changed, 287 insertions(+), 108 deletions(-)
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index 3995942ef..6d19cb6d0 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -1,5 +1,5 @@
# engine/result.py
-# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -187,86 +187,162 @@ class ResultMetaData(object):
context."""
def __init__(self, parent, metadata):
- self._processors = processors = []
-
- # We do not strictly need to store the processor in the key mapping,
- # though it is faster in the Python version (probably because of the
- # saved attribute lookup self._processors)
- self._keymap = keymap = {}
- self.keys = []
context = parent.context
dialect = context.dialect
typemap = dialect.dbapi_type_map
translate_colname = context._translate_colname
- self.case_sensitive = dialect.case_sensitive
-
- # high precedence key values.
- primary_keymap = {}
-
- for i, rec in enumerate(metadata):
- colname = rec[0]
- coltype = rec[1]
-
- if dialect.description_encoding:
- colname = dialect._description_decoder(colname)
+ self.case_sensitive = case_sensitive = dialect.case_sensitive
+ if context.result_column_struct:
+ result_columns, cols_are_ordered = context.result_column_struct
+ num_ctx_cols = len(result_columns)
+ else:
+ num_ctx_cols = None
+
+ if num_ctx_cols and \
+ cols_are_ordered and \
+ num_ctx_cols == len(metadata):
+ # case 1 - SQL expression statement, number of columns
+ # in result matches number of cols in compiled. This is the
+ # vast majority case for SQL expression constructs. In this
+ # case we don't bother trying to parse or match up to
+ # the colnames in the result description.
+ raw = [
+ (
+ idx,
+ key,
+ name.lower() if not case_sensitive else name,
+ context.get_result_processor(
+ type_, key, metadata[idx][1]
+ ),
+ obj,
+ None
+ ) for idx, (key, name, obj, type_)
+ in enumerate(result_columns)
+ ]
+ self.keys = [
+ elem[1] for elem in result_columns
+ ]
+ else:
+ # case 2 - raw string, or number of columns in result does
+ # not match number of cols in compiled. The raw string case
+ # is very common. The latter can happen
+ # when text() is used with only a partial typemap, or
+ # in the extremely unlikely cases where the compiled construct
+ # has a single element with multiple col expressions in it
+ # (e.g. has commas embedded) or there's some kind of statement
+ # that is adding extra columns.
+ # In all these cases we fall back to the "named" approach
+ # that SQLAlchemy has used up through 0.9.
+
+ if num_ctx_cols:
+ result_map = self._create_result_map(result_columns)
+
+ raw = []
+ self.keys = []
+ untranslated = None
+ for idx, rec in enumerate(metadata):
+ colname = rec[0]
+ coltype = rec[1]
+
+ if dialect.description_encoding:
+ colname = dialect._description_decoder(colname)
+
+ if translate_colname:
+ colname, untranslated = translate_colname(colname)
+
+ if dialect.requires_name_normalize:
+ colname = dialect.normalize_name(colname)
+
+ self.keys.append(colname)
+ if not case_sensitive:
+ colname = colname.lower()
+
+ if num_ctx_cols:
+ try:
+ ctx_rec = result_map[colname]
+ except KeyError:
+ mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
+ obj = None
+ else:
+ obj = ctx_rec[1]
+ mapped_type = ctx_rec[2]
+ else:
+ mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
+ obj = None
+ processor = context.get_result_processor(
+ mapped_type, colname, coltype)
+
+ raw.append(
+ (idx, colname, colname, processor, obj, untranslated)
+ )
+
+ # keymap indexes by integer index...
+ self._keymap = dict([
+ (elem[0], (elem[3], elem[4], elem[0]))
+ for elem in raw
+ ])
+
+ # processors in key order for certain per-row
+ # views like __iter__ and slices
+ self._processors = [elem[3] for elem in raw]
+
+ if num_ctx_cols:
+ # keymap by primary string...
+ by_key = dict([
+ (elem[2], (elem[3], elem[4], elem[0]))
+ for elem in raw
+ ])
+
+ # if the by-primary-string dictionary is smaller (or bigger?!) than
+ # number of columns, assume we have dupes, rewrite
+ # dupe records with "None" for index which results in
+ # ambiguous column exception when accessed.
+ if len(by_key) != num_ctx_cols:
+ seen = set()
+ for rec in raw:
+ key = rec[1]
+ if key in seen:
+ by_key[key] = (None, by_key[key][1], None)
+ seen.add(key)
+
+ # update keymap with secondary "object"-based keys
+ self._keymap.update([
+ (obj_elem, by_key[elem[2]])
+ for elem in raw if elem[4]
+ for obj_elem in elem[4]
+ ])
+
+ # update keymap with primary string names taking
+ # precedence
+ self._keymap.update(by_key)
+ else:
+ self._keymap.update([
+ (elem[2], (elem[3], elem[4], elem[0]))
+ for elem in raw
+ ])
+ # update keymap with "translated" names (sqlite-only thing)
if translate_colname:
- colname, untranslated = translate_colname(colname)
-
- if dialect.requires_name_normalize:
- colname = dialect.normalize_name(colname)
-
- if context.result_map:
- try:
- name, obj, type_ = context.result_map[
- colname if self.case_sensitive else colname.lower()]
- except KeyError:
- name, obj, type_ = \
- colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
+ self._keymap.update([
+ (elem[5], self._keymap[elem[2]])
+ for elem in raw if elem[5]
+ ])
+
+ @classmethod
+ def _create_result_map(cls, result_columns):
+ d = {}
+ for elem in result_columns:
+ key, rec = elem[0], elem[1:]
+ if key in d:
+ # conflicting keyname: just double up the list
+ # of objects. this will cause an "ambiguous name"
+ # error if the result set later attempts to
+ # access the column by that name.
+ e_name, e_obj, e_type = d[key]
+ d[key] = e_name, e_obj + rec[1], e_type
else:
- name, obj, type_ = \
- colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
-
- processor = context.get_result_processor(type_, colname, coltype)
-
- processors.append(processor)
- rec = (processor, obj, i)
-
- # indexes as keys. This is only needed for the Python version of
- # RowProxy (the C version uses a faster path for integer indexes).
- primary_keymap[i] = rec
-
- # populate primary keymap, looking for conflicts.
- if primary_keymap.setdefault(
- name if self.case_sensitive
- else name.lower(),
- rec) is not rec:
- # place a record that doesn't have the "index" - this
- # is interpreted later as an AmbiguousColumnError,
- # but only when actually accessed. Columns
- # colliding by name is not a problem if those names
- # aren't used; integer access is always
- # unambiguous.
- primary_keymap[name
- if self.case_sensitive
- else name.lower()] = rec = (None, obj, None)
-
- self.keys.append(colname)
- if obj:
- for o in obj:
- keymap[o] = rec
- # technically we should be doing this but we
- # are saving on callcounts by not doing so.
- # if keymap.setdefault(o, rec) is not rec:
- # keymap[o] = (None, obj, None)
-
- if translate_colname and \
- untranslated:
- keymap[untranslated] = rec
-
- # overwrite keymap values with those of the
- # high precedence keymap.
- keymap.update(primary_keymap)
+ d[key] = rec
+ return d
@util.pending_deprecation("0.8", "sqlite dialect uses "
"_translate_colname() now")
@@ -403,11 +479,12 @@ class ResultProxy(object):
out_parameters = None
_can_close_connection = False
_metadata = None
+ _soft_closed = False
+ closed = False
def __init__(self, context):
self.context = context
self.dialect = context.dialect
- self.closed = False
self.cursor = self._saved_cursor = context.cursor
self.connection = context.root_connection
self._echo = self.connection._echo and \
@@ -544,33 +621,79 @@ class ResultProxy(object):
return self._saved_cursor.description
- def close(self, _autoclose_connection=True):
- """Close this ResultProxy.
-
- Closes the underlying DBAPI cursor corresponding to the execution.
+ def _soft_close(self, _autoclose_connection=True):
+ """Soft close this :class:`.ResultProxy`.
- Note that any data cached within this ResultProxy is still available.
- For some types of results, this may include buffered rows.
-
- If this ResultProxy was generated from an implicit execution,
- the underlying Connection will also be closed (returns the
- underlying DBAPI connection to the connection pool.)
+ This releases all DBAPI cursor resources, but leaves the
+ ResultProxy "open" from a semantic perspective, meaning the
+ fetchXXX() methods will continue to return empty results.
This method is called automatically when:
* all result rows are exhausted using the fetchXXX() methods.
* cursor.description is None.
+ This method is **not public**, but is documented in order to clarify
+ the "autoclose" process used.
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`.ResultProxy.close`
+
+
+ """
+ if self._soft_closed:
+ return
+ self._soft_closed = True
+ cursor = self.cursor
+ self.connection._safe_close_cursor(cursor)
+ if _autoclose_connection and \
+ self.connection.should_close_with_result:
+ self.connection.close()
+ self.cursor = None
+
+ def close(self):
+ """Close this ResultProxy.
+
+ This closes out the underlying DBAPI cursor corresponding
+ to the statement execution, if one is still present. Note that the
+ DBAPI cursor is automatically released when the :class:`.ResultProxy`
+ exhausts all available rows. :meth:`.ResultProxy.close` is generally
+ an optional method except in the case when discarding a
+ :class:`.ResultProxy` that still has additional rows pending for fetch.
+
+ In the case of a result that is the product of
+ :ref:`connectionless execution <dbengine_implicit>`,
+ the underlying :class:`.Connection` object is also closed, which
+ :term:`releases` DBAPI connection resources.
+
+ After this method is called, it is no longer valid to call upon
+ the fetch methods, which will raise a :class:`.ResourceClosedError`
+ on subsequent use.
+
+ .. versionchanged:: 1.0.0 - the :meth:`.ResultProxy.close` method
+ has been separated out from the process that releases the underlying
+ DBAPI cursor resource. The "auto close" feature of the
+ :class:`.Connection` now performs a so-called "soft close", which
+ releases the underlying DBAPI cursor, but allows the
+ :class:`.ResultProxy` to still behave as an open-but-exhausted
+ result set; the actual :meth:`.ResultProxy.close` method is never
+ called. It is still safe to discard a :class:`.ResultProxy`
+ that has been fully exhausted without calling this method.
+
+ .. seealso::
+
+ :ref:`connections_toplevel`
+
+ :meth:`.ResultProxy._soft_close`
+
"""
if not self.closed:
+ self._soft_close()
self.closed = True
- self.connection._safe_close_cursor(self.cursor)
- if _autoclose_connection and \
- self.connection.should_close_with_result:
- self.connection.close()
- # allow consistent errors
- self.cursor = None
def __iter__(self):
while True:
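
The soft-close contract documented above can be exercised end to end. A usage sketch, assuming a throwaway in-memory SQLite engine and the 1.0-era plain-string execute style:

    from sqlalchemy import create_engine
    from sqlalchemy.exc import ResourceClosedError

    conn = create_engine("sqlite://").connect()
    result = conn.execute("SELECT 1")

    rows = result.fetchall()    # exhausts rows; cursor is soft-closed
    print(result.fetchall())    # [] - semantically open, just exhausted

    result.close()              # explicit close
    try:
        result.fetchall()
    except ResourceClosedError as err:
        print(err)              # "This result object is closed."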
@@ -761,7 +884,7 @@ class ResultProxy(object):
try:
return self.cursor.fetchone()
except AttributeError:
- self._non_result()
+ return self._non_result(None)
def _fetchmany_impl(self, size=None):
try:
@@ -770,22 +893,24 @@ class ResultProxy(object):
else:
return self.cursor.fetchmany(size)
except AttributeError:
- self._non_result()
+ return self._non_result([])
def _fetchall_impl(self):
try:
return self.cursor.fetchall()
except AttributeError:
- self._non_result()
+ return self._non_result([])
- def _non_result(self):
+ def _non_result(self, default):
if self._metadata is None:
raise exc.ResourceClosedError(
"This result object does not return rows. "
"It has been closed automatically.",
)
- else:
+ elif self.closed:
raise exc.ResourceClosedError("This result object is closed.")
+ else:
+ return default
def process_rows(self, rows):
process_row = self._process_row
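
The three-way dispatch in _non_result() is likewise easy to restate in isolation. A sketch, with RuntimeError standing in for exc.ResourceClosedError:

    def non_result(metadata, closed, default):
        # no result metadata at all: the statement never returned rows
        if metadata is None:
            raise RuntimeError("This result object does not return rows.")
        # explicitly closed: any fetch is an error
        elif closed:
            raise RuntimeError("This result object is closed.")
        # soft-closed / exhausted: hand back the fetch method's default
        else:
            return default

    assert non_result(object(), closed=False, default=[]) == []
    assert non_result(object(), closed=False, default=None) is None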
@@ -804,11 +929,25 @@ class ResultProxy(object):
for row in rows]
def fetchall(self):
- """Fetch all rows, just like DB-API ``cursor.fetchall()``."""
+ """Fetch all rows, just like DB-API ``cursor.fetchall()``.
+
+ After all rows have been exhausted, the underlying DBAPI
+ cursor resource is released, and the object may be safely
+ discarded.
+
+ Subsequent calls to :meth:`.ResultProxy.fetchall` will return
+ an empty list. After the :meth:`.ResultProxy.close` method is
+ called, the method will raise :class:`.ResourceClosedError`.
+
+ .. versionchanged:: 1.0.0 - Added "soft close" behavior which
+ allows the result to be used in an "exhausted" state prior to
+ calling the :meth:`.ResultProxy.close` method.
+
+ """
try:
l = self.process_rows(self._fetchall_impl())
- self.close()
+ self._soft_close()
return l
except Exception as e:
self.connection._handle_dbapi_exception(
@@ -819,15 +958,25 @@ class ResultProxy(object):
"""Fetch many rows, just like DB-API
``cursor.fetchmany(size=cursor.arraysize)``.
- If rows are present, the cursor remains open after this is called.
- Else the cursor is automatically closed and an empty list is returned.
+ After all rows have been exhausted, the underlying DBAPI
+ cursor resource is released, and the object may be safely
+ discarded.
+
+ Calls to :meth:`.ResultProxy.fetchmany` after all rows have been
+ exhausted will return
+ an empty list. After the :meth:`.ResultProxy.close` method is
+ called, the method will raise :class:`.ResourceClosedError`.
+
+ .. versionchanged:: 1.0.0 - Added "soft close" behavior which
+ allows the result to be used in an "exhausted" state prior to
+ calling the :meth:`.ResultProxy.close` method.
"""
try:
l = self.process_rows(self._fetchmany_impl(size))
if len(l) == 0:
- self.close()
+ self._soft_close()
return l
except Exception as e:
self.connection._handle_dbapi_exception(
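
The fetchmany() contract above lends itself to a batch-consumption loop; a sketch under the same in-memory SQLite assumption as earlier:

    from sqlalchemy import create_engine

    conn = create_engine("sqlite://").connect()
    result = conn.execute("SELECT 1 UNION SELECT 2 UNION SELECT 3")
    while True:
        batch = result.fetchmany(2)
        if not batch:       # empty batch: exhausted and soft-closed
            break
        print(len(batch))   # 2, then 1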
@@ -837,8 +986,18 @@ class ResultProxy(object):
def fetchone(self):
"""Fetch one row, just like DB-API ``cursor.fetchone()``.
- If a row is present, the cursor remains open after this is called.
- Else the cursor is automatically closed and None is returned.
+ After all rows have been exhausted, the underlying DBAPI
+ cursor resource is released, and the object may be safely
+ discarded.
+
+ Calls to :meth:`.ResultProxy.fetchone` after all rows have
+ been exhausted will return ``None``.
+ After the :meth:`.ResultProxy.close` method is
+ called, the method will raise :class:`.ResourceClosedError`.
+
+ .. versionchanged:: 1.0.0 - Added "soft close" behavior which
+ allows the result to be used in an "exhausted" state prior to
+ calling the :meth:`.ResultProxy.close` method.
"""
try:
@@ -846,7 +1005,7 @@ class ResultProxy(object):
if row is not None:
return self.process_rows([row])[0]
else:
- self.close()
+ self._soft_close()
return None
except Exception as e:
self.connection._handle_dbapi_exception(
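
Likewise for fetchone(), whose ``None`` sentinel doubles as the exhaustion signal; same assumptions as the sketches above:

    from sqlalchemy import create_engine

    conn = create_engine("sqlite://").connect()
    result = conn.execute("SELECT 1 UNION SELECT 2")
    while True:
        row = result.fetchone()
        if row is None:             # exhausted; already soft-closed
            break
        print(row[0])
    print(result.fetchone())        # still None, not an error, until close()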
@@ -858,9 +1017,12 @@ class ResultProxy(object):
Returns None if no row is present.
+ After calling this method, the object is fully closed,
+ e.g. the :meth:`.ResultProxy.close` method will have been called.
+
"""
if self._metadata is None:
- self._non_result()
+ return self._non_result(None)
try:
row = self._fetchone_impl()
@@ -882,6 +1044,9 @@ class ResultProxy(object):
Returns None if no row is present.
+ After calling this method, the object is fully closed,
+ e.g. the :meth:`.ResultProxy.close` method will have been called.
+
"""
row = self.first()
if row is not None:
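
Because first() and scalar() fully close the result, they suit one-shot lookups; a sketch under the same assumptions:

    from sqlalchemy import create_engine

    conn = create_engine("sqlite://").connect()
    value = conn.execute("SELECT 42").scalar()   # 42; result fully closed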
@@ -925,13 +1090,19 @@ class BufferedRowResultProxy(ResultProxy):
}
def __buffer_rows(self):
+ if self.cursor is None:
+ return
size = getattr(self, '_bufsize', 1)
self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
self._bufsize = self.size_growth.get(size, size)
+ def _soft_close(self, **kw):
+ self.__rowbuffer.clear()
+ super(BufferedRowResultProxy, self)._soft_close(**kw)
+
def _fetchone_impl(self):
- if self.closed:
- return None
+ if self.cursor is None:
+ return self._non_result(None)
if not self.__rowbuffer:
self.__buffer_rows()
if not self.__rowbuffer:
@@ -950,6 +1121,8 @@ class BufferedRowResultProxy(ResultProxy):
return result
def _fetchall_impl(self):
+ if self.cursor is None:
+ return self._non_result([])
self.__rowbuffer.extend(self.cursor.fetchall())
ret = self.__rowbuffer
self.__rowbuffer = collections.deque()
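
The growing fetchmany() buffer driven by ``size_growth`` can be sketched against a plain DBAPI cursor; the growth-table values here are illustrative, not the library's exact numbers:

    import collections
    import sqlite3

    size_growth = {1: 5, 5: 10, 10: 20, 20: 50}   # illustrative values

    def buffer_rows(cursor, bufsize):
        """Fetch one batch; report the batch size to use next time."""
        rows = collections.deque(cursor.fetchmany(bufsize))
        return rows, size_growth.get(bufsize, bufsize)

    cur = sqlite3.connect(":memory:").execute(
        "SELECT 1 UNION SELECT 2 UNION SELECT 3")
    buf, nxt = buffer_rows(cur, 1)   # one row buffered; next batch is 5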
@@ -972,11 +1145,15 @@ class FullyBufferedResultProxy(ResultProxy):
def _buffer_rows(self):
return collections.deque(self.cursor.fetchall())
+ def _soft_close(self, **kw):
+ self.__rowbuffer.clear()
+ super(FullyBufferedResultProxy, self)._soft_close(**kw)
+
def _fetchone_impl(self):
if self.__rowbuffer:
return self.__rowbuffer.popleft()
else:
- return None
+ return self._non_result(None)
def _fetchmany_impl(self, size=None):
if size is None:
@@ -990,6 +1167,8 @@ class FullyBufferedResultProxy(ResultProxy):
return result
def _fetchall_impl(self):
+ if not self.cursor:
+ return self._non_result([])
ret = self.__rowbuffer
self.__rowbuffer = collections.deque()
return ret
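
FullyBufferedResultProxy's deque-backed fetches reduce to this pattern; a standalone sketch with canned rows:

    import collections

    rowbuffer = collections.deque([(1,), (2,), (3,)])

    def fetchone_impl():
        # serve rows from the left end, one at a time
        return rowbuffer.popleft() if rowbuffer else None

    def fetchall_impl():
        # drain whatever remains in a single list
        ret = list(rowbuffer)
        rowbuffer.clear()
        return ret

    print(fetchone_impl())   # (1,)
    print(fetchall_impl())   # [(2,), (3,)]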