summaryrefslogtreecommitdiff
path: root/lib/sqlalchemy/engine/cursor.py
diff options
context:
space:
mode:
authorJ. Nick Koston <nick@koston.org>2023-04-19 18:39:18 -0400
committerFederico Caselli <cfederico87@gmail.com>2023-04-26 20:19:17 +0200
commitff198e35f0e04b8d38df25df234e72259069b4d1 (patch)
treec48db9a0366b48c8caaa35ad9ab83a354aaa7d32 /lib/sqlalchemy/engine/cursor.py
parent9f675fd042b05977f1b38887c2fbbb54ecd424f7 (diff)
downloadsqlalchemy-ff198e35f0e04b8d38df25df234e72259069b4d1.tar.gz
Prebuild the row string to position lookup for Rows
Improved :class:`_engine.Row` implementation to optimize ``__getattr__`` performance. The serialization of a :class:`_engine.Row` to pickle has changed as part of this change. Pickle data saved by older SQLAlchemy versions can still be loaded, but new pickle data saved by this version cannot be loaded by older ones. Fixes: #9678 Closes: #9668 Pull-request: https://github.com/sqlalchemy/sqlalchemy/pull/9668 Pull-request-sha: 86b8ccd1959dbd91b1208f7a648a91f217e1f866 Change-Id: Ia85c26a59e1a57ba2bf0d65578c6168f82a559f2
Diffstat (limited to 'lib/sqlalchemy/engine/cursor.py')
-rw-r--r--lib/sqlalchemy/engine/cursor.py86
1 file changed, 40 insertions(+), 46 deletions(-)
diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py
index aaf2c1918..bd46f30ac 100644
--- a/lib/sqlalchemy/engine/cursor.py
+++ b/lib/sqlalchemy/engine/cursor.py
@@ -21,9 +21,9 @@ from typing import ClassVar
from typing import Dict
from typing import Iterator
from typing import List
+from typing import Mapping
from typing import NoReturn
from typing import Optional
-from typing import overload
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
@@ -123,7 +123,7 @@ _CursorKeyMapRecType = Tuple[
Optional[str], # MD_UNTRANSLATED
]
-_CursorKeyMapType = Dict["_KeyType", _CursorKeyMapRecType]
+_CursorKeyMapType = Mapping["_KeyType", _CursorKeyMapRecType]
# same as _CursorKeyMapRecType except the MD_INDEX value is definitely
# not None
@@ -149,7 +149,8 @@ class CursorResultMetaData(ResultMetaData):
"_tuplefilter",
"_translated_indexes",
"_safe_for_cache",
- "_unpickled"
+ "_unpickled",
+ "_key_to_index"
# don't need _unique_filters support here for now. Can be added
# if a need arises.
)
@@ -193,6 +194,7 @@ class CursorResultMetaData(ResultMetaData):
new_obj._translated_indexes = translated_indexes
new_obj._safe_for_cache = safe_for_cache
new_obj._keymap_by_result_column_idx = keymap_by_result_column_idx
+ new_obj._key_to_index = self._make_key_to_index(keymap, MD_INDEX)
return new_obj
def _remove_processors(self) -> CursorResultMetaData:
@@ -217,7 +219,7 @@ class CursorResultMetaData(ResultMetaData):
assert not self._tuplefilter
- keymap = self._keymap.copy()
+ keymap = dict(self._keymap)
offset = len(self._keys)
keymap.update(
{
@@ -232,7 +234,6 @@ class CursorResultMetaData(ResultMetaData):
for key, value in other._keymap.items()
}
)
-
return self._make_new_metadata(
unpickled=self._unpickled,
processors=self._processors + other._processors, # type: ignore
@@ -258,7 +259,7 @@ class CursorResultMetaData(ResultMetaData):
tup = tuplegetter(*indexes)
new_recs = [(index,) + rec[1:] for index, rec in enumerate(recs)]
- keymap: _KeyMapType = {rec[MD_LOOKUP_KEY]: rec for rec in new_recs}
+ keymap = {rec[MD_LOOKUP_KEY]: rec for rec in new_recs}
# TODO: need unit test for:
# result = connection.execute("raw sql, no columns").scalars()
# without the "or ()" it's failing because MD_OBJECTS is None
@@ -274,7 +275,7 @@ class CursorResultMetaData(ResultMetaData):
keys=new_keys,
tuplefilter=tup,
translated_indexes=indexes,
- keymap=keymap,
+ keymap=keymap, # type: ignore[arg-type]
safe_for_cache=self._safe_for_cache,
keymap_by_result_column_idx=self._keymap_by_result_column_idx,
)
@@ -491,6 +492,8 @@ class CursorResultMetaData(ResultMetaData):
}
)
+ self._key_to_index = self._make_key_to_index(self._keymap, MD_INDEX)
+
def _merge_cursor_description(
self,
context,
@@ -807,41 +810,25 @@ class CursorResultMetaData(ResultMetaData):
untranslated,
)
- @overload
- def _key_fallback(
- self, key: Any, err: Exception, raiseerr: Literal[True] = ...
- ) -> NoReturn:
- ...
+ if not TYPE_CHECKING:
- @overload
- def _key_fallback(
- self, key: Any, err: Exception, raiseerr: Literal[False] = ...
- ) -> None:
- ...
-
- @overload
- def _key_fallback(
- self, key: Any, err: Exception, raiseerr: bool = ...
- ) -> Optional[NoReturn]:
- ...
-
- def _key_fallback(
- self, key: Any, err: Exception, raiseerr: bool = True
- ) -> Optional[NoReturn]:
-
- if raiseerr:
- if self._unpickled and isinstance(key, elements.ColumnElement):
- raise exc.NoSuchColumnError(
- "Row was unpickled; lookup by ColumnElement "
- "is unsupported"
- ) from err
+ def _key_fallback(
+ self, key: Any, err: Optional[Exception], raiseerr: bool = True
+ ) -> Optional[NoReturn]:
+
+ if raiseerr:
+ if self._unpickled and isinstance(key, elements.ColumnElement):
+ raise exc.NoSuchColumnError(
+ "Row was unpickled; lookup by ColumnElement "
+ "is unsupported"
+ ) from err
+ else:
+ raise exc.NoSuchColumnError(
+ "Could not locate column in row for column '%s'"
+ % util.string_or_unprintable(key)
+ ) from err
else:
- raise exc.NoSuchColumnError(
- "Could not locate column in row for column '%s'"
- % util.string_or_unprintable(key)
- ) from err
- else:
- return None
+ return None
def _raise_for_ambiguous_column_name(self, rec):
raise exc.InvalidRequestError(
@@ -919,8 +906,8 @@ class CursorResultMetaData(ResultMetaData):
def __setstate__(self, state):
self._processors = [None for _ in range(len(state["_keys"]))]
self._keymap = state["_keymap"]
-
self._keymap_by_result_column_idx = None
+ self._key_to_index = self._make_key_to_index(self._keymap, MD_INDEX)
self._keys = state["_keys"]
self._unpickled = True
if state["_translated_indexes"]:
@@ -1371,6 +1358,14 @@ class _NoResultMetaData(ResultMetaData):
self._we_dont_return_rows()
@property
+ def _key_to_index(self):
+ self._we_dont_return_rows()
+
+ @property
+ def _processors(self):
+ self._we_dont_return_rows()
+
+ @property
def keys(self):
self._we_dont_return_rows()
@@ -1458,12 +1453,11 @@ class CursorResult(Result[_T]):
metadata = self._init_metadata(context, cursor_description)
- keymap = metadata._keymap
- processors = metadata._processors
- process_row = Row
- key_style = process_row._default_key_style
_make_row = functools.partial(
- process_row, metadata, processors, keymap, key_style
+ Row,
+ metadata,
+ metadata._processors,
+ metadata._key_to_index,
)
if log_row: