-rw-r--r--  CHANGES                            9
-rw-r--r--  lib/sqlalchemy/engine/base.py     18
-rw-r--r--  lib/sqlalchemy/sql/expression.py  17
-rw-r--r--  test/engine/test_execute.py       31
4 files changed, 72 insertions(+), 3 deletions(-)
diff --git a/CHANGES b/CHANGES
index 3ecac29ed..550428a44 100644
--- a/CHANGES
+++ b/CHANGES
@@ -73,6 +73,15 @@ CHANGES
- Fixed bug in execution_options() feature whereby the existing
Transaction and other state information from the parent
connection would not be propagated to the sub-connection.
+
+ - Added new 'compiled_cache' execution option. A dictionary
+ where Compiled objects will be cached when the Connection
+ compiles a clause expression into a dialect- and parameter-
+ specific Compiled object. It is the user's responsibility to
+ manage the size of this dictionary, which will have keys
+ corresponding to the dialect, clause element, the column
+ names within the VALUES or SET clause of an INSERT or UPDATE,
+ as well as the "batch" mode for an INSERT or UPDATE statement.
- ext
- the compiler extension now allows @compiles decorators
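
A minimal usage sketch of the new option described in the CHANGES entry above. The table and column names are illustrative, an in-memory SQLite engine stands in for a real database, and it assumes a SQLAlchemy build that includes this change.

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String

    engine = create_engine('sqlite://')
    metadata = MetaData()
    users = Table('users', metadata,
                  Column('user_id', Integer, primary_key=True),
                  Column('user_name', String(20)))
    metadata.create_all(engine)

    # the caller owns this dict and is responsible for bounding its size
    cache = {}
    conn = engine.connect().execution_options(compiled_cache=cache)

    ins = users.insert()
    conn.execute(ins, {'user_name': 'u1'})
    conn.execute(ins, {'user_name': 'u2'})

    # both executions share one Compiled entry, keyed on dialect,
    # clause element, column keys and the "batch" flag
    assert len(cache) == 1
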
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index f040ec920..4c5a6a82b 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1150,10 +1150,22 @@ class Connection(Connectable):
else:
keys = []
+ if 'compiled_cache' in self._execution_options:
+ key = self.dialect, elem, tuple(keys), len(params) > 1
+ if key in self._execution_options['compiled_cache']:
+ compiled_sql = self._execution_options['compiled_cache'][key]
+ else:
+ compiled_sql = elem.compile(
+ dialect=self.dialect, column_keys=keys,
+ inline=len(params) > 1)
+ self._execution_options['compiled_cache'][key] = compiled_sql
+ else:
+ compiled_sql = elem.compile(
+ dialect=self.dialect, column_keys=keys,
+ inline=len(params) > 1)
+
context = self.__create_execution_context(
- compiled_sql=elem.compile(
- dialect=self.dialect, column_keys=keys,
- inline=len(params) > 1),
+ compiled_sql=compiled_sql,
parameters=params
)
return self.__execute_context(context)
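
A simplified, standalone restatement of the lookup logic added in the hunk above: the cache key combines the dialect, the clause element, the column keys of the VALUES or SET clause, and the "batch" (executemany) flag, so the same construct re-executed with the same parameter keys is compiled only once. The function and argument names here are stand-ins, not part of the change.

    def compile_with_cache(cache, dialect, elem, keys, params):
        # same key shape as the Connection code above: dialect, clause
        # element, column keys, and whether this is a "batch" (executemany) call
        key = (dialect, elem, tuple(keys), len(params) > 1)
        if key not in cache:
            cache[key] = elem.compile(dialect=dialect, column_keys=keys,
                                      inline=len(params) > 1)
        return cache[key]
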
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index 5958a0bc4..1222a144f 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -2276,6 +2276,23 @@ class Executable(_Generative):
of many DBAPIs. The flag is currently understood only by the
psycopg2 dialect.
+ * compiled_cache - a dictionary where :class:`Compiled` objects
+ will be cached when the :class:`Connection` compiles a clause
+ expression into a dialect- and parameter-specific
+ :class:`Compiled` object. It is the user's responsibility to
+ manage the size of this dictionary, which will have keys
+ corresponding to the dialect, clause element, the column
+ names within the VALUES or SET clause of an INSERT or UPDATE,
+ as well as the "batch" mode for an INSERT or UPDATE statement.
+ The format of this dictionary is not guaranteed to stay the
+ same in future releases.
+
+ This option is usually more appropriate
+ to use via the
+ :meth:`sqlalchemy.engine.base.Connection.execution_options()`
+ method of :class:`Connection`, rather than upon individual
+ statement objects, though the effect is the same.
+
See also:
:meth:`sqlalchemy.engine.base.Connection.execution_options()`
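
As the docstring above notes, the option can be attached either to a Connection or to an individual statement, with the same effect. A short sketch of both placements, continuing from the engine, users table, and cache dict of the earlier sketch:

    # per-connection: every statement executed on cached_conn consults the cache
    cached_conn = engine.connect().execution_options(compiled_cache=cache)
    cached_conn.execute(users.select())

    # per-statement: only this particular construct participates in the cache
    stmt = users.select().execution_options(compiled_cache=cache)
    cached_conn.execute(stmt)
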
diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py
index 8fd5e7eb6..e83166c9a 100644
--- a/test/engine/test_execute.py
+++ b/test/engine/test_execute.py
@@ -111,6 +111,37 @@ class ExecuteTest(TestBase):
(1, None)
])
+class CompiledCacheTest(TestBase):
+ @classmethod
+ def setup_class(cls):
+ global users, metadata
+ metadata = MetaData(testing.db)
+ users = Table('users', metadata,
+ Column('user_id', INT, primary_key = True),
+ Column('user_name', VARCHAR(20)),
+ )
+ metadata.create_all()
+
+ @engines.close_first
+ def teardown(self):
+ testing.db.connect().execute(users.delete())
+
+ @classmethod
+ def teardown_class(cls):
+ metadata.drop_all()
+
+ def test_cache(self):
+ conn = testing.db.connect()
+ cache = {}
+ cached_conn = conn.execution_options(compiled_cache=cache)
+
+ ins = users.insert()
+ cached_conn.execute(ins, {'user_name':'u1'})
+ cached_conn.execute(ins, {'user_name':'u2'})
+ cached_conn.execute(ins, {'user_name':'u3'})
+ assert len(cache) == 1
+ eq_(conn.execute("select count(1) from users").scalar(), 3)
+
class LogTest(TestBase):
def _test_logger(self, eng, eng_name, pool_name):
buf = logging.handlers.BufferingHandler(100)