Diffstat (limited to 'examples')
-rw-r--r--    examples/generic_associations/discriminator_on_association.py      1
-rw-r--r--    examples/performance/__init__.py                                  428
-rw-r--r--    examples/performance/__main__.py                                    7
-rw-r--r--    examples/performance/bulk_inserts.py                              154
-rw-r--r--    examples/performance/bulk_updates.py                               54
-rw-r--r--    examples/performance/large_resultsets.py                          182
-rw-r--r--    examples/performance/short_selects.py                             134
-rw-r--r--    examples/performance/single_inserts.py                            166
-rw-r--r--    examples/sharding/attribute_shard.py                                2
-rw-r--r--    examples/versioned_history/history_meta.py                        104
-rw-r--r--    examples/versioned_history/test_versioning.py                     173
11 files changed, 1330 insertions, 75 deletions
diff --git a/examples/generic_associations/discriminator_on_association.py b/examples/generic_associations/discriminator_on_association.py
index e03cfec00..7bb04cf85 100644
--- a/examples/generic_associations/discriminator_on_association.py
+++ b/examples/generic_associations/discriminator_on_association.py
@@ -84,6 +84,7 @@ class HasAddresses(object):
"%sAddressAssociation" % name,
(AddressAssociation, ),
dict(
+ __tablename__=None,
__mapper_args__={
"polymorphic_identity": discriminator
}
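
The ``__tablename__=None`` entry added above tells declarative explicitly
that the dynamically generated subclass maps onto its parent's table via
single-table inheritance, rather than requiring a table of its own. A
minimal standalone sketch of that pattern, using a hypothetical
``Association`` model rather than the example's actual classes::

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()


    class Association(Base):
        __tablename__ = 'association'
        id = Column(Integer, primary_key=True)
        discriminator = Column(String(50))
        __mapper_args__ = {'polymorphic_on': discriminator}


    # build a single-table-inheritance subclass at runtime, as the
    # example does; __tablename__=None means "no new table"
    CustomerAssociation = type(
        "CustomerAssociation",
        (Association, ),
        dict(
            __tablename__=None,
            __mapper_args__={'polymorphic_identity': 'customer'}
        )
    )

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)  # only 'association' is created
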
diff --git a/examples/performance/__init__.py b/examples/performance/__init__.py
new file mode 100644
index 000000000..6264ae9f7
--- /dev/null
+++ b/examples/performance/__init__.py
@@ -0,0 +1,428 @@
+"""A performance profiling suite for a variety of SQLAlchemy use cases.
+
+Each suite focuses on a specific use case with a particular performance
+profile and associated implications:
+
+* bulk inserts
+* individual inserts, with or without transactions
+* fetching large numbers of rows
+* running lots of short queries
+
+All suites include a variety of use patterns illustrating both Core
+and ORM use, and are generally sorted in order of performance from worst
+to best, inversely proportional to the amount of functionality provided
+by SQLAlchemy, from greatest to least (these two orderings generally
+correspond).
+
+A command line tool is presented at the package level which allows
+individual suites to be run::
+
+ $ python -m examples.performance --help
+ usage: python -m examples.performance [-h] [--test TEST] [--dburl DBURL]
+ [--num NUM] [--profile] [--dump]
+ [--runsnake] [--echo]
+
+ {bulk_inserts,large_resultsets,single_inserts}
+
+ positional arguments:
+ {bulk_inserts,large_resultsets,single_inserts}
+ suite to run
+
+ optional arguments:
+ -h, --help show this help message and exit
+ --test TEST run specific test name
+ --dburl DBURL database URL, default sqlite:///profile.db
+ --num NUM Number of iterations/items/etc for tests; default is 0
+ module-specific
+ --profile run profiling and dump call counts
+ --dump dump full call profile (implies --profile)
+ --runsnake invoke runsnakerun (implies --profile)
+ --echo Echo SQL output
+
+An example run looks like::
+
+ $ python -m examples.performance bulk_inserts
+
+Or with options::
+
+ $ python -m examples.performance bulk_inserts \\
+ --dburl mysql+mysqldb://scott:tiger@localhost/test \\
+ --profile --num 1000
+
+.. seealso::
+
+ :ref:`faq_how_to_profile`
+
+File Listing
+-------------
+
+.. autosource::
+
+
+Running all tests with time
+---------------------------
+
+This is the default form of running a suite::
+
+ $ python -m examples.performance single_inserts
+ Tests to run: test_orm_commit, test_bulk_save,
+ test_bulk_insert_dictionaries, test_core,
+ test_core_query_caching, test_dbapi_raw_w_connect,
+ test_dbapi_raw_w_pool
+
+ test_orm_commit : Individual INSERT/COMMIT pairs via the
+ ORM (10000 iterations); total time 13.690218 sec
+ test_bulk_save : Individual INSERT/COMMIT pairs using
+ the "bulk" API (10000 iterations); total time 11.290371 sec
+ test_bulk_insert_dictionaries : Individual INSERT/COMMIT pairs using
+ the "bulk" API with dictionaries (10000 iterations);
+ total time 10.814626 sec
+ test_core : Individual INSERT/COMMIT pairs using Core.
+ (10000 iterations); total time 9.665620 sec
+ test_core_query_caching : Individual INSERT/COMMIT pairs using Core
+ with query caching (10000 iterations); total time 9.209010 sec
+ test_dbapi_raw_w_connect : Individual INSERT/COMMIT pairs w/ DBAPI +
+ connection each time (10000 iterations); total time 9.551103 sec
+ test_dbapi_raw_w_pool : Individual INSERT/COMMIT pairs w/ DBAPI +
+ connection pool (10000 iterations); total time 8.001813 sec
+
+Dumping Profiles for Individual Tests
+--------------------------------------
+
+A Python profile output can be dumped for all tests, or more commonly
+individual tests::
+
+ $ python -m examples.performance single_inserts --test test_core --num 1000 --dump
+ Tests to run: test_core
+ test_core : Individual INSERT/COMMIT pairs using Core. (1000 iterations); total fn calls 186109
+ 186109 function calls (186102 primitive calls) in 1.089 seconds
+
+ Ordered by: internal time, call count
+
+ ncalls tottime percall cumtime percall filename:lineno(function)
+ 1000 0.634 0.001 0.634 0.001 {method 'commit' of 'sqlite3.Connection' objects}
+ 1000 0.154 0.000 0.154 0.000 {method 'execute' of 'sqlite3.Cursor' objects}
+ 1000 0.021 0.000 0.074 0.000 /Users/classic/dev/sqlalchemy/lib/sqlalchemy/sql/compiler.py:1950(_get_colparams)
+ 1000 0.015 0.000 0.034 0.000 /Users/classic/dev/sqlalchemy/lib/sqlalchemy/engine/default.py:503(_init_compiled)
+ 1 0.012 0.012 1.091 1.091 examples/performance/single_inserts.py:79(test_core)
+
+ ...
+
+Using RunSnake
+--------------
+
+This option requires the `RunSnake <https://pypi.python.org/pypi/RunSnakeRun>`_
+command line tool to be installed::
+
+ $ python -m examples.performance single_inserts --test test_core --num 1000 --runsnake
+
+A graphical RunSnake output will be displayed.
+
+.. _examples_profiling_writeyourown:
+
+Writing your Own Suites
+-----------------------
+
+The profiler suite system is extensible, and can be applied to your own set
+of tests. This is a valuable technique to use in deciding upon the proper
+approach for some performance-critical set of routines. For example,
+if we wanted to profile the difference between several kinds of loading,
+we can create a file ``test_loads.py``, with the following content::
+
+ from examples.performance import Profiler
+ from sqlalchemy import Integer, Column, create_engine, ForeignKey
+ from sqlalchemy.orm import relationship, joinedload, subqueryload, Session
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base()
+ engine = None
+ session = None
+
+
+ class Parent(Base):
+ __tablename__ = 'parent'
+ id = Column(Integer, primary_key=True)
+ children = relationship("Child")
+
+
+ class Child(Base):
+ __tablename__ = 'child'
+ id = Column(Integer, primary_key=True)
+ parent_id = Column(Integer, ForeignKey('parent.id'))
+
+
+ # Init with name of file, default number of items
+ Profiler.init("test_loads", 1000)
+
+
+ @Profiler.setup_once
+ def setup_once(dburl, echo, num):
+ "setup once. create an engine, insert fixture data"
+ global engine
+ engine = create_engine(dburl, echo=echo)
+ Base.metadata.drop_all(engine)
+ Base.metadata.create_all(engine)
+ sess = Session(engine)
+ sess.add_all([
+ Parent(children=[Child() for j in range(100)])
+ for i in range(num)
+ ])
+ sess.commit()
+
+
+ @Profiler.setup
+ def setup(dburl, echo, num):
+ "setup per test. create a new Session."
+ global session
+ session = Session(engine)
+ # pre-connect so this part isn't profiled (if we choose)
+ session.connection()
+
+
+ @Profiler.profile
+ def test_lazyload(n):
+ "load everything, no eager loading."
+
+ for parent in session.query(Parent):
+ parent.children
+
+
+ @Profiler.profile
+ def test_joinedload(n):
+ "load everything, joined eager loading."
+
+ for parent in session.query(Parent).options(joinedload("children")):
+ parent.children
+
+
+ @Profiler.profile
+ def test_subqueryload(n):
+ "load everything, subquery eager loading."
+
+ for parent in session.query(Parent).options(subqueryload("children")):
+ parent.children
+
+ if __name__ == '__main__':
+ Profiler.main()
+
+We can run our new script directly::
+
+ $ python test_loads.py --dburl postgresql+psycopg2://scott:tiger@localhost/test
+ Running setup once...
+ Tests to run: test_lazyload, test_joinedload, test_subqueryload
+ test_lazyload : load everything, no eager loading. (1000 iterations); total time 11.971159 sec
+ test_joinedload : load everything, joined eager loading. (1000 iterations); total time 2.754592 sec
+ test_subqueryload : load everything, subquery eager loading. (1000 iterations); total time 2.977696 sec
+
+As well as see RunSnake output for an individual test::
+
+ $ python test_loads.py --num 100 --runsnake --test test_joinedload
+
+"""
+import argparse
+import cProfile
+import pstats
+import os
+import time
+import re
+import sys
+
+
+class Profiler(object):
+ tests = []
+
+ _setup = None
+ _setup_once = None
+ name = None
+ num = 0
+
+ def __init__(self, options):
+ self.test = options.test
+ self.dburl = options.dburl
+ self.runsnake = options.runsnake
+ self.profile = options.profile
+ self.dump = options.dump
+ self.callers = options.callers
+ self.num = options.num
+ self.echo = options.echo
+ self.stats = []
+
+ @classmethod
+ def init(cls, name, num):
+ cls.name = name
+ cls.num = num
+
+ @classmethod
+ def profile(cls, fn):
+ if cls.name is None:
+ raise ValueError(
+                "Need to call Profiler.init(<suitename>, <default_num>) first.")
+ cls.tests.append(fn)
+ return fn
+
+ @classmethod
+ def setup(cls, fn):
+ if cls._setup is not None:
+ raise ValueError("setup function already set to %s" % cls._setup)
+ cls._setup = staticmethod(fn)
+ return fn
+
+ @classmethod
+ def setup_once(cls, fn):
+ if cls._setup_once is not None:
+ raise ValueError(
+ "setup_once function already set to %s" % cls._setup_once)
+ cls._setup_once = staticmethod(fn)
+ return fn
+
+ def run(self):
+ if self.test:
+ tests = [fn for fn in self.tests if fn.__name__ == self.test]
+ if not tests:
+ raise ValueError("No such test: %s" % self.test)
+ else:
+ tests = self.tests
+
+ if self._setup_once:
+ print("Running setup once...")
+ self._setup_once(self.dburl, self.echo, self.num)
+ print("Tests to run: %s" % ", ".join([t.__name__ for t in tests]))
+ for test in tests:
+ self._run_test(test)
+ self.stats[-1].report()
+
+ def _run_with_profile(self, fn):
+ pr = cProfile.Profile()
+ pr.enable()
+ try:
+ result = fn(self.num)
+ finally:
+ pr.disable()
+
+ stats = pstats.Stats(pr).sort_stats('cumulative')
+
+ self.stats.append(TestResult(self, fn, stats=stats))
+ return result
+
+ def _run_with_time(self, fn):
+ now = time.time()
+ try:
+ return fn(self.num)
+ finally:
+ total = time.time() - now
+ self.stats.append(TestResult(self, fn, total_time=total))
+
+ def _run_test(self, fn):
+ if self._setup:
+ self._setup(self.dburl, self.echo, self.num)
+ if self.profile or self.runsnake or self.dump:
+ self._run_with_profile(fn)
+ else:
+ self._run_with_time(fn)
+
+ @classmethod
+ def main(cls):
+
+ parser = argparse.ArgumentParser("python -m examples.performance")
+
+ if cls.name is None:
+ parser.add_argument(
+ "name", choices=cls._suite_names(), help="suite to run")
+
+ if len(sys.argv) > 1:
+ potential_name = sys.argv[1]
+            try:
+                # pre-import the suite so that its Profiler.init() has
+                # run and set cls.num before --num's default is built
+                suite = __import__(__name__ + "." + potential_name)
+ except ImportError:
+ pass
+
+ parser.add_argument(
+ "--test", type=str,
+ help="run specific test name"
+ )
+
+ parser.add_argument(
+ '--dburl', type=str, default="sqlite:///profile.db",
+ help="database URL, default sqlite:///profile.db"
+ )
+ parser.add_argument(
+ '--num', type=int, default=cls.num,
+ help="Number of iterations/items/etc for tests; "
+ "default is %d module-specific" % cls.num
+ )
+ parser.add_argument(
+ '--profile', action='store_true',
+ help='run profiling and dump call counts')
+ parser.add_argument(
+ '--dump', action='store_true',
+ help='dump full call profile (implies --profile)')
+ parser.add_argument(
+ '--callers', action='store_true',
+ help='print callers as well (implies --dump)')
+ parser.add_argument(
+ '--runsnake', action='store_true',
+ help='invoke runsnakerun (implies --profile)')
+ parser.add_argument(
+ '--echo', action='store_true',
+ help="Echo SQL output")
+ args = parser.parse_args()
+
+ args.dump = args.dump or args.callers
+ args.profile = args.profile or args.dump or args.runsnake
+
+ if cls.name is None:
+ suite = __import__(__name__ + "." + args.name)
+
+ Profiler(args).run()
+
+ @classmethod
+ def _suite_names(cls):
+ suites = []
+ for file_ in os.listdir(os.path.dirname(__file__)):
+            match = re.match(r'^([a-z].*)\.py$', file_)
+ if match:
+ suites.append(match.group(1))
+ return suites
+
+
+class TestResult(object):
+ def __init__(self, profile, test, stats=None, total_time=None):
+ self.profile = profile
+ self.test = test
+ self.stats = stats
+ self.total_time = total_time
+
+ def report(self):
+ print(self._summary())
+ if self.profile.profile:
+ self.report_stats()
+
+ def _summary(self):
+ summary = "%s : %s (%d iterations)" % (
+ self.test.__name__, self.test.__doc__, self.profile.num)
+ if self.total_time:
+ summary += "; total time %f sec" % self.total_time
+ if self.stats:
+ summary += "; total fn calls %d" % self.stats.total_calls
+ return summary
+
+ def report_stats(self):
+ if self.profile.runsnake:
+ self._runsnake()
+ elif self.profile.dump:
+ self._dump()
+
+ def _dump(self):
+ self.stats.sort_stats('time', 'calls')
+ self.stats.print_stats()
+ if self.profile.callers:
+ self.stats.print_callers()
+
+ def _runsnake(self):
+ filename = "%s.profile" % self.test.__name__
+ try:
+ self.stats.dump_stats(filename)
+ os.system("runsnake %s" % filename)
+ finally:
+ os.remove(filename)
+
+
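
The profiling path above (``_run_with_profile`` plus ``TestResult._dump``
and ``_runsnake``) is a thin wrapper around the standard library's
``cProfile`` and ``pstats``. A standalone sketch of the same pattern, with
a placeholder ``work()`` function standing in for a test::

    import cProfile
    import pstats


    def work():
        # stand-in for a profiled test function
        return sum(i * i for i in range(100000))

    pr = cProfile.Profile()
    pr.enable()
    try:
        work()
    finally:
        pr.disable()

    stats = pstats.Stats(pr).sort_stats('cumulative')
    stats.print_stats(5)                 # like --dump, limited to 5 rows
    # stats.dump_stats('work.profile')   # file consumable by runsnake
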
diff --git a/examples/performance/__main__.py b/examples/performance/__main__.py
new file mode 100644
index 000000000..5e05143bf
--- /dev/null
+++ b/examples/performance/__main__.py
@@ -0,0 +1,7 @@
+"""Allows the examples/performance package to be run as a script."""
+
+from . import Profiler
+
+if __name__ == '__main__':
+ Profiler.main()
+
diff --git a/examples/performance/bulk_inserts.py b/examples/performance/bulk_inserts.py
new file mode 100644
index 000000000..9c3cff5b2
--- /dev/null
+++ b/examples/performance/bulk_inserts.py
@@ -0,0 +1,154 @@
+"""This series of tests illustrates different ways to INSERT a large number
+of rows in bulk.
+
+
+"""
+from . import Profiler
+
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import Column, Integer, String, create_engine, bindparam
+from sqlalchemy.orm import Session
+
+Base = declarative_base()
+engine = None
+
+
+class Customer(Base):
+ __tablename__ = "customer"
+ id = Column(Integer, primary_key=True)
+ name = Column(String(255))
+ description = Column(String(255))
+
+
+Profiler.init("bulk_inserts", num=100000)
+
+
+@Profiler.setup
+def setup_database(dburl, echo, num):
+ global engine
+ engine = create_engine(dburl, echo=echo)
+ Base.metadata.drop_all(engine)
+ Base.metadata.create_all(engine)
+
+
+@Profiler.profile
+def test_flush_no_pk(n):
+ """Individual INSERT statements via the ORM, calling upon last row id"""
+ session = Session(bind=engine)
+ for chunk in range(0, n, 1000):
+ session.add_all([
+ Customer(
+ name='customer name %d' % i,
+ description='customer description %d' % i)
+ for i in range(chunk, chunk + 1000)
+ ])
+ session.flush()
+ session.commit()
+
+
+@Profiler.profile
+def test_bulk_save_return_pks(n):
+ """Individual INSERT statements in "bulk", but calling upon last row id"""
+ session = Session(bind=engine)
+ session.bulk_save_objects([
+ Customer(
+ name='customer name %d' % i,
+ description='customer description %d' % i
+ )
+ for i in range(n)
+ ], return_defaults=True)
+ session.commit()
+
+
+@Profiler.profile
+def test_flush_pk_given(n):
+ """Batched INSERT statements via the ORM, PKs already defined"""
+ session = Session(bind=engine)
+ for chunk in range(0, n, 1000):
+ session.add_all([
+ Customer(
+ id=i + 1,
+ name='customer name %d' % i,
+ description='customer description %d' % i)
+ for i in range(chunk, chunk + 1000)
+ ])
+ session.flush()
+ session.commit()
+
+
+@Profiler.profile
+def test_bulk_save(n):
+ """Batched INSERT statements via the ORM in "bulk", discarding PKs."""
+ session = Session(bind=engine)
+ session.bulk_save_objects([
+ Customer(
+ name='customer name %d' % i,
+ description='customer description %d' % i
+ )
+ for i in range(n)
+ ])
+ session.commit()
+
+
+@Profiler.profile
+def test_bulk_insert_mappings(n):
+ """Batched INSERT statements via the ORM "bulk", using dictionaries."""
+ session = Session(bind=engine)
+ session.bulk_insert_mappings(Customer, [
+ dict(
+ name='customer name %d' % i,
+ description='customer description %d' % i
+ )
+ for i in range(n)
+ ])
+ session.commit()
+
+
+@Profiler.profile
+def test_core_insert(n):
+ """A single Core INSERT construct inserting mappings in bulk."""
+ conn = engine.connect()
+ conn.execute(
+ Customer.__table__.insert(),
+ [
+ dict(
+ name='customer name %d' % i,
+ description='customer description %d' % i
+ )
+ for i in range(n)
+ ])
+
+
+@Profiler.profile
+def test_dbapi_raw(n):
+ """The DBAPI's API inserting rows in bulk."""
+
+ conn = engine.pool._creator()
+ cursor = conn.cursor()
+ compiled = Customer.__table__.insert().values(
+ name=bindparam('name'),
+ description=bindparam('description')).\
+ compile(dialect=engine.dialect)
+
+ if compiled.positional:
+ args = (
+ ('customer name %d' % i, 'customer description %d' % i)
+ for i in range(n))
+ else:
+ args = (
+ dict(
+ name='customer name %d' % i,
+ description='customer description %d' % i
+ )
+ for i in range(n)
+ )
+
+ cursor.executemany(
+ str(compiled),
+ list(args)
+ )
+ conn.commit()
+ conn.close()
+
+if __name__ == '__main__':
+ Profiler.main()
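
The ``compiled.positional`` branch in ``test_dbapi_raw`` above exists
because DBAPIs differ in paramstyle; compiling the construct against a
dialect reveals which argument format ``executemany()`` expects. A small
sketch of that inspection, using a throwaway table and the sqlite
dialect::

    from sqlalchemy import Table, Column, Integer, String, MetaData, \
        bindparam
    from sqlalchemy.dialects import sqlite

    metadata = MetaData()
    customer = Table(
        'customer', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(255))
    )

    stmt = customer.insert().values(name=bindparam('name'))
    compiled = stmt.compile(dialect=sqlite.dialect())

    print(str(compiled))        # INSERT INTO customer (name) VALUES (?)
    print(compiled.positional)  # True; sqlite uses a positional paramstyle
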
diff --git a/examples/performance/bulk_updates.py b/examples/performance/bulk_updates.py
new file mode 100644
index 000000000..9522e4bf5
--- /dev/null
+++ b/examples/performance/bulk_updates.py
@@ -0,0 +1,54 @@
+"""This series of tests illustrates different ways to UPDATE a large number
+of rows in bulk.
+
+
+"""
+from . import Profiler
+
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import Column, Integer, String, create_engine, bindparam
+from sqlalchemy.orm import Session
+
+Base = declarative_base()
+engine = None
+
+
+class Customer(Base):
+ __tablename__ = "customer"
+ id = Column(Integer, primary_key=True)
+ name = Column(String(255))
+ description = Column(String(255))
+
+
+Profiler.init("bulk_updates", num=100000)
+
+
+@Profiler.setup
+def setup_database(dburl, echo, num):
+ global engine
+ engine = create_engine(dburl, echo=echo)
+ Base.metadata.drop_all(engine)
+ Base.metadata.create_all(engine)
+
+ s = Session(engine)
+ for chunk in range(0, num, 10000):
+ s.bulk_insert_mappings(Customer, [
+ {
+ 'name': 'customer name %d' % i,
+ 'description': 'customer description %d' % i
+ } for i in range(chunk, chunk + 10000)
+ ])
+ s.commit()
+
+
+@Profiler.profile
+def test_orm_flush(n):
+ """UPDATE statements via the ORM flush process."""
+ session = Session(bind=engine)
+ for chunk in range(0, n, 1000):
+ customers = session.query(Customer).\
+ filter(Customer.id.between(chunk, chunk + 1000)).all()
+ for customer in customers:
+ customer.description += "updated"
+ session.flush()
+ session.commit()
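
For comparison with the flush-based test above, the same release's
``Session.bulk_update_mappings()`` offers a "bulk" UPDATE path analogous
to ``bulk_insert_mappings()``. A standalone sketch with a hypothetical
model; note that each dictionary must carry the primary key::

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()


    class Customer(Base):
        __tablename__ = 'customer'
        id = Column(Integer, primary_key=True)
        description = Column(String(255))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    s = Session(engine)
    s.bulk_insert_mappings(Customer, [
        dict(id=i, description='d%d' % i) for i in range(1, 1001)
    ])
    s.commit()

    # emit UPDATE statements in bulk; primary key identifies each row
    s.bulk_update_mappings(Customer, [
        dict(id=i, description='d%d updated' % i) for i in range(1, 1001)
    ])
    s.commit()
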
diff --git a/examples/performance/large_resultsets.py b/examples/performance/large_resultsets.py
new file mode 100644
index 000000000..c13683040
--- /dev/null
+++ b/examples/performance/large_resultsets.py
@@ -0,0 +1,182 @@
+"""In this series of tests, we are looking at time to load a large number
+of very small and simple rows.
+
+A special test here illustrates the difference between fetching the
+rows from the raw DBAPI and throwing them away, vs. assembling each
+row into a completely basic Python object and appending to a list. The
+time spent typically more than doubles. The point is that while
+DBAPIs will give you raw rows very fast if they are written in C, the
+moment you do anything with those rows, even something trivial,
+overhead grows extremely fast in CPython. SQLAlchemy's Core and
+lighter-weight ORM options add absolutely minimal overhead, and the
+full-blown ORM doesn't fare badly either, even though mapped objects
+provide a huge amount of functionality.
+
+"""
+from . import Profiler
+
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import Column, Integer, String, create_engine
+from sqlalchemy.orm import Session, Bundle
+
+Base = declarative_base()
+engine = None
+
+
+class Customer(Base):
+ __tablename__ = "customer"
+ id = Column(Integer, primary_key=True)
+ name = Column(String(255))
+ description = Column(String(255))
+
+
+Profiler.init("large_resultsets", num=500000)
+
+
+@Profiler.setup_once
+def setup_database(dburl, echo, num):
+ global engine
+ engine = create_engine(dburl, echo=echo)
+ Base.metadata.drop_all(engine)
+ Base.metadata.create_all(engine)
+
+ s = Session(engine)
+ for chunk in range(0, num, 10000):
+ s.execute(
+ Customer.__table__.insert(),
+ params=[
+ {
+ 'name': 'customer name %d' % i,
+ 'description': 'customer description %d' % i
+ } for i in range(chunk, chunk + 10000)])
+ s.commit()
+
+
+@Profiler.profile
+def test_orm_full_objects_list(n):
+ """Load fully tracked ORM objects into one big list()."""
+
+ sess = Session(engine)
+ objects = list(sess.query(Customer).limit(n))
+
+
+@Profiler.profile
+def test_orm_full_objects_chunks(n):
+ """Load fully tracked ORM objects a chunk at a time using yield_per()."""
+
+ sess = Session(engine)
+ for obj in sess.query(Customer).yield_per(1000).limit(n):
+ pass
+
+
+@Profiler.profile
+def test_orm_bundles(n):
+ """Load lightweight "bundle" objects using the ORM."""
+
+ sess = Session(engine)
+ bundle = Bundle('customer',
+ Customer.id, Customer.name, Customer.description)
+ for row in sess.query(bundle).yield_per(10000).limit(n):
+ pass
+
+
+@Profiler.profile
+def test_orm_columns(n):
+ """Load individual columns into named tuples using the ORM."""
+
+ sess = Session(engine)
+ for row in sess.query(
+ Customer.id, Customer.name,
+ Customer.description).yield_per(10000).limit(n):
+ pass
+
+
+@Profiler.profile
+def test_core_fetchall(n):
+ """Load Core result rows using fetchall."""
+
+ with engine.connect() as conn:
+ result = conn.execute(Customer.__table__.select().limit(n)).fetchall()
+ for row in result:
+ data = row['id'], row['name'], row['description']
+
+
+@Profiler.profile
+def test_core_fetchmany_w_streaming(n):
+ """Load Core result rows using fetchmany/streaming."""
+
+ with engine.connect() as conn:
+ result = conn.execution_options(stream_results=True).\
+ execute(Customer.__table__.select().limit(n))
+ while True:
+ chunk = result.fetchmany(10000)
+ if not chunk:
+ break
+ for row in chunk:
+ data = row['id'], row['name'], row['description']
+
+
+@Profiler.profile
+def test_core_fetchmany(n):
+ """Load Core result rows using Core / fetchmany."""
+
+ with engine.connect() as conn:
+ result = conn.execute(Customer.__table__.select().limit(n))
+ while True:
+ chunk = result.fetchmany(10000)
+ if not chunk:
+ break
+ for row in chunk:
+ data = row['id'], row['name'], row['description']
+
+
+@Profiler.profile
+def test_dbapi_fetchall_plus_append_objects(n):
+ """Load rows using DBAPI fetchall(), generate an object for each row."""
+
+ _test_dbapi_raw(n, True)
+
+
+@Profiler.profile
+def test_dbapi_fetchall_no_object(n):
+ """Load rows using DBAPI fetchall(), don't make any objects."""
+
+ _test_dbapi_raw(n, False)
+
+
+def _test_dbapi_raw(n, make_objects):
+ compiled = Customer.__table__.select().limit(n).\
+ compile(
+ dialect=engine.dialect,
+ compile_kwargs={"literal_binds": True})
+
+ if make_objects:
+ # because if you're going to roll your own, you're probably
+ # going to do this, so see how this pushes you right back into
+ # ORM land anyway :)
+ class SimpleCustomer(object):
+ def __init__(self, id, name, description):
+ self.id = id
+ self.name = name
+ self.description = description
+
+ sql = str(compiled)
+
+ conn = engine.raw_connection()
+ cursor = conn.cursor()
+ cursor.execute(sql)
+
+ if make_objects:
+ for row in cursor.fetchall():
+ # ensure that we fully fetch!
+ customer = SimpleCustomer(
+ id=row[0], name=row[1], description=row[2])
+ else:
+ for row in cursor.fetchall():
+ # ensure that we fully fetch!
+ data = row[0], row[1], row[2]
+
+ conn.close()
+
+if __name__ == '__main__':
+ Profiler.main()
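
The docstring's claim about per-row object overhead is easy to reproduce
outside SQLAlchemy entirely. A standalone sketch using only ``sqlite3``
and ``time`` from the standard library, timing a plain ``fetchall()``
against the same fetch plus one trivial object per row::

    import sqlite3
    import time

    conn = sqlite3.connect(':memory:')
    conn.execute("CREATE TABLE t (a INTEGER, b TEXT)")
    conn.executemany(
        "INSERT INTO t VALUES (?, ?)",
        [(i, 'row %d' % i) for i in range(100000)])


    class Simple(object):
        def __init__(self, a, b):
            self.a = a
            self.b = b

    for make_objects in (False, True):
        cursor = conn.execute("SELECT a, b FROM t")
        start = time.time()
        if make_objects:
            data = [Simple(a, b) for a, b in cursor.fetchall()]
        else:
            data = cursor.fetchall()
        print(make_objects, time.time() - start)
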
diff --git a/examples/performance/short_selects.py b/examples/performance/short_selects.py
new file mode 100644
index 000000000..ef1fcff4a
--- /dev/null
+++ b/examples/performance/short_selects.py
@@ -0,0 +1,134 @@
+"""This series of tests illustrates different ways to INSERT a large number
+of rows in bulk.
+
+
+"""
+from . import Profiler
+
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import Column, Integer, String, create_engine, \
+ bindparam, select
+from sqlalchemy.orm import Session, deferred
+from sqlalchemy.ext import baked
+import random
+
+Base = declarative_base()
+engine = None
+
+ids = range(1, 11000)
+
+
+class Customer(Base):
+ __tablename__ = "customer"
+ id = Column(Integer, primary_key=True)
+ name = Column(String(255))
+ description = Column(String(255))
+ q = Column(Integer)
+ p = Column(Integer)
+ x = deferred(Column(Integer))
+ y = deferred(Column(Integer))
+ z = deferred(Column(Integer))
+
+Profiler.init("short_selects", num=10000)
+
+
+@Profiler.setup
+def setup_database(dburl, echo, num):
+ global engine
+ engine = create_engine(dburl, echo=echo)
+ Base.metadata.drop_all(engine)
+ Base.metadata.create_all(engine)
+ sess = Session(engine)
+ sess.add_all([
+ Customer(
+ id=i, name='c%d' % i, description="c%d" % i,
+ q=i * 10,
+ p=i * 20,
+ x=i * 30,
+ y=i * 40,
+ )
+ for i in ids
+ ])
+ sess.commit()
+
+
+@Profiler.profile
+def test_orm_query(n):
+ """test a straight ORM query of the full entity."""
+ session = Session(bind=engine)
+ for id_ in random.sample(ids, n):
+ session.query(Customer).filter(Customer.id == id_).one()
+
+
+@Profiler.profile
+def test_orm_query_cols_only(n):
+ """test an ORM query of only the entity columns."""
+ session = Session(bind=engine)
+ for id_ in random.sample(ids, n):
+ session.query(
+ Customer.id, Customer.name, Customer.description
+ ).filter(Customer.id == id_).one()
+
+
+@Profiler.profile
+def test_baked_query(n):
+ """test a baked query of the full entity."""
+ bakery = baked.bakery()
+ s = Session(bind=engine)
+ for id_ in random.sample(ids, n):
+ q = bakery(lambda s: s.query(Customer))
+ q += lambda q: q.filter(Customer.id == bindparam('id'))
+ q(s).params(id=id_).one()
+
+
+@Profiler.profile
+def test_baked_query_cols_only(n):
+ """test a baked query of only the entity columns."""
+ bakery = baked.bakery()
+ s = Session(bind=engine)
+ for id_ in random.sample(ids, n):
+ q = bakery(
+ lambda s: s.query(
+ Customer.id, Customer.name, Customer.description))
+ q += lambda q: q.filter(Customer.id == bindparam('id'))
+ q(s).params(id=id_).one()
+
+
+@Profiler.profile
+def test_core_new_stmt_each_time(n):
+ """test core, creating a new statement each time."""
+
+ with engine.connect() as conn:
+ for id_ in random.sample(ids, n):
+ stmt = select([Customer.__table__]).where(Customer.id == id_)
+ row = conn.execute(stmt).first()
+ tuple(row)
+
+
+@Profiler.profile
+def test_core_reuse_stmt(n):
+ """test core, reusing the same statement (but recompiling each time)."""
+
+ stmt = select([Customer.__table__]).where(Customer.id == bindparam('id'))
+ with engine.connect() as conn:
+ for id_ in random.sample(ids, n):
+
+ row = conn.execute(stmt, id=id_).first()
+ tuple(row)
+
+
+@Profiler.profile
+def test_core_reuse_stmt_compiled_cache(n):
+ """test core, reusing the same statement + compiled cache."""
+
+ compiled_cache = {}
+ stmt = select([Customer.__table__]).where(Customer.id == bindparam('id'))
+ with engine.connect().\
+ execution_options(compiled_cache=compiled_cache) as conn:
+ for id_ in random.sample(ids, n):
+ row = conn.execute(stmt, id=id_).first()
+ tuple(row)
+
+
+if __name__ == '__main__':
+ Profiler.main()
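
The baked tests above rely on the bakery caching the constructed and
compiled query, keyed on the lambdas themselves, so building the "same"
baked query inside a loop still reuses the cache. A self-contained sketch
of that usage with a hypothetical ``User`` model::

    from sqlalchemy import Column, Integer, String, bindparam, \
        create_engine
    from sqlalchemy.ext import baked
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()


    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    session = Session(engine)
    session.add(User(id=1, name='u1'))
    session.commit()

    bakery = baked.bakery()


    def get_user(session, ident):
        # the lambdas serve as cache keys; repeated calls reuse the
        # query construction and compilation work
        bq = bakery(lambda s: s.query(User))
        bq += lambda q: q.filter(User.id == bindparam('id'))
        return bq(session).params(id=ident).one()

    print(get_user(session, 1).name)  # 'u1'; later calls hit the cache
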
diff --git a/examples/performance/single_inserts.py b/examples/performance/single_inserts.py
new file mode 100644
index 000000000..cfce90300
--- /dev/null
+++ b/examples/performance/single_inserts.py
@@ -0,0 +1,166 @@
+"""In this series of tests, we're looking at a method that inserts a row
+within a distinct transaction, and afterwards returns to essentially a
+"closed" state. This would be analogous to an API call that starts up
+a database connection, inserts the row, commits and closes.
+
+"""
+from . import Profiler
+
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import Column, Integer, String, create_engine, bindparam, pool
+from sqlalchemy.orm import Session
+
+Base = declarative_base()
+engine = None
+
+
+class Customer(Base):
+ __tablename__ = "customer"
+ id = Column(Integer, primary_key=True)
+ name = Column(String(255))
+ description = Column(String(255))
+
+
+Profiler.init("single_inserts", num=10000)
+
+
+@Profiler.setup
+def setup_database(dburl, echo, num):
+ global engine
+ engine = create_engine(dburl, echo=echo)
+ if engine.dialect.name == 'sqlite':
+ engine.pool = pool.StaticPool(creator=engine.pool._creator)
+ Base.metadata.drop_all(engine)
+ Base.metadata.create_all(engine)
+
+
+@Profiler.profile
+def test_orm_commit(n):
+ """Individual INSERT/COMMIT pairs via the ORM"""
+
+ for i in range(n):
+ session = Session(bind=engine)
+ session.add(
+ Customer(
+ name='customer name %d' % i,
+ description='customer description %d' % i)
+ )
+ session.commit()
+
+
+@Profiler.profile
+def test_bulk_save(n):
+ """Individual INSERT/COMMIT pairs using the "bulk" API """
+
+ for i in range(n):
+ session = Session(bind=engine)
+ session.bulk_save_objects([
+ Customer(
+ name='customer name %d' % i,
+ description='customer description %d' % i
+ )])
+ session.commit()
+
+
+@Profiler.profile
+def test_bulk_insert_dictionaries(n):
+ """Individual INSERT/COMMIT pairs using the "bulk" API with dictionaries"""
+
+ for i in range(n):
+ session = Session(bind=engine)
+ session.bulk_insert_mappings(Customer, [
+ dict(
+ name='customer name %d' % i,
+ description='customer description %d' % i
+ )])
+ session.commit()
+
+
+@Profiler.profile
+def test_core(n):
+ """Individual INSERT/COMMIT pairs using Core."""
+
+ for i in range(n):
+ with engine.begin() as conn:
+ conn.execute(
+ Customer.__table__.insert(),
+ dict(
+ name='customer name %d' % i,
+ description='customer description %d' % i
+ )
+ )
+
+
+@Profiler.profile
+def test_core_query_caching(n):
+ """Individual INSERT/COMMIT pairs using Core with query caching"""
+
+ cache = {}
+ ins = Customer.__table__.insert()
+ for i in range(n):
+ with engine.begin() as conn:
+ conn.execution_options(compiled_cache=cache).execute(
+ ins,
+ dict(
+ name='customer name %d' % i,
+ description='customer description %d' % i
+ )
+ )
+
+
+@Profiler.profile
+def test_dbapi_raw_w_connect(n):
+ """Individual INSERT/COMMIT pairs w/ DBAPI + connection each time"""
+
+ _test_dbapi_raw(n, True)
+
+
+@Profiler.profile
+def test_dbapi_raw_w_pool(n):
+ """Individual INSERT/COMMIT pairs w/ DBAPI + connection pool"""
+
+ _test_dbapi_raw(n, False)
+
+
+def _test_dbapi_raw(n, connect):
+ compiled = Customer.__table__.insert().values(
+ name=bindparam('name'),
+ description=bindparam('description')).\
+ compile(dialect=engine.dialect)
+
+ if compiled.positional:
+ args = (
+ ('customer name %d' % i, 'customer description %d' % i)
+ for i in range(n))
+ else:
+ args = (
+ dict(
+ name='customer name %d' % i,
+ description='customer description %d' % i
+ )
+ for i in range(n)
+ )
+ sql = str(compiled)
+
+ if connect:
+ for arg in args:
+ # there's no connection pool, so if these were distinct
+ # calls, we'd be connecting each time
+ conn = engine.pool._creator()
+ cursor = conn.cursor()
+ cursor.execute(sql, arg)
+ lastrowid = cursor.lastrowid
+ conn.commit()
+ conn.close()
+ else:
+ for arg in args:
+ conn = engine.raw_connection()
+ cursor = conn.cursor()
+ cursor.execute(sql, arg)
+ lastrowid = cursor.lastrowid
+ conn.commit()
+ conn.close()
+
+
+if __name__ == '__main__':
+ Profiler.main()
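
The ``compiled_cache`` execution option used by ``test_core_query_caching``
above keys on the statement object, which is why the test constructs its
``insert()`` once up front. A small sketch of the same pattern, assuming
the cache ends up with a single entry when one construct is reused::

    from sqlalchemy import Table, Column, Integer, MetaData, create_engine

    metadata = MetaData()
    t = Table('t', metadata, Column('x', Integer))
    engine = create_engine('sqlite://')
    metadata.create_all(engine)

    cache = {}
    ins = t.insert()   # construct the statement once, as the test does
    with engine.begin() as conn:
        c = conn.execution_options(compiled_cache=cache)
        c.execute(ins, dict(x=1))
        c.execute(ins, dict(x=2))   # compiled form comes from the cache

    print(len(cache))  # 1
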
diff --git a/examples/sharding/attribute_shard.py b/examples/sharding/attribute_shard.py
index 34b1be5b2..4ce8c247f 100644
--- a/examples/sharding/attribute_shard.py
+++ b/examples/sharding/attribute_shard.py
@@ -168,7 +168,7 @@ def _get_query_comparisons(query):
elif bind.callable:
# some ORM functions (lazy loading)
# place the bind's value as a
- # callable for deferred evaulation.
+ # callable for deferred evaluation.
value = bind.callable()
else:
# just use .value
diff --git a/examples/versioned_history/history_meta.py b/examples/versioned_history/history_meta.py
index f9e979a6a..6d7b137eb 100644
--- a/examples/versioned_history/history_meta.py
+++ b/examples/versioned_history/history_meta.py
@@ -4,19 +4,22 @@ from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import mapper, attributes, object_mapper
from sqlalchemy.orm.exc import UnmappedColumnError
from sqlalchemy import Table, Column, ForeignKeyConstraint, Integer, DateTime
-from sqlalchemy import event
+from sqlalchemy import event, util
import datetime
from sqlalchemy.orm.properties import RelationshipProperty
+
def col_references_table(col, table):
for fk in col.foreign_keys:
if fk.references(table):
return True
return False
+
def _is_versioning_col(col):
return "version_meta" in col.info
+
def _history_mapper(local_mapper):
cls = local_mapper.class_
@@ -33,52 +36,77 @@ def _history_mapper(local_mapper):
super_fks = []
def _col_copy(col):
+ orig = col
col = col.copy()
+ orig.info['history_copy'] = col
col.unique = False
col.default = col.server_default = None
return col
- if not super_mapper or local_mapper.local_table is not super_mapper.local_table:
+ properties = util.OrderedDict()
+ if not super_mapper or \
+ local_mapper.local_table is not super_mapper.local_table:
cols = []
+ version_meta = {"version_meta": True} # add column.info to identify
+ # columns specific to versioning
+
for column in local_mapper.local_table.c:
if _is_versioning_col(column):
continue
col = _col_copy(column)
- if super_mapper and col_references_table(column, super_mapper.local_table):
- super_fks.append((col.key, list(super_history_mapper.local_table.primary_key)[0]))
+ if super_mapper and \
+ col_references_table(column, super_mapper.local_table):
+ super_fks.append(
+ (
+ col.key,
+ list(super_history_mapper.local_table.primary_key)[0]
+ )
+ )
cols.append(col)
if column is local_mapper.polymorphic_on:
polymorphic_on = col
- if super_mapper:
- super_fks.append(('version', super_history_mapper.local_table.c.version))
+ orig_prop = local_mapper.get_property_by_column(column)
+ # carry over column re-mappings
+ if len(orig_prop.columns) > 1 or \
+ orig_prop.columns[0].key != orig_prop.key:
+ properties[orig_prop.key] = tuple(
+ col.info['history_copy'] for col in orig_prop.columns)
- version_meta = {"version_meta": True} # add column.info to identify
- # columns specific to versioning
+ if super_mapper:
+ super_fks.append(
+ (
+ 'version', super_history_mapper.local_table.c.version
+ )
+ )
# "version" stores the integer version id. This column is
# required.
- cols.append(Column('version', Integer, primary_key=True,
- autoincrement=False, info=version_meta))
+ cols.append(
+ Column(
+ 'version', Integer, primary_key=True,
+ autoincrement=False, info=version_meta))
# "changed" column stores the UTC timestamp of when the
# history row was created.
# This column is optional and can be omitted.
- cols.append(Column('changed', DateTime,
- default=datetime.datetime.utcnow,
- info=version_meta))
+ cols.append(Column(
+ 'changed', DateTime,
+ default=datetime.datetime.utcnow,
+ info=version_meta))
if super_fks:
cols.append(ForeignKeyConstraint(*zip(*super_fks)))
- table = Table(local_mapper.local_table.name + '_history',
- local_mapper.local_table.metadata,
- *cols,
- schema=local_mapper.local_table.schema
+ table = Table(
+ local_mapper.local_table.name + '_history',
+ local_mapper.local_table.metadata,
+ *cols,
+ schema=local_mapper.local_table.schema
)
else:
# single table inheritance. take any additional columns that may have
@@ -91,24 +119,33 @@ def _history_mapper(local_mapper):
if super_history_mapper:
bases = (super_history_mapper.class_,)
+
+ if table is not None:
+ properties['changed'] = (
+ (table.c.changed, ) +
+ tuple(super_history_mapper.attrs.changed.columns)
+ )
+
else:
bases = local_mapper.base_mapper.class_.__bases__
versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})
m = mapper(
- versioned_cls,
- table,
- inherits=super_history_mapper,
- polymorphic_on=polymorphic_on,
- polymorphic_identity=local_mapper.polymorphic_identity
- )
+ versioned_cls,
+ table,
+ inherits=super_history_mapper,
+ polymorphic_on=polymorphic_on,
+ polymorphic_identity=local_mapper.polymorphic_identity,
+ properties=properties
+ )
cls.__history_mapper__ = m
if not super_history_mapper:
local_mapper.local_table.append_column(
Column('version', Integer, default=1, nullable=False)
)
- local_mapper.add_property("version", local_mapper.local_table.c.version)
+ local_mapper.add_property(
+ "version", local_mapper.local_table.c.version)
class Versioned(object):
@@ -126,6 +163,7 @@ def versioned_objects(iter):
if hasattr(obj, '__history_mapper__'):
yield obj
+
def create_version(obj, session, deleted=False):
obj_mapper = object_mapper(obj)
history_mapper = obj.__history_mapper__
@@ -137,7 +175,10 @@ def create_version(obj, session, deleted=False):
obj_changed = False
- for om, hm in zip(obj_mapper.iterate_to_root(), history_mapper.iterate_to_root()):
+ for om, hm in zip(
+ obj_mapper.iterate_to_root(),
+ history_mapper.iterate_to_root()
+ ):
if hm.single:
continue
@@ -157,11 +198,12 @@ def create_version(obj, session, deleted=False):
# in the case of single table inheritance, there may be
# columns on the mapped table intended for the subclass only.
# the "unmapped" status of the subclass column on the
- # base class is a feature of the declarative module as of sqla 0.5.2.
+ # base class is a feature of the declarative module.
continue
- # expired object attributes and also deferred cols might not be in the
- # dict. force it to load no matter what by using getattr().
+ # expired object attributes and also deferred cols might not
+ # be in the dict. force it to load no matter what by
+ # using getattr().
if prop.key not in obj_state.dict:
getattr(obj, prop.key)
@@ -182,8 +224,9 @@ def create_version(obj, session, deleted=False):
# check those too
for prop in obj_mapper.iterate_properties:
if isinstance(prop, RelationshipProperty) and \
- attributes.get_history(obj, prop.key,
- passive=attributes.PASSIVE_NO_INITIALIZE).has_changes():
+ attributes.get_history(
+ obj, prop.key,
+ passive=attributes.PASSIVE_NO_INITIALIZE).has_changes():
for p in prop.local_columns:
if p.foreign_keys:
obj_changed = True
@@ -201,6 +244,7 @@ def create_version(obj, session, deleted=False):
session.add(hist)
obj.version += 1
+
def versioned_session(session):
@event.listens_for(session, 'before_flush')
def before_flush(session, flush_context, instances):
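
Taken together with the ``Versioned`` mixin, the usage pattern this module
supports looks roughly like the following sketch (assuming
``history_meta.py`` is importable from the current directory; the
``Widget`` model is hypothetical)::

    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session
    from history_meta import Versioned, versioned_session

    Base = declarative_base()


    class Widget(Versioned, Base):
        __tablename__ = 'widget'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)  # 'widget' plus 'widget_history'

    sess = Session(engine)
    versioned_session(sess)

    w = Widget(name='w1')
    sess.add(w)
    sess.commit()        # version 1, no history row yet

    w.name = 'w2'
    sess.commit()        # history row written for version 1

    WidgetHistory = Widget.__history_mapper__.class_
    print(sess.query(WidgetHistory).count())  # 1
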
diff --git a/examples/versioned_history/test_versioning.py b/examples/versioned_history/test_versioning.py
index 874223d62..dde73a5ae 100644
--- a/examples/versioned_history/test_versioning.py
+++ b/examples/versioned_history/test_versioning.py
@@ -1,20 +1,28 @@
-"""Unit tests illustrating usage of the ``history_meta.py`` module functions."""
+"""Unit tests illustrating usage of the ``history_meta.py``
+module functions."""
from unittest import TestCase
from sqlalchemy.ext.declarative import declarative_base
from .history_meta import Versioned, versioned_session
-from sqlalchemy import create_engine, Column, Integer, String, ForeignKey, Boolean
-from sqlalchemy.orm import clear_mappers, Session, deferred, relationship
+from sqlalchemy import create_engine, Column, Integer, String, \
+ ForeignKey, Boolean, select
+from sqlalchemy.orm import clear_mappers, Session, deferred, relationship, \
+ column_property
from sqlalchemy.testing import AssertsCompiledSQL, eq_, assert_raises
from sqlalchemy.testing.entities import ComparableEntity
from sqlalchemy.orm import exc as orm_exc
+import warnings
+
+warnings.simplefilter("error")
engine = None
+
def setup_module():
global engine
engine = create_engine('sqlite://', echo=True)
+
class TestVersioning(TestCase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -52,14 +60,16 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
SomeClassHistory = SomeClass.__history_mapper__.class_
eq_(
- sess.query(SomeClassHistory).filter(SomeClassHistory.version == 1).all(),
+ sess.query(SomeClassHistory).filter(
+ SomeClassHistory.version == 1).all(),
[SomeClassHistory(version=1, name='sc1')]
)
sc.name = 'sc1modified2'
eq_(
- sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+ sess.query(SomeClassHistory).order_by(
+ SomeClassHistory.version).all(),
[
SomeClassHistory(version=1, name='sc1'),
SomeClassHistory(version=2, name='sc1modified')
@@ -76,7 +86,8 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
sess.commit()
eq_(
- sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+ sess.query(SomeClassHistory).order_by(
+ SomeClassHistory.version).all(),
[
SomeClassHistory(version=1, name='sc1'),
SomeClassHistory(version=2, name='sc1modified')
@@ -87,7 +98,8 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
sess.commit()
eq_(
- sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+ sess.query(SomeClassHistory).order_by(
+ SomeClassHistory.version).all(),
[
SomeClassHistory(version=1, name='sc1'),
SomeClassHistory(version=2, name='sc1modified'),
@@ -164,13 +176,13 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
SomeClassHistory = SomeClass.__history_mapper__.class_
eq_(
- sess.query(SomeClassHistory.boole).order_by(SomeClassHistory.id).all(),
+ sess.query(SomeClassHistory.boole).order_by(
+ SomeClassHistory.id).all(),
[(True, ), (None, )]
)
eq_(sc.version, 3)
-
def test_deferred(self):
"""test versioning of unloaded, deferred columns."""
@@ -199,11 +211,11 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
SomeClassHistory = SomeClass.__history_mapper__.class_
eq_(
- sess.query(SomeClassHistory).filter(SomeClassHistory.version == 1).all(),
+ sess.query(SomeClassHistory).filter(
+ SomeClassHistory.version == 1).all(),
[SomeClassHistory(version=1, name='sc1', data='somedata')]
)
-
def test_joined_inheritance(self):
class BaseClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'basetable'
@@ -212,12 +224,17 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
name = Column(String(50))
type = Column(String(20))
- __mapper_args__ = {'polymorphic_on': type, 'polymorphic_identity': 'base'}
+ __mapper_args__ = {
+ 'polymorphic_on': type,
+ 'polymorphic_identity': 'base'}
class SubClassSeparatePk(BaseClass):
__tablename__ = 'subtable1'
- id = Column(Integer, primary_key=True)
+ id = column_property(
+ Column(Integer, primary_key=True),
+ BaseClass.id
+ )
base_id = Column(Integer, ForeignKey('basetable.id'))
subdata1 = Column(String(50))
@@ -226,7 +243,8 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
class SubClassSamePk(BaseClass):
__tablename__ = 'subtable2'
- id = Column(Integer, ForeignKey('basetable.id'), primary_key=True)
+ id = Column(
+ Integer, ForeignKey('basetable.id'), primary_key=True)
subdata2 = Column(String(50))
__mapper_args__ = {'polymorphic_identity': 'same'}
@@ -246,38 +264,50 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
sess.commit()
BaseClassHistory = BaseClass.__history_mapper__.class_
- SubClassSeparatePkHistory = SubClassSeparatePk.__history_mapper__.class_
+ SubClassSeparatePkHistory = \
+ SubClassSeparatePk.__history_mapper__.class_
SubClassSamePkHistory = SubClassSamePk.__history_mapper__.class_
eq_(
sess.query(BaseClassHistory).order_by(BaseClassHistory.id).all(),
[
- SubClassSeparatePkHistory(id=1, name='sep1', type='sep', version=1),
+ SubClassSeparatePkHistory(
+ id=1, name='sep1', type='sep', version=1),
BaseClassHistory(id=2, name='base1', type='base', version=1),
- SubClassSamePkHistory(id=3, name='same1', type='same', version=1)
+ SubClassSamePkHistory(
+ id=3, name='same1', type='same', version=1)
]
)
same1.subdata2 = 'same1subdatamod2'
eq_(
- sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
+ sess.query(BaseClassHistory).order_by(
+ BaseClassHistory.id, BaseClassHistory.version).all(),
[
- SubClassSeparatePkHistory(id=1, name='sep1', type='sep', version=1),
+ SubClassSeparatePkHistory(
+ id=1, name='sep1', type='sep', version=1),
BaseClassHistory(id=2, name='base1', type='base', version=1),
- SubClassSamePkHistory(id=3, name='same1', type='same', version=1),
- SubClassSamePkHistory(id=3, name='same1', type='same', version=2)
+ SubClassSamePkHistory(
+ id=3, name='same1', type='same', version=1),
+ SubClassSamePkHistory(
+ id=3, name='same1', type='same', version=2)
]
)
base1.name = 'base1mod2'
eq_(
- sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
+ sess.query(BaseClassHistory).order_by(
+ BaseClassHistory.id, BaseClassHistory.version).all(),
[
- SubClassSeparatePkHistory(id=1, name='sep1', type='sep', version=1),
+ SubClassSeparatePkHistory(
+ id=1, name='sep1', type='sep', version=1),
BaseClassHistory(id=2, name='base1', type='base', version=1),
- BaseClassHistory(id=2, name='base1mod', type='base', version=2),
- SubClassSamePkHistory(id=3, name='same1', type='same', version=1),
- SubClassSamePkHistory(id=3, name='same1', type='same', version=2)
+ BaseClassHistory(
+ id=2, name='base1mod', type='base', version=2),
+ SubClassSamePkHistory(
+ id=3, name='same1', type='same', version=1),
+ SubClassSamePkHistory(
+ id=3, name='same1', type='same', version=2)
]
)
@@ -289,13 +319,17 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
name = Column(String(50))
type = Column(String(20))
- __mapper_args__ = {'polymorphic_on': type,
- 'polymorphic_identity': 'base'}
+ __mapper_args__ = {
+ 'polymorphic_on': type,
+ 'polymorphic_identity': 'base'}
class SubClass(BaseClass):
__tablename__ = 'subtable'
- id = Column(Integer, primary_key=True)
+ id = column_property(
+ Column(Integer, primary_key=True),
+ BaseClass.id
+ )
base_id = Column(Integer, ForeignKey('basetable.id'))
subdata1 = Column(String(50))
@@ -316,12 +350,18 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
q = sess.query(SubSubHistory)
self.assert_compile(
q,
+
+
"SELECT "
"subsubtable_history.id AS subsubtable_history_id, "
"subtable_history.id AS subtable_history_id, "
"basetable_history.id AS basetable_history_id, "
+ "subsubtable_history.changed AS subsubtable_history_changed, "
+ "subtable_history.changed AS subtable_history_changed, "
+ "basetable_history.changed AS basetable_history_changed, "
+
"basetable_history.name AS basetable_history_name, "
"basetable_history.type AS basetable_history_type, "
@@ -330,9 +370,6 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
"subtable_history.version AS subtable_history_version, "
"basetable_history.version AS basetable_history_version, "
- "subsubtable_history.changed AS subsubtable_history_changed, "
- "subtable_history.changed AS subtable_history_changed, "
- "basetable_history.changed AS basetable_history_changed, "
"subtable_history.base_id AS subtable_history_base_id, "
"subtable_history.subdata1 AS subtable_history_subdata1, "
@@ -342,7 +379,8 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
"ON basetable_history.id = subtable_history.base_id "
"AND basetable_history.version = subtable_history.version "
"JOIN subsubtable_history ON subtable_history.id = "
- "subsubtable_history.id AND subtable_history.version = subsubtable_history.version"
+ "subsubtable_history.id AND subtable_history.version = "
+ "subsubtable_history.version"
)
ssc = SubSubClass(name='ss1', subdata1='sd1', subdata2='sd2')
@@ -360,10 +398,53 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
[SubSubHistory(name='ss1', subdata1='sd1',
subdata2='sd2', type='subsub', version=1)]
)
- eq_(ssc, SubSubClass(name='ss1', subdata1='sd11',
- subdata2='sd22', version=2))
+ eq_(ssc, SubSubClass(
+ name='ss1', subdata1='sd11',
+ subdata2='sd22', version=2))
+
+ def test_joined_inheritance_changed(self):
+ class BaseClass(Versioned, self.Base, ComparableEntity):
+ __tablename__ = 'basetable'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ type = Column(String(20))
+
+ __mapper_args__ = {
+ 'polymorphic_on': type,
+ 'polymorphic_identity': 'base'
+ }
+
+ class SubClass(BaseClass):
+ __tablename__ = 'subtable'
+
+ id = Column(Integer, ForeignKey('basetable.id'), primary_key=True)
+
+ __mapper_args__ = {'polymorphic_identity': 'sep'}
+
+ self.create_tables()
+
+ BaseClassHistory = BaseClass.__history_mapper__.class_
+ SubClassHistory = SubClass.__history_mapper__.class_
+ sess = self.session
+ s1 = SubClass(name='s1')
+ sess.add(s1)
+ sess.commit()
+
+ s1.name = 's2'
+ sess.commit()
+ actual_changed_base = sess.scalar(
+ select([BaseClass.__history_mapper__.local_table.c.changed]))
+ actual_changed_sub = sess.scalar(
+ select([SubClass.__history_mapper__.local_table.c.changed]))
+ h1 = sess.query(BaseClassHistory).first()
+ eq_(h1.changed, actual_changed_base)
+ eq_(h1.changed, actual_changed_sub)
+ h1 = sess.query(SubClassHistory).first()
+ eq_(h1.changed, actual_changed_base)
+ eq_(h1.changed, actual_changed_sub)
def test_single_inheritance(self):
class BaseClass(Versioned, self.Base, ComparableEntity):
@@ -372,8 +453,9 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
id = Column(Integer, primary_key=True)
name = Column(String(50))
type = Column(String(50))
- __mapper_args__ = {'polymorphic_on': type,
- 'polymorphic_identity': 'base'}
+ __mapper_args__ = {
+ 'polymorphic_on': type,
+ 'polymorphic_identity': 'base'}
class SubClass(BaseClass):
@@ -396,8 +478,8 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
SubClassHistory = SubClass.__history_mapper__.class_
eq_(
- sess.query(BaseClassHistory).order_by(BaseClassHistory.id,
- BaseClassHistory.version).all(),
+ sess.query(BaseClassHistory).order_by(
+ BaseClassHistory.id, BaseClassHistory.version).all(),
[BaseClassHistory(id=1, name='b1', type='base', version=1)]
)
@@ -405,11 +487,12 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
b1.name = 'b1modified2'
eq_(
- sess.query(BaseClassHistory).order_by(BaseClassHistory.id,
- BaseClassHistory.version).all(),
+ sess.query(BaseClassHistory).order_by(
+ BaseClassHistory.id, BaseClassHistory.version).all(),
[
BaseClassHistory(id=1, name='b1', type='base', version=1),
- BaseClassHistory(id=1, name='b1modified', type='base', version=2),
+ BaseClassHistory(
+ id=1, name='b1modified', type='base', version=2),
SubClassHistory(id=2, name='s1', type='sub', version=1)
]
)
@@ -475,14 +558,16 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
assert sc.version == 2
eq_(
- sess.query(SomeClassHistory).filter(SomeClassHistory.version == 1).all(),
+ sess.query(SomeClassHistory).filter(
+ SomeClassHistory.version == 1).all(),
[SomeClassHistory(version=1, name='sc1', related_id=None)]
)
sc.related = None
eq_(
- sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+ sess.query(SomeClassHistory).order_by(
+ SomeClassHistory.version).all(),
[
SomeClassHistory(version=1, name='sc1', related_id=None),
SomeClassHistory(version=2, name='sc1', related_id=sr1.id)