summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2013-06-03 13:13:16 -0400
committerMike Bayer <mike_mp@zzzcomputing.com>2013-06-03 13:13:16 -0400
commit74c98bf182a1cac1ca1837da69e1c0550beaaab5 (patch)
tree4815d198d2aa4a6497330fb5d81e53bf4acfbb2d
parentff399ac75074916045410cedae72489cb60e8b50 (diff)
parentc2a158c137ee07a146f02e5ee89ec42e486c6a37 (diff)
downloadsqlalchemy-74c98bf182a1cac1ca1837da69e1c0550beaaab5.tar.gz
Merge branch 'master' into ticket_1068
-rw-r--r--.gitignore12
-rw-r--r--README.py3k50
-rw-r--r--doc/build/changelog/changelog_08.rst36
-rw-r--r--doc/build/changelog/changelog_09.rst39
-rw-r--r--doc/build/changelog/index.rst4
-rw-r--r--doc/build/changelog/migration_09.rst127
-rw-r--r--doc/build/conf.py6
-rw-r--r--doc/build/index.rst2
-rw-r--r--doc/build/orm/session.rst2
-rw-r--r--examples/adjacency_list/adjacency_list.py6
-rw-r--r--examples/association/basic_association.py6
-rw-r--r--examples/association/dict_of_sets_with_default.py4
-rw-r--r--examples/association/proxied_association.py10
-rw-r--r--examples/custom_attributes/listen_for_events.py2
-rw-r--r--examples/dogpile_caching/advanced.py34
-rw-r--r--examples/dogpile_caching/caching_query.py6
-rw-r--r--examples/dogpile_caching/environment.py6
-rw-r--r--examples/dogpile_caching/fixture_data.py6
-rw-r--r--examples/dogpile_caching/helloworld.py18
-rw-r--r--examples/dogpile_caching/local_session_caching.py6
-rw-r--r--examples/dogpile_caching/model.py4
-rw-r--r--examples/dogpile_caching/relation_caching.py10
-rw-r--r--examples/dynamic_dict/dynamic_dict.py176
-rw-r--r--examples/elementtree/adjacency_list.py40
-rw-r--r--examples/elementtree/optimized_al.py42
-rw-r--r--examples/generic_associations/discriminator_on_association.py4
-rw-r--r--examples/generic_associations/table_per_association.py2
-rw-r--r--examples/generic_associations/table_per_related.py4
-rw-r--r--examples/inheritance/concrete.py2
-rw-r--r--examples/inheritance/joined.py14
-rw-r--r--examples/inheritance/single.py10
-rw-r--r--examples/large_collection/large_collection.py16
-rw-r--r--examples/postgis/postgis.py2
-rw-r--r--examples/versioning/_lib.py2
-rw-r--r--examples/versioning/history_meta.py2
-rw-r--r--examples/versioning/test_versioning.py36
-rw-r--r--examples/vertical/dictlike-polymorphic.py66
-rw-r--r--examples/vertical/dictlike.py58
-rw-r--r--lib/sqlalchemy/__init__.py2
-rw-r--r--lib/sqlalchemy/connectors/mxodbc.py2
-rw-r--r--lib/sqlalchemy/connectors/pyodbc.py32
-rw-r--r--lib/sqlalchemy/dialects/firebird/base.py4
-rw-r--r--lib/sqlalchemy/dialects/informix/base.py5
-rw-r--r--lib/sqlalchemy/dialects/mssql/base.py21
-rw-r--r--lib/sqlalchemy/dialects/mssql/information_schema.py5
-rw-r--r--lib/sqlalchemy/dialects/mssql/pyodbc.py2
-rw-r--r--lib/sqlalchemy/dialects/mysql/base.py33
-rw-r--r--lib/sqlalchemy/dialects/mysql/cymysql.py10
-rw-r--r--lib/sqlalchemy/dialects/mysql/oursql.py19
-rw-r--r--lib/sqlalchemy/dialects/mysql/zxjdbc.py2
-rw-r--r--lib/sqlalchemy/dialects/oracle/base.py30
-rw-r--r--lib/sqlalchemy/dialects/oracle/cx_oracle.py87
-rw-r--r--lib/sqlalchemy/dialects/oracle/zxjdbc.py4
-rw-r--r--lib/sqlalchemy/dialects/postgresql/base.py58
-rw-r--r--lib/sqlalchemy/dialects/postgresql/hstore.py41
-rw-r--r--lib/sqlalchemy/dialects/postgresql/psycopg2.py35
-rw-r--r--lib/sqlalchemy/dialects/sqlite/base.py4
-rw-r--r--lib/sqlalchemy/dialects/sqlite/pysqlite.py6
-rw-r--r--lib/sqlalchemy/dialects/sybase/__init__.py2
-rw-r--r--lib/sqlalchemy/dialects/sybase/base.py36
-rw-r--r--lib/sqlalchemy/engine/base.py38
-rw-r--r--lib/sqlalchemy/engine/ddl.py4
-rw-r--r--lib/sqlalchemy/engine/default.py55
-rw-r--r--lib/sqlalchemy/engine/reflection.py21
-rw-r--r--lib/sqlalchemy/engine/result.py38
-rw-r--r--lib/sqlalchemy/engine/strategies.py20
-rw-r--r--lib/sqlalchemy/engine/url.py13
-rw-r--r--lib/sqlalchemy/event.py12
-rw-r--r--lib/sqlalchemy/exc.py2
-rw-r--r--lib/sqlalchemy/ext/associationproxy.py57
-rw-r--r--lib/sqlalchemy/ext/declarative/base.py4
-rw-r--r--lib/sqlalchemy/ext/declarative/clsregistry.py6
-rw-r--r--lib/sqlalchemy/ext/orderinglist.py8
-rw-r--r--lib/sqlalchemy/ext/serializer.py19
-rw-r--r--lib/sqlalchemy/orm/__init__.py2
-rw-r--r--lib/sqlalchemy/orm/attributes.py9
-rw-r--r--lib/sqlalchemy/orm/collections.py68
-rw-r--r--lib/sqlalchemy/orm/descriptor_props.py2
-rw-r--r--lib/sqlalchemy/orm/evaluator.py18
-rw-r--r--lib/sqlalchemy/orm/identity.py41
-rw-r--r--lib/sqlalchemy/orm/instrumentation.py32
-rw-r--r--lib/sqlalchemy/orm/interfaces.py9
-rw-r--r--lib/sqlalchemy/orm/loading.py12
-rw-r--r--lib/sqlalchemy/orm/mapper.py25
-rw-r--r--lib/sqlalchemy/orm/persistence.py24
-rw-r--r--lib/sqlalchemy/orm/properties.py4
-rw-r--r--lib/sqlalchemy/orm/query.py177
-rw-r--r--lib/sqlalchemy/orm/session.py12
-rw-r--r--lib/sqlalchemy/orm/strategies.py64
-rw-r--r--lib/sqlalchemy/orm/unitofwork.py4
-rw-r--r--lib/sqlalchemy/orm/util.py47
-rw-r--r--lib/sqlalchemy/pool.py12
-rw-r--r--lib/sqlalchemy/processors.py6
-rw-r--r--lib/sqlalchemy/schema.py88
-rw-r--r--lib/sqlalchemy/sql/__init__.py2
-rw-r--r--lib/sqlalchemy/sql/compiler.py37
-rw-r--r--lib/sqlalchemy/sql/expression.py106
-rw-r--r--lib/sqlalchemy/sql/functions.py3
-rw-r--r--lib/sqlalchemy/sql/operators.py17
-rw-r--r--lib/sqlalchemy/sql/util.py14
-rw-r--r--lib/sqlalchemy/sql/visitors.py12
-rw-r--r--lib/sqlalchemy/testing/__init__.py2
-rw-r--r--lib/sqlalchemy/testing/assertions.py31
-rw-r--r--lib/sqlalchemy/testing/assertsql.py10
-rw-r--r--lib/sqlalchemy/testing/engines.py39
-rw-r--r--lib/sqlalchemy/testing/entities.py4
-rw-r--r--lib/sqlalchemy/testing/exclusions.py10
-rw-r--r--lib/sqlalchemy/testing/fixtures.py16
-rw-r--r--lib/sqlalchemy/testing/plugin/noseplugin.py16
-rw-r--r--lib/sqlalchemy/testing/profiling.py10
-rw-r--r--lib/sqlalchemy/testing/schema.py4
-rw-r--r--lib/sqlalchemy/testing/suite/test_ddl.py2
-rw-r--r--lib/sqlalchemy/testing/suite/test_reflection.py4
-rw-r--r--lib/sqlalchemy/testing/suite/test_types.py21
-rw-r--r--lib/sqlalchemy/testing/util.py18
-rw-r--r--lib/sqlalchemy/testing/warnings.py2
-rw-r--r--lib/sqlalchemy/types.py107
-rw-r--r--lib/sqlalchemy/util/__init__.py7
-rw-r--r--lib/sqlalchemy/util/_collections.py87
-rw-r--r--lib/sqlalchemy/util/compat.py204
-rw-r--r--lib/sqlalchemy/util/deprecations.py2
-rw-r--r--lib/sqlalchemy/util/langhelpers.py157
-rw-r--r--sa2to3.py72
-rw-r--r--setup.cfg2
-rw-r--r--setup.py72
-rw-r--r--test/aaa_profiling/test_compiler.py2
-rw-r--r--test/aaa_profiling/test_memusage.py2
-rw-r--r--test/aaa_profiling/test_orm.py16
-rw-r--r--test/aaa_profiling/test_resultset.py7
-rw-r--r--test/aaa_profiling/test_zoomark.py104
-rw-r--r--test/aaa_profiling/test_zoomark_orm.py90
-rw-r--r--test/base/test_dependency.py6
-rw-r--r--test/base/test_events.py4
-rw-r--r--test/base/test_except.py31
-rw-r--r--test/base/test_utils.py159
-rw-r--r--test/dialect/test_firebird.py2
-rw-r--r--test/dialect/test_mssql.py36
-rw-r--r--test/dialect/test_mysql.py27
-rw-r--r--test/dialect/test_oracle.py41
-rw-r--r--test/dialect/test_postgresql.py96
-rw-r--r--test/dialect/test_sqlite.py33
-rw-r--r--test/dialect/test_sybase.py2
-rw-r--r--test/engine/test_bind.py6
-rw-r--r--test/engine/test_ddlemit.py2
-rw-r--r--test/engine/test_ddlevents.py2
-rw-r--r--test/engine/test_execute.py33
-rw-r--r--test/engine/test_parseconnect.py13
-rw-r--r--test/engine/test_pool.py26
-rw-r--r--test/engine/test_processors.py4
-rw-r--r--test/engine/test_reconnect.py29
-rw-r--r--test/engine/test_reflection.py20
-rw-r--r--test/engine/test_transaction.py20
-rw-r--r--test/ext/declarative/test_basic.py17
-rw-r--r--test/ext/declarative/test_clsregistry.py2
-rw-r--r--test/ext/declarative/test_inheritance.py2
-rw-r--r--test/ext/declarative/test_mixin.py24
-rw-r--r--test/ext/test_associationproxy.py32
-rw-r--r--test/ext/test_serializer.py23
-rw-r--r--test/orm/inheritance/test_assorted_poly.py16
-rw-r--r--test/orm/inheritance/test_basic.py10
-rw-r--r--test/orm/inheritance/test_concrete.py8
-rw-r--r--test/orm/inheritance/test_magazine.py8
-rw-r--r--test/orm/inheritance/test_manytomany.py16
-rw-r--r--test/orm/inheritance/test_polymorphic_rel.py256
-rw-r--r--test/orm/inheritance/test_productspec.py20
-rw-r--r--test/orm/inheritance/test_relationship.py2
-rw-r--r--test/orm/inheritance/test_with_poly.py22
-rw-r--r--test/orm/test_assorted_eager.py26
-rw-r--r--test/orm/test_attributes.py8
-rw-r--r--test/orm/test_collection.py18
-rw-r--r--test/orm/test_composites.py4
-rw-r--r--test/orm/test_deprecations.py2
-rw-r--r--test/orm/test_dynamic.py28
-rw-r--r--test/orm/test_eager_relations.py48
-rw-r--r--test/orm/test_evaluator.py17
-rw-r--r--test/orm/test_expire.py2
-rw-r--r--test/orm/test_froms.py364
-rw-r--r--test/orm/test_generative.py21
-rw-r--r--test/orm/test_inspect.py4
-rw-r--r--test/orm/test_instrumentation.py54
-rw-r--r--test/orm/test_joins.py36
-rw-r--r--test/orm/test_loading.py4
-rw-r--r--test/orm/test_mapper.py63
-rw-r--r--test/orm/test_merge.py6
-rw-r--r--test/orm/test_naturalpks.py8
-rw-r--r--test/orm/test_pickled.py12
-rw-r--r--test/orm/test_query.py83
-rw-r--r--test/orm/test_relationships.py26
-rw-r--r--test/orm/test_session.py12
-rw-r--r--test/orm/test_subquery_relations.py6
-rw-r--r--test/orm/test_unitofwork.py45
-rw-r--r--test/orm/test_unitofworkv2.py12
-rw-r--r--test/orm/test_update_delete.py24
-rw-r--r--test/orm/test_utils.py45
-rw-r--r--test/perf/README17
-rw-r--r--test/perf/insertspeed.py109
-rw-r--r--test/perf/large_flush.py84
-rw-r--r--test/perf/objselectspeed.py146
-rw-r--r--test/perf/objupdatespeed.py94
-rw-r--r--test/perf/orm2010.py18
-rw-r--r--test/perf/ormsession.py225
-rw-r--r--test/perf/sessions.py95
-rw-r--r--test/perf/stress_all.py226
-rw-r--r--test/perf/stresstest.py174
-rw-r--r--test/perf/threaded_compile.py75
-rw-r--r--test/profiles.txt338
-rw-r--r--test/requirements.py9
-rw-r--r--test/sql/test_compiler.py22
-rw-r--r--test/sql/test_defaults.py20
-rw-r--r--test/sql/test_functions.py4
-rw-r--r--test/sql/test_generative.py16
-rw-r--r--test/sql/test_metadata.py4
-rw-r--r--test/sql/test_query.py44
-rw-r--r--test/sql/test_quote.py10
-rw-r--r--test/sql/test_rowcount.py6
-rw-r--r--test/sql/test_selectable.py34
-rw-r--r--test/sql/test_types.py69
-rw-r--r--test/sql/test_unicode.py87
-rw-r--r--test/sql/test_update.py12
219 files changed, 3339 insertions, 4203 deletions
diff --git a/.gitignore b/.gitignore
index e04e53b46..69da39e02 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,12 +1,16 @@
*.pyc
-build/
-dist/
-docs/build/output/
-dogpile_data/
+/build/
+/dist/
+/doc/build/output/
+/dogpile_data/
*.orig
tox.ini
.venv
*.egg-info
.coverage
.*,cover
+*.class
*.so
+sqlnet.log
+/mapping_setup.py
+/test.py
diff --git a/README.py3k b/README.py3k
index 1cf9c3588..2afaeb688 100644
--- a/README.py3k
+++ b/README.py3k
@@ -2,51 +2,5 @@
PYTHON 3 SUPPORT
=================
-Current Python 3k support in SQLAlchemy is provided by a customized
-2to3 script which wraps Python's 2to3 tool.
-
-Installing Distribute
----------------------
-
-Distribute should be installed with the Python3 installation. The
-distribute bootloader is included.
-
-Running as a user with permission to modify the Python distribution,
-install Distribute:
-
- python3 distribute_setup.py
-
-
-Installing SQLAlchemy in Python 3
----------------------------------
-
-Once Distribute is installed, SQLAlchemy can be installed directly.
-The 2to3 process will kick in which takes several minutes:
-
- python3 setup.py install
-
-Converting Tests, Examples, Source to Python 3
-----------------------------------------------
-
-To convert all files in the source distribution, run
-SQLAlchemys "sa2to3.py" script, which monkeypatches a preprocessor
-onto the 2to3 tool:
-
- python3 sa2to3.py --no-diffs -w lib test examples
-
-The above will rewrite all files in-place in Python 3 format.
-
-Running Tests
--------------
-
-To run unit tests in Py3k, Nose 1.0 is required, or a development
-version of Nose that supports Python 3. The tests are run
-using ./sqla_nose.py as described in README.unittests.
-
-Current 3k Issues
------------------
-
-Current bugs and tickets related to Py3k are on the Py3k milestone in trac:
-
-http://www.sqlalchemy.org/trac/query?status=new&status=assigned&status=reopened&milestone=py3k
-
+As of SQLAlchemy 0.9, SQLAlchemy installs and runs with
+Python 3 directly, with no code changes.
diff --git a/doc/build/changelog/changelog_08.rst b/doc/build/changelog/changelog_08.rst
index fbc79b108..211668867 100644
--- a/doc/build/changelog/changelog_08.rst
+++ b/doc/build/changelog/changelog_08.rst
@@ -7,6 +7,42 @@
:version: 0.8.2
.. change::
+ :tags: feature, orm
+ :tickets: 2736
+
+ Added a new method :meth:`.Query.select_entity_from` which
+ will in 0.9 replace part of the functionality of
+ :meth:`.Query.select_from`. In 0.8, the two methods perform
+ the same function, so that code can be migrated to use the
+ :meth:`.Query.select_entity_from` method as appropriate.
+ See the 0.9 migration guide for details.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2737
+
+ Fixed a regression caused by :ticket:`2682` whereby the
+ evaluation invoked by :meth:`.Query.update` and :meth:`.Query.delete`
+ would hit upon unsupported ``True`` and ``False`` symbols
+ which now appear due to the usage of ``IS``.
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 2735
+
+ Fixed the HSTORE type to correctly encode/decode for unicode.
+ This is always on, as the hstore is a textual type, and
+ matches the behavior of psycopg2 when using Python 3.
+ Courtesy Dmitry Mugtasimov.
+
+ .. change::
+ :tags: bug, examples
+
+ Fixed a small bug in the dogpile example where the generation
+ of SQL cache keys wasn't applying deduping labels to the
+ statement the same way :class:`.Query` normally does.
+
+ .. change::
:tags: bug, engine, sybase
:tickets: 2732
diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst
new file mode 100644
index 000000000..baddf7e2a
--- /dev/null
+++ b/doc/build/changelog/changelog_09.rst
@@ -0,0 +1,39 @@
+
+==============
+0.9 Changelog
+==============
+
+.. changelog::
+ :version: 0.9.0
+
+ .. change::
+ :tags: feature, general
+ :tickets: 2671
+
+ The codebase is now "in-place" for Python
+ 2 and 3, the need to run 2to3 has been removed.
+ Compatibility is now against Python 2.6 on forward.
+
+ .. change::
+ :tags: feature, oracle, py3k
+
+ The Oracle unit tests with cx_oracle now pass
+ fully under Python 3.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2736
+
+ The "auto-aliasing" behavior of the :class:`.Query.select_from`
+ method has been turned off. The specific behavior is now
+ availble via a new method :class:`.Query.select_entity_from`.
+ The auto-aliasing behavior here was never well documented and
+ is generally not what's desired, as :class:`.Query.select_from`
+ has become more oriented towards controlling how a JOIN is
+ rendered. :class:`.Query.select_entity_from` will also be made
+ available in 0.8 so that applications which rely on the auto-aliasing
+ can shift their applications to use this method.
+
+ .. seealso::
+
+ :ref:`migration_2736` \ No newline at end of file
diff --git a/doc/build/changelog/index.rst b/doc/build/changelog/index.rst
index e17542c53..f3542d542 100644
--- a/doc/build/changelog/index.rst
+++ b/doc/build/changelog/index.rst
@@ -12,7 +12,7 @@ Current Migration Guide
.. toctree::
:maxdepth: 1
- migration_08
+ migration_09
Change logs
-----------
@@ -20,6 +20,7 @@ Change logs
.. toctree::
:maxdepth: 2
+ changelog_09
changelog_08
changelog_07
changelog_06
@@ -36,6 +37,7 @@ Older Migration Guides
.. toctree::
:maxdepth: 1
+ migration_08
migration_07
migration_06
migration_05
diff --git a/doc/build/changelog/migration_09.rst b/doc/build/changelog/migration_09.rst
new file mode 100644
index 000000000..969bfb624
--- /dev/null
+++ b/doc/build/changelog/migration_09.rst
@@ -0,0 +1,127 @@
+==============================
+What's New in SQLAlchemy 0.9?
+==============================
+
+.. admonition:: About this Document
+
+ This document describes changes between SQLAlchemy version 0.8,
+ undergoing maintenance releases as of May, 2013,
+ and SQLAlchemy version 0.9, which is expected for release
+ in late 2013.
+
+ Document date: May 29, 2013
+
+Introduction
+============
+
+This guide introduces what's new in SQLAlchemy version 0.9,
+and also documents changes which affect users migrating
+their applications from the 0.8 series of SQLAlchemy to 0.9.
+
+Version 0.9 is a faster-than-usual push from version 0.8,
+featuring a more versatile codebase with regards to modern
+Python versions. See :ref:`behavioral_changes_09` for
+potentially backwards-incompatible changes.
+
+Platform Support
+================
+
+Targeting Python 2.6 and Up Now, Python 3 without 2to3
+-------------------------------------------------------
+
+The first achievement of the 0.9 release is to remove the dependency
+on the 2to3 tool for Python 3 compatibility. To make this
+more straightforward, the lowest Python release targeted now
+is 2.6, which features a wide degree of cross-compatibility with
+Python 3. All SQLAlchemy modules and unit tests are now interpreted
+equally well with any Python interpreter from 2.6 forward, including
+the 3.1 and 3.2 interpreters.
+
+At the moment, the C extensions are still not fully ported to
+Python 3.
+
+
+.. _behavioral_changes_09:
+
+Behavioral Changes
+==================
+
+.. _migration_2736:
+
+:meth:`.Query.select_from` no longer applies the clause to corresponding entities
+---------------------------------------------------------------------------------
+
+The :meth:`.Query.select_from` method has been popularized in recent versions
+as a means of controlling the first thing that a :class:`.Query` object
+"selects from", typically for the purposes of controlling how a JOIN will
+render.
+
+Consider the following example against the usual ``User`` mapping::
+
+ select_stmt = select([User]).where(User.id == 7).alias()
+
+ q = session.query(User).\
+ join(select_stmt, User.id == select_stmt.c.id).\
+ filter(User.name == 'ed')
+
+The above statement predictably renders SQL like the following::
+
+ SELECT "user".id AS user_id, "user".name AS user_name
+ FROM "user" JOIN (SELECT "user".id AS id, "user".name AS name
+ FROM "user"
+ WHERE "user".id = :id_1) AS anon_1 ON "user".id = anon_1.id
+ WHERE "user".name = :name_1
+
+If we wanted to reverse the order of the left and right elements of the
+JOIN, the documentation would lead us to believe we could use
+:meth:`.Query.select_from` to do so::
+
+ q = session.query(User).\
+ select_from(select_stmt).\
+ join(User, User.id == select_stmt.c.id).\
+ filter(User.name == 'ed')
+
+However, in version 0.8 and earlier, the above use of :meth:`.Query.select_from`
+would apply the ``select_stmt`` to **replace** the ``User`` entity, as it
+selects from the ``user`` table which is compatible with ``User``::
+
+ -- SQLAlchemy 0.8 and earlier...
+ SELECT anon_1.id AS anon_1_id, anon_1.name AS anon_1_name
+ FROM (SELECT "user".id AS id, "user".name AS name
+ FROM "user"
+ WHERE "user".id = :id_1) AS anon_1 JOIN "user" ON anon_1.id = anon_1.id
+ WHERE anon_1.name = :name_1
+
+The above statement is a mess, the ON clause refers ``anon_1.id = anon_1.id``,
+our WHERE clause has been replaced with ``anon_1`` as well.
+
+This behavior is quite intentional, but has a different use case from that
+which has become popular for :meth:`.Query.select_from`. The above behavior
+is now available by a new method known as :meth:`.Query.select_entity_from`.
+This is a lesser used behavior that in modern SQLAlchemy is roughly equivalent
+to selecting from a customized :func:`.aliased` construct::
+
+ select_stmt = select([User]).where(User.id == 7)
+ user_from_stmt = aliased(User, select_stmt.alias())
+
+ q = session.query(user_from_stmt).filter(user_from_stmt.name == 'ed')
+
+So with SQLAlchemy 0.9, our query that selects from ``select_stmt`` produces
+the SQL we expect::
+
+ -- SQLAlchemy 0.9
+ SELECT "user".id AS user_id, "user".name AS user_name
+ FROM (SELECT "user".id AS id, "user".name AS name
+ FROM "user"
+ WHERE "user".id = :id_1) AS anon_1 JOIN "user" ON "user".id = id
+ WHERE "user".name = :name_1
+
+The :meth:`.Query.select_entity_from` method will be available in SQLAlchemy
+**0.8.2**, so applications which rely on the old behavior can transition
+to this method first, ensure all tests continue to function, then upgrade
+to 0.9 without issue.
+
+:ticket:`2736`
+
+
+
diff --git a/doc/build/conf.py b/doc/build/conf.py
index 359f7c05e..e7c116c18 100644
--- a/doc/build/conf.py
+++ b/doc/build/conf.py
@@ -81,11 +81,11 @@ copyright = u'2007-2013, the SQLAlchemy authors and contributors'
# built documents.
#
# The short X.Y version.
-version = "0.8"
+version = "0.9"
# The full version, including alpha/beta/rc tags.
-release = "0.8.1"
+release = "0.9.0"
-release_date = "April 27, 2013"
+release_date = "(not released)"
site_base = "http://www.sqlalchemy.org"
diff --git a/doc/build/index.rst b/doc/build/index.rst
index cacbf570c..c8ccc430c 100644
--- a/doc/build/index.rst
+++ b/doc/build/index.rst
@@ -11,7 +11,7 @@ A high level view and getting set up.
:ref:`Overview <overview>` |
:ref:`Installation Guide <installation>` |
-:doc:`Migration from 0.7 <changelog/migration_08>` |
+:doc:`Migration from 0.8 <changelog/migration_09>` |
:doc:`Glossary <glossary>` |
:doc:`Changelog catalog <changelog/index>`
diff --git a/doc/build/orm/session.rst b/doc/build/orm/session.rst
index 6774af2d9..3e35f02cb 100644
--- a/doc/build/orm/session.rst
+++ b/doc/build/orm/session.rst
@@ -1946,7 +1946,7 @@ Session Utilites
.. autofunction:: object_session
-.. autofunction:: was_deleted
+.. autofunction:: sqlalchemy.orm.util.was_deleted
Attribute and State Management Utilities
-----------------------------------------
diff --git a/examples/adjacency_list/adjacency_list.py b/examples/adjacency_list/adjacency_list.py
index 1020cc57d..a0683ea0c 100644
--- a/examples/adjacency_list/adjacency_list.py
+++ b/examples/adjacency_list/adjacency_list.py
@@ -55,9 +55,9 @@ if __name__ == '__main__':
def msg(msg, *args):
msg = msg % args
- print "\n\n\n" + "-" * len(msg.split("\n")[0])
- print msg
- print "-" * len(msg.split("\n")[0])
+ print("\n\n\n" + "-" * len(msg.split("\n")[0]))
+ print(msg)
+ print("-" * len(msg.split("\n")[0]))
msg("Creating Tree Table:")
diff --git a/examples/association/basic_association.py b/examples/association/basic_association.py
index 29a473fce..a175b1b89 100644
--- a/examples/association/basic_association.py
+++ b/examples/association/basic_association.py
@@ -83,12 +83,12 @@ if __name__ == '__main__':
# query the order, print items
order = session.query(Order).filter_by(customer_name='john smith').one()
- print [(order_item.item.description, order_item.price)
- for order_item in order.order_items]
+ print([(order_item.item.description, order_item.price)
+ for order_item in order.order_items])
# print customers who bought 'MySQL Crowbar' on sale
q = session.query(Order).join('order_items', 'item')
q = q.filter(and_(Item.description == 'MySQL Crowbar',
Item.price > OrderItem.price))
- print [order.customer_name for order in q]
+ print([order.customer_name for order in q])
diff --git a/examples/association/dict_of_sets_with_default.py b/examples/association/dict_of_sets_with_default.py
index 9a43e300c..f541727e7 100644
--- a/examples/association/dict_of_sets_with_default.py
+++ b/examples/association/dict_of_sets_with_default.py
@@ -75,13 +75,13 @@ if __name__ == '__main__':
session.commit()
a1 = session.query(A).first()
- print a1.collections["1"]
+ print(a1.collections["1"])
a1.collections["1"].add(4)
session.commit()
a1.collections["2"].update([7, 8, 9])
session.commit()
- print a1.collections["2"]
+ print(a1.collections["2"])
diff --git a/examples/association/proxied_association.py b/examples/association/proxied_association.py
index 7f4d611a7..4cf1c51be 100644
--- a/examples/association/proxied_association.py
+++ b/examples/association/proxied_association.py
@@ -86,16 +86,16 @@ if __name__ == '__main__':
order = session.query(Order).filter_by(customer_name='john smith').one()
# print items based on the OrderItem collection directly
- print [(assoc.item.description, assoc.price, assoc.item.price)
- for assoc in order.order_items]
+ print([(assoc.item.description, assoc.price, assoc.item.price)
+ for assoc in order.order_items])
# print items based on the "proxied" items collection
- print [(item.description, item.price)
- for item in order.items]
+ print([(item.description, item.price)
+ for item in order.items])
# print customers who bought 'MySQL Crowbar' on sale
orders = session.query(Order).\
join('order_items', 'item').\
filter(Item.description == 'MySQL Crowbar').\
filter(Item.price > OrderItem.price)
- print [o.customer_name for o in orders]
+ print([o.customer_name for o in orders])
diff --git a/examples/custom_attributes/listen_for_events.py b/examples/custom_attributes/listen_for_events.py
index 4cdf4b056..82bc860fa 100644
--- a/examples/custom_attributes/listen_for_events.py
+++ b/examples/custom_attributes/listen_for_events.py
@@ -34,7 +34,7 @@ if __name__ == '__main__':
if oldvalue:
s += "which replaced the value '%s', " % oldvalue
s += "on object %s" % self
- print s
+ print(s)
Base = declarative_base(cls=Base)
diff --git a/examples/dogpile_caching/advanced.py b/examples/dogpile_caching/advanced.py
index 6bfacfcf0..f1a18a4d7 100644
--- a/examples/dogpile_caching/advanced.py
+++ b/examples/dogpile_caching/advanced.py
@@ -6,9 +6,9 @@ and collection caching.
"""
-from environment import Session
-from model import Person, Address, cache_address_bits
-from caching_query import FromCache, RelationshipCache
+from .environment import Session
+from .model import Person, Address, cache_address_bits
+from .caching_query import FromCache, RelationshipCache
from sqlalchemy.orm import joinedload
def load_name_range(start, end, invalidate=False):
@@ -49,31 +49,31 @@ def load_name_range(start, end, invalidate=False):
return q.all()
-print "two through twelve, possibly from cache:\n"
-print ", ".join([p.name for p in load_name_range(2, 12)])
+print("two through twelve, possibly from cache:\n")
+print(", ".join([p.name for p in load_name_range(2, 12)]))
-print "\ntwenty five through forty, possibly from cache:\n"
-print ", ".join([p.name for p in load_name_range(25, 40)])
+print("\ntwenty five through forty, possibly from cache:\n")
+print(", ".join([p.name for p in load_name_range(25, 40)]))
# loading them again, no SQL is emitted
-print "\ntwo through twelve, from the cache:\n"
-print ", ".join([p.name for p in load_name_range(2, 12)])
+print("\ntwo through twelve, from the cache:\n")
+print(", ".join([p.name for p in load_name_range(2, 12)]))
# but with invalidate, they are
-print "\ntwenty five through forty, invalidate first:\n"
-print ", ".join([p.name for p in load_name_range(25, 40, True)])
+print("\ntwenty five through forty, invalidate first:\n")
+print(", ".join([p.name for p in load_name_range(25, 40, True)]))
# illustrate the address loading from either cache/already
# on the Person
-print "\n\nPeople plus addresses, two through twelve, addresses possibly from cache"
+print("\n\nPeople plus addresses, two through twelve, addresses possibly from cache")
for p in load_name_range(2, 12):
- print p.format_full()
+ print(p.format_full())
# illustrate the address loading from either cache/already
# on the Person
-print "\n\nPeople plus addresses, two through twelve, addresses from cache"
+print("\n\nPeople plus addresses, two through twelve, addresses from cache")
for p in load_name_range(2, 12):
- print p.format_full()
+ print(p.format_full())
-print "\n\nIf this was the first run of advanced.py, try "\
- "a second run. Only one SQL statement will be emitted."
+print("\n\nIf this was the first run of advanced.py, try "\
+ "a second run. Only one SQL statement will be emitted.")
diff --git a/examples/dogpile_caching/caching_query.py b/examples/dogpile_caching/caching_query.py
index f4724fb0b..7fe84bede 100644
--- a/examples/dogpile_caching/caching_query.py
+++ b/examples/dogpile_caching/caching_query.py
@@ -136,15 +136,15 @@ def _key_from_query(query, qualifier=None):
"""
- stmt = query.statement
+ stmt = query.with_labels().statement
compiled = stmt.compile()
params = compiled.params
# here we return the key as a long string. our "key mangler"
# set up with the region will boil it down to an md5.
return " ".join(
- [unicode(compiled)] +
- [unicode(params[k]) for k in sorted(params)])
+ [str(compiled)] +
+ [str(params[k]) for k in sorted(params)])
class FromCache(MapperOption):
"""Specifies that a Query should load results from a cache."""
diff --git a/examples/dogpile_caching/environment.py b/examples/dogpile_caching/environment.py
index f210d26ac..36b9585b2 100644
--- a/examples/dogpile_caching/environment.py
+++ b/examples/dogpile_caching/environment.py
@@ -4,7 +4,7 @@ Establish data / cache file paths, and configurations,
bootstrap fixture data if necessary.
"""
-import caching_query
+from . import caching_query
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
@@ -31,7 +31,7 @@ Base = declarative_base()
root = "./dogpile_data/"
if not os.path.exists(root):
- raw_input("Will create datafiles in %r.\n"
+ input("Will create datafiles in %r.\n"
"To reset the cache + database, delete this directory.\n"
"Press enter to continue.\n" % root
)
@@ -77,7 +77,7 @@ installed = False
def bootstrap():
global installed
- import fixture_data
+ from . import fixture_data
if not os.path.exists(dbfile):
fixture_data.install()
installed = True \ No newline at end of file
diff --git a/examples/dogpile_caching/fixture_data.py b/examples/dogpile_caching/fixture_data.py
index 1db75ea05..f93f32fd9 100644
--- a/examples/dogpile_caching/fixture_data.py
+++ b/examples/dogpile_caching/fixture_data.py
@@ -5,8 +5,8 @@ Canadian cities. Then, 100 Person records are installed, each with a
randomly selected postal code.
"""
-from environment import Session, Base
-from model import City, Country, PostalCode, Person, Address
+from .environment import Session, Base
+from .model import City, Country, PostalCode, Person, Address
import random
def install():
@@ -35,7 +35,7 @@ def install():
Session.add_all(pc)
all_post_codes.extend(pc)
- for i in xrange(1, 51):
+ for i in range(1, 51):
person = Person(
"person %.2d" % i,
Address(
diff --git a/examples/dogpile_caching/helloworld.py b/examples/dogpile_caching/helloworld.py
index e2e4d4f78..4561097b6 100644
--- a/examples/dogpile_caching/helloworld.py
+++ b/examples/dogpile_caching/helloworld.py
@@ -4,12 +4,12 @@ Illustrate how to load some data, and cache the results.
"""
-from environment import Session
-from model import Person
-from caching_query import FromCache
+from .environment import Session
+from .model import Person
+from .caching_query import FromCache
# load Person objects. cache the result under the namespace "all_people".
-print "loading people...."
+print("loading people....")
people = Session.query(Person).options(FromCache("default")).all()
# remove the Session. next query starts from scratch.
@@ -17,12 +17,12 @@ Session.remove()
# load again, using the same FromCache option. now they're cached
# under "all_people", no SQL is emitted.
-print "loading people....again!"
+print("loading people....again!")
people = Session.query(Person).options(FromCache("default")).all()
# want to load on some different kind of query ? change the namespace
# you send to FromCache
-print "loading people two through twelve"
+print("loading people two through twelve")
people_two_through_twelve = Session.query(Person).\
options(FromCache("default")).\
filter(Person.name.between("person 02", "person 12")).\
@@ -32,7 +32,7 @@ people_two_through_twelve = Session.query(Person).\
# the bind parameters of the query. So this query, having
# different literal parameters under "Person.name.between()" than the
# previous one, issues new SQL...
-print "loading people five through fifteen"
+print("loading people five through fifteen")
people_five_through_fifteen = Session.query(Person).\
options(FromCache("default")).\
filter(Person.name.between("person 05", "person 15")).\
@@ -40,7 +40,7 @@ people_five_through_fifteen = Session.query(Person).\
# ... but using the same params as are already cached, no SQL
-print "loading people two through twelve...again!"
+print("loading people two through twelve...again!")
people_two_through_twelve = Session.query(Person).\
options(FromCache("default")).\
filter(Person.name.between("person 02", "person 12")).\
@@ -51,7 +51,7 @@ people_two_through_twelve = Session.query(Person).\
# each Query, which includes at the very least the same FromCache,
# same list of objects to be loaded, and the same parameters in the
# same order, then call invalidate().
-print "invalidating everything"
+print("invalidating everything")
Session.query(Person).options(FromCache("default")).invalidate()
Session.query(Person).\
options(FromCache("default")).\
diff --git a/examples/dogpile_caching/local_session_caching.py b/examples/dogpile_caching/local_session_caching.py
index 383b31c11..cf0083d2e 100644
--- a/examples/dogpile_caching/local_session_caching.py
+++ b/examples/dogpile_caching/local_session_caching.py
@@ -53,8 +53,8 @@ register_backend("sqlalchemy.session", __name__, "ScopedSessionBackend")
if __name__ == '__main__':
- from environment import Session, regions
- from caching_query import FromCache
+ from .environment import Session, regions
+ from .caching_query import FromCache
from dogpile.cache import make_region
# set up a region based on the ScopedSessionBackend,
@@ -67,7 +67,7 @@ if __name__ == '__main__':
}
)
- from model import Person
+ from .model import Person
# query to load Person by name, with criterion
# of "person 10"
diff --git a/examples/dogpile_caching/model.py b/examples/dogpile_caching/model.py
index 6f1cffedf..622d31e6a 100644
--- a/examples/dogpile_caching/model.py
+++ b/examples/dogpile_caching/model.py
@@ -10,8 +10,8 @@ City --(has a)--> Country
"""
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship
-from caching_query import FromCache, RelationshipCache
-from environment import Base, bootstrap
+from .caching_query import FromCache, RelationshipCache
+from .environment import Base, bootstrap
class Country(Base):
__tablename__ = 'country'
diff --git a/examples/dogpile_caching/relation_caching.py b/examples/dogpile_caching/relation_caching.py
index 7a5779620..d40752e48 100644
--- a/examples/dogpile_caching/relation_caching.py
+++ b/examples/dogpile_caching/relation_caching.py
@@ -5,16 +5,16 @@ related PostalCode, City, Country objects should be pulled from long
term cache.
"""
-from environment import Session, root
-from model import Person, cache_address_bits
+from .environment import Session, root
+from .model import Person, cache_address_bits
from sqlalchemy.orm import joinedload
import os
for p in Session.query(Person).options(joinedload(Person.addresses), cache_address_bits):
- print p.format_full()
+ print(p.format_full())
-print "\n\nIf this was the first run of relationship_caching.py, SQL was likely emitted to "\
+print("\n\nIf this was the first run of relationship_caching.py, SQL was likely emitted to "\
"load postal codes, cities, countries.\n"\
"If run a second time, assuming the cache is still valid, "\
"only a single SQL statement will run - all "\
@@ -22,4 +22,4 @@ print "\n\nIf this was the first run of relationship_caching.py, SQL was likely
"To clear the cache, delete the file %r. \n"\
"This will cause a re-load of cities, postal codes and countries on "\
"the next run.\n"\
- % os.path.join(root, 'cache.dbm')
+ % os.path.join(root, 'cache.dbm'))
diff --git a/examples/dynamic_dict/dynamic_dict.py b/examples/dynamic_dict/dynamic_dict.py
index ec7c8e918..530674f2e 100644
--- a/examples/dynamic_dict/dynamic_dict.py
+++ b/examples/dynamic_dict/dynamic_dict.py
@@ -1,88 +1,88 @@
-class ProxyDict(object):
- def __init__(self, parent, collection_name, childclass, keyname):
- self.parent = parent
- self.collection_name = collection_name
- self.childclass = childclass
- self.keyname = keyname
-
- @property
- def collection(self):
- return getattr(self.parent, self.collection_name)
-
- def keys(self):
- descriptor = getattr(self.childclass, self.keyname)
- return [x[0] for x in self.collection.values(descriptor)]
-
- def __getitem__(self, key):
- x = self.collection.filter_by(**{self.keyname:key}).first()
- if x:
- return x
- else:
- raise KeyError(key)
-
- def __setitem__(self, key, value):
- try:
- existing = self[key]
- self.collection.remove(existing)
- except KeyError:
- pass
- self.collection.append(value)
-
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
-from sqlalchemy.orm import sessionmaker, relationship
-
-engine = create_engine('sqlite://', echo=True)
-Base = declarative_base(engine)
-
-class Parent(Base):
- __tablename__ = 'parent'
- id = Column(Integer, primary_key=True)
- name = Column(String(50))
- _collection = relationship("Child", lazy="dynamic",
- cascade="all, delete-orphan")
-
- @property
- def child_map(self):
- return ProxyDict(self, '_collection', Child, 'key')
-
-class Child(Base):
- __tablename__ = 'child'
- id = Column(Integer, primary_key=True)
- key = Column(String(50))
- parent_id = Column(Integer, ForeignKey('parent.id'))
-
- def __repr__(self):
- return "Child(key=%r)" % self.key
-
-Base.metadata.create_all()
-
-sess = sessionmaker()()
-
-p1 = Parent(name='p1')
-sess.add(p1)
-
-print "\n---------begin setting nodes, autoflush occurs\n"
-p1.child_map['k1'] = Child(key='k1')
-p1.child_map['k2'] = Child(key='k2')
-
-# this will autoflush the current map.
-# ['k1', 'k2']
-print "\n---------print keys - flushes first\n"
-print p1.child_map.keys()
-
-# k1
-print "\n---------print 'k1' node\n"
-print p1.child_map['k1']
-
-print "\n---------update 'k2' node - must find existing, and replace\n"
-p1.child_map['k2'] = Child(key='k2')
-
-print "\n---------print 'k2' key - flushes first\n"
-# k2
-print p1.child_map['k2']
-
-print "\n---------print all child nodes\n"
-# [k1, k2b]
-print sess.query(Child).all()
-
+class ProxyDict(object):
+ def __init__(self, parent, collection_name, childclass, keyname):
+ self.parent = parent
+ self.collection_name = collection_name
+ self.childclass = childclass
+ self.keyname = keyname
+
+ @property
+ def collection(self):
+ return getattr(self.parent, self.collection_name)
+
+ def keys(self):
+ descriptor = getattr(self.childclass, self.keyname)
+ return [x[0] for x in self.collection.values(descriptor)]
+
+ def __getitem__(self, key):
+ x = self.collection.filter_by(**{self.keyname:key}).first()
+ if x:
+ return x
+ else:
+ raise KeyError(key)
+
+ def __setitem__(self, key, value):
+ try:
+ existing = self[key]
+ self.collection.remove(existing)
+ except KeyError:
+ pass
+ self.collection.append(value)
+
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
+from sqlalchemy.orm import sessionmaker, relationship
+
+engine = create_engine('sqlite://', echo=True)
+Base = declarative_base(engine)
+
+class Parent(Base):
+ __tablename__ = 'parent'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ _collection = relationship("Child", lazy="dynamic",
+ cascade="all, delete-orphan")
+
+ @property
+ def child_map(self):
+ return ProxyDict(self, '_collection', Child, 'key')
+
+class Child(Base):
+ __tablename__ = 'child'
+ id = Column(Integer, primary_key=True)
+ key = Column(String(50))
+ parent_id = Column(Integer, ForeignKey('parent.id'))
+
+ def __repr__(self):
+ return "Child(key=%r)" % self.key
+
+Base.metadata.create_all()
+
+sess = sessionmaker()()
+
+p1 = Parent(name='p1')
+sess.add(p1)
+
+print("\n---------begin setting nodes, autoflush occurs\n")
+p1.child_map['k1'] = Child(key='k1')
+p1.child_map['k2'] = Child(key='k2')
+
+# this will autoflush the current map.
+# ['k1', 'k2']
+print("\n---------print keys - flushes first\n")
+print(list(p1.child_map.keys()))
+
+# k1
+print("\n---------print 'k1' node\n")
+print(p1.child_map['k1'])
+
+print("\n---------update 'k2' node - must find existing, and replace\n")
+p1.child_map['k2'] = Child(key='k2')
+
+print("\n---------print 'k2' key - flushes first\n")
+# k2
+print(p1.child_map['k2'])
+
+print("\n---------print all child nodes\n")
+# [k1, k2b]
+print(sess.query(Child).all())
+
diff --git a/examples/elementtree/adjacency_list.py b/examples/elementtree/adjacency_list.py
index 3b9e4c523..a3ad42778 100644
--- a/examples/elementtree/adjacency_list.py
+++ b/examples/elementtree/adjacency_list.py
@@ -11,7 +11,7 @@ from sqlalchemy import (MetaData, Table, Column, Integer, String, ForeignKey,
Unicode, and_, create_engine)
from sqlalchemy.orm import mapper, relationship, Session, lazyload
-import sys, os, StringIO, re
+import sys, os, io, re
from xml.etree import ElementTree
@@ -56,7 +56,7 @@ class Document(object):
self.element = element
def __str__(self):
- buf = StringIO.StringIO()
+ buf = io.StringIO()
self.element.write(buf)
return buf.getvalue()
@@ -120,11 +120,11 @@ class ElementTreeMarshal(object):
def __set__(self, document, element):
def traverse(node):
n = _Node()
- n.tag = unicode(node.tag)
- n.text = unicode(node.text)
- n.tail = unicode(node.tail)
+ n.tag = str(node.tag)
+ n.text = str(node.text)
+ n.tail = str(node.tail)
n.children = [traverse(n2) for n2 in node]
- n.attributes = [_Attribute(unicode(k), unicode(v)) for k, v in node.attrib.iteritems()]
+ n.attributes = [_Attribute(str(k), str(v)) for k, v in node.attrib.items()]
return n
document._root = traverse(element.getroot())
@@ -150,23 +150,23 @@ for file in ('test.xml', 'test2.xml', 'test3.xml'):
doc = ElementTree.parse(filename)
session.add(Document(file, doc))
-print "\nSaving three documents...", line
+print("\nSaving three documents...", line)
session.commit()
-print "Done."
+print("Done.")
-print "\nFull text of document 'text.xml':", line
+print("\nFull text of document 'text.xml':", line)
document = session.query(Document).filter_by(filename="test.xml").first()
-print document
+print(document)
############################################ PART VI - Searching for Paths #########################
# manually search for a document which contains "/somefile/header/field1:hi"
-d = session.query(Document).join('_root', aliased=True).filter(_Node.tag==u'somefile').\
- join('children', aliased=True, from_joinpoint=True).filter(_Node.tag==u'header').\
+d = session.query(Document).join('_root', aliased=True).filter(_Node.tag=='somefile').\
+ join('children', aliased=True, from_joinpoint=True).filter(_Node.tag=='header').\
join('children', aliased=True, from_joinpoint=True).filter(
- and_(_Node.tag==u'field1', _Node.text==u'hi')).one()
-print d
+ and_(_Node.tag=='field1', _Node.text=='hi')).one()
+print(d)
# generalize the above approach into an extremely impoverished xpath function:
def find_document(path, compareto):
@@ -188,11 +188,11 @@ def find_document(path, compareto):
return query.options(lazyload('_root')).filter(_Node.text==compareto).all()
for path, compareto in (
- (u'/somefile/header/field1', u'hi'),
- (u'/somefile/field1', u'hi'),
- (u'/somefile/header/field2', u'there'),
- (u'/somefile/header/field2[@attr=foo]', u'there')
+ ('/somefile/header/field1', 'hi'),
+ ('/somefile/field1', 'hi'),
+ ('/somefile/header/field2', 'there'),
+ ('/somefile/header/field2[@attr=foo]', 'there')
):
- print "\nDocuments containing '%s=%s':" % (path, compareto), line
- print [d.filename for d in find_document(path, compareto)]
+ print("\nDocuments containing '%s=%s':" % (path, compareto), line)
+ print([d.filename for d in find_document(path, compareto)])
diff --git a/examples/elementtree/optimized_al.py b/examples/elementtree/optimized_al.py
index 1cec61366..1dbad0943 100644
--- a/examples/elementtree/optimized_al.py
+++ b/examples/elementtree/optimized_al.py
@@ -10,7 +10,7 @@ from sqlalchemy import (MetaData, Table, Column, Integer, String, ForeignKey,
Unicode, and_, create_engine)
from sqlalchemy.orm import mapper, relationship, Session, lazyload
-import sys, os, StringIO, re
+import sys, os, io, re
from xml.etree import ElementTree
@@ -55,7 +55,7 @@ class Document(object):
self.element = element
def __str__(self):
- buf = StringIO.StringIO()
+ buf = io.StringIO()
self.element.write(buf)
return buf.getvalue()
@@ -127,12 +127,12 @@ class ElementTreeMarshal(object):
def __set__(self, document, element):
def traverse(node):
n = _Node()
- n.tag = unicode(node.tag)
- n.text = unicode(node.text)
- n.tail = unicode(node.tail)
+ n.tag = str(node.tag)
+ n.text = str(node.text)
+ n.tail = str(node.tail)
document._nodes.append(n)
n.children = [traverse(n2) for n2 in node]
- n.attributes = [_Attribute(unicode(k), unicode(v)) for k, v in node.attrib.iteritems()]
+ n.attributes = [_Attribute(str(k), str(v)) for k, v in node.attrib.items()]
return n
traverse(element.getroot())
@@ -158,27 +158,27 @@ for file in ('test.xml', 'test2.xml', 'test3.xml'):
doc = ElementTree.parse(filename)
session.add(Document(file, doc))
-print "\nSaving three documents...", line
+print("\nSaving three documents...", line)
session.commit()
-print "Done."
+print("Done.")
-print "\nFull text of document 'text.xml':", line
+print("\nFull text of document 'text.xml':", line)
document = session.query(Document).filter_by(filename="test.xml").first()
-print document
+print(document)
######################## PART VI - Searching for Paths #######################
# manually search for a document which contains "/somefile/header/field1:hi"
-print "\nManual search for /somefile/header/field1=='hi':", line
+print("\nManual search for /somefile/header/field1=='hi':", line)
d = session.query(Document).join('_nodes', aliased=True).\
- filter(and_(_Node.parent_id==None, _Node.tag==u'somefile')).\
+ filter(and_(_Node.parent_id==None, _Node.tag=='somefile')).\
join('children', aliased=True, from_joinpoint=True).\
- filter(_Node.tag==u'header').\
+ filter(_Node.tag=='header').\
join('children', aliased=True, from_joinpoint=True).\
- filter(and_(_Node.tag==u'field1', _Node.text==u'hi')).\
+ filter(and_(_Node.tag=='field1', _Node.text=='hi')).\
one()
-print d
+print(d)
# generalize the above approach into an extremely impoverished xpath function:
def find_document(path, compareto):
@@ -203,11 +203,11 @@ def find_document(path, compareto):
return query.options(lazyload('_nodes')).filter(_Node.text==compareto).all()
for path, compareto in (
- (u'/somefile/header/field1', u'hi'),
- (u'/somefile/field1', u'hi'),
- (u'/somefile/header/field2', u'there'),
- (u'/somefile/header/field2[@attr=foo]', u'there')
+ ('/somefile/header/field1', 'hi'),
+ ('/somefile/field1', 'hi'),
+ ('/somefile/header/field2', 'there'),
+ ('/somefile/header/field2[@attr=foo]', 'there')
):
- print "\nDocuments containing '%s=%s':" % (path, compareto), line
- print [d.filename for d in find_document(path, compareto)]
+ print("\nDocuments containing '%s=%s':" % (path, compareto), line)
+ print([d.filename for d in find_document(path, compareto)])
diff --git a/examples/generic_associations/discriminator_on_association.py b/examples/generic_associations/discriminator_on_association.py
index 3c170d5c8..7b4565a85 100644
--- a/examples/generic_associations/discriminator_on_association.py
+++ b/examples/generic_associations/discriminator_on_association.py
@@ -144,5 +144,5 @@ session.commit()
for customer in session.query(Customer):
for address in customer.addresses:
- print address
- print address.parent \ No newline at end of file
+ print(address)
+ print(address.parent) \ No newline at end of file
diff --git a/examples/generic_associations/table_per_association.py b/examples/generic_associations/table_per_association.py
index e1ff2be5b..84e85de2f 100644
--- a/examples/generic_associations/table_per_association.py
+++ b/examples/generic_associations/table_per_association.py
@@ -102,5 +102,5 @@ session.commit()
for customer in session.query(Customer):
for address in customer.addresses:
- print address
+ print(address)
# no parent here \ No newline at end of file
diff --git a/examples/generic_associations/table_per_related.py b/examples/generic_associations/table_per_related.py
index 693908189..0ec5f29b0 100644
--- a/examples/generic_associations/table_per_related.py
+++ b/examples/generic_associations/table_per_related.py
@@ -103,5 +103,5 @@ session.commit()
for customer in session.query(Customer):
for address in customer.addresses:
- print address
- print address.parent \ No newline at end of file
+ print(address)
+ print(address.parent) \ No newline at end of file
diff --git a/examples/inheritance/concrete.py b/examples/inheritance/concrete.py
index 75741df6d..b05afa5ea 100644
--- a/examples/inheritance/concrete.py
+++ b/examples/inheritance/concrete.py
@@ -68,5 +68,5 @@ session.add(e1)
session.add(e2)
session.commit()
-print session.query(Employee).all()
+print(session.query(Employee).all())
diff --git a/examples/inheritance/joined.py b/examples/inheritance/joined.py
index 4d3dc08d0..c6ce37146 100644
--- a/examples/inheritance/joined.py
+++ b/examples/inheritance/joined.py
@@ -92,10 +92,10 @@ session.commit()
c = session.query(Company).get(1)
for e in c.employees:
- print e, inspect(e).key, e.company
+ print(e, inspect(e).key, e.company)
assert set([e.name for e in c.employees]) == set(['pointy haired boss',
'dilbert', 'joesmith', 'wally', 'jsmith'])
-print "\n"
+print("\n")
dilbert = session.query(Person).filter_by(name='dilbert').one()
dilbert2 = session.query(Engineer).filter_by(name='dilbert').one()
@@ -107,29 +107,29 @@ session.commit()
c = session.query(Company).get(1)
for e in c.employees:
- print e
+ print(e)
# query using with_polymorphic.
eng_manager = with_polymorphic(Person, [Engineer, Manager], aliased=True)
-print session.query(eng_manager).\
+print(session.query(eng_manager).\
filter(
or_(eng_manager.Engineer.engineer_name=='engineer1',
eng_manager.Manager.manager_name=='manager2'
)
- ).all()
+ ).all())
# illustrate join from Company,
# We use aliased=True
# to help when the selectable is used as the target of a join.
eng_manager = with_polymorphic(Person, [Engineer, Manager], aliased=True)
-print session.query(Company).\
+print(session.query(Company).\
join(
eng_manager,
Company.employees
).filter(
or_(eng_manager.Engineer.engineer_name=='engineer1',
eng_manager.Manager.manager_name=='manager2')
- ).all()
+ ).all())
session.commit()
diff --git a/examples/inheritance/single.py b/examples/inheritance/single.py
index b2f934120..b445f74a6 100644
--- a/examples/inheritance/single.py
+++ b/examples/inheritance/single.py
@@ -23,7 +23,7 @@ employees_table = Table('employees', metadata,
class Person(object):
def __init__(self, **kwargs):
- for key, value in kwargs.iteritems():
+ for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return "Ordinary person %s" % self.name
@@ -39,7 +39,7 @@ class Manager(Person):
(self.name, self.status, self.manager_name)
class Company(object):
def __init__(self, **kwargs):
- for key, value in kwargs.iteritems():
+ for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return "Company %s" % self.name
@@ -79,9 +79,9 @@ session.commit()
c = session.query(Company).get(1)
for e in c.employees:
- print e, e.company
+ print(e, e.company)
-print "\n"
+print("\n")
dilbert = session.query(Person).filter_by(name='dilbert').one()
dilbert2 = session.query(Engineer).filter_by(name='dilbert').one()
@@ -94,7 +94,7 @@ session.expunge_all()
c = session.query(Company).get(1)
for e in c.employees:
- print e
+ print(e)
session.delete(c)
session.commit()
diff --git a/examples/large_collection/large_collection.py b/examples/large_collection/large_collection.py
index 3e386ae64..82d2e554b 100644
--- a/examples/large_collection/large_collection.py
+++ b/examples/large_collection/large_collection.py
@@ -63,14 +63,14 @@ if __name__ == '__main__':
sess.add(org)
- print "-------------------------\nflush one - save org + 3 members\n"
+ print("-------------------------\nflush one - save org + 3 members\n")
sess.commit()
# the 'members' collection is a Query. it issues
# SQL as needed to load subsets of the collection.
- print "-------------------------\nload subset of members\n"
+ print("-------------------------\nload subset of members\n")
members = org.members.filter(member_table.c.name.like('%member t%')).all()
- print members
+ print(members)
# new Members can be appended without any
# SQL being emitted to load the full collection
@@ -78,19 +78,19 @@ if __name__ == '__main__':
org.members.append(Member('member five'))
org.members.append(Member('member six'))
- print "-------------------------\nflush two - save 3 more members\n"
+ print("-------------------------\nflush two - save 3 more members\n")
sess.commit()
# delete the object. Using ON DELETE CASCADE
# SQL is only emitted for the head row - the Member rows
# disappear automatically without the need for additional SQL.
sess.delete(org)
- print "-------------------------\nflush three - delete org, delete members in one statement\n"
+ print("-------------------------\nflush three - delete org, delete members in one statement\n")
sess.commit()
- print "-------------------------\nno Member rows should remain:\n"
- print sess.query(Member).count()
+ print("-------------------------\nno Member rows should remain:\n")
+ print(sess.query(Member).count())
sess.close()
- print "------------------------\ndone. dropping tables."
+ print("------------------------\ndone. dropping tables.")
meta.drop_all(engine) \ No newline at end of file
diff --git a/examples/postgis/postgis.py b/examples/postgis/postgis.py
index 0b86ad323..01671c5c4 100644
--- a/examples/postgis/postgis.py
+++ b/examples/postgis/postgis.py
@@ -251,7 +251,7 @@ if __name__ == '__main__':
road_table = Road.__table__
stmt = select([road_table]).where(road_table.c.road_geom.intersects(r1.road_geom))
- print session.execute(stmt).fetchall()
+ print(session.execute(stmt).fetchall())
# TODO: for some reason the auto-generated labels have the internal replacement
# strings exposed, even though PG doesn't complain
diff --git a/examples/versioning/_lib.py b/examples/versioning/_lib.py
index ec0da4709..9132f9b35 100644
--- a/examples/versioning/_lib.py
+++ b/examples/versioning/_lib.py
@@ -17,7 +17,7 @@ def eq_(a, b, msg=None):
_repr_stack = set()
class BasicEntity(object):
def __init__(self, **kw):
- for key, value in kw.iteritems():
+ for key, value in kw.items():
setattr(self, key, value)
def __repr__(self):
diff --git a/examples/versioning/history_meta.py b/examples/versioning/history_meta.py
index 533599394..45f1c8369 100644
--- a/examples/versioning/history_meta.py
+++ b/examples/versioning/history_meta.py
@@ -166,7 +166,7 @@ def create_version(obj, session, deleted = False):
attr['version'] = obj.version
hist = history_cls()
- for key, value in attr.iteritems():
+ for key, value in attr.items():
setattr(hist, key, value)
session.add(hist)
obj.version += 1
diff --git a/examples/versioning/test_versioning.py b/examples/versioning/test_versioning.py
index 43e2b0ae1..5b57ecaa2 100644
--- a/examples/versioning/test_versioning.py
+++ b/examples/versioning/test_versioning.py
@@ -1,9 +1,9 @@
from unittest import TestCase
from sqlalchemy.ext.declarative import declarative_base
-from history_meta import Versioned, versioned_session
+from .history_meta import Versioned, versioned_session
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
from sqlalchemy.orm import clear_mappers, sessionmaker, deferred, relationship
-from _lib import ComparableEntity, eq_
+from ._lib import ComparableEntity, eq_
engine = Session = None
@@ -188,9 +188,9 @@ class TestVersioning(TestCase):
eq_(
sess.query(BaseClassHistory).order_by(BaseClassHistory.id).all(),
[
- SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
- BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
- SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1)
+ SubClassSeparatePkHistory(id=1, name='sep1', type='sep', version=1),
+ BaseClassHistory(id=2, name='base1', type='base', version=1),
+ SubClassSamePkHistory(id=3, name='same1', type='same', version=1)
]
)
@@ -199,10 +199,10 @@ class TestVersioning(TestCase):
eq_(
sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
[
- SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
- BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
- SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1),
- SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2)
+ SubClassSeparatePkHistory(id=1, name='sep1', type='sep', version=1),
+ BaseClassHistory(id=2, name='base1', type='base', version=1),
+ SubClassSamePkHistory(id=3, name='same1', type='same', version=1),
+ SubClassSamePkHistory(id=3, name='same1', type='same', version=2)
]
)
@@ -210,11 +210,11 @@ class TestVersioning(TestCase):
eq_(
sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
[
- SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
- BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
- BaseClassHistory(id=2, name=u'base1mod', type=u'base', version=2),
- SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1),
- SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2)
+ SubClassSeparatePkHistory(id=1, name='sep1', type='sep', version=1),
+ BaseClassHistory(id=2, name='base1', type='base', version=1),
+ BaseClassHistory(id=2, name='base1mod', type='base', version=2),
+ SubClassSamePkHistory(id=3, name='same1', type='same', version=1),
+ SubClassSamePkHistory(id=3, name='same1', type='same', version=2)
]
)
@@ -249,7 +249,7 @@ class TestVersioning(TestCase):
eq_(
sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
- [BaseClassHistory(id=1, name=u'b1', type=u'base', version=1)]
+ [BaseClassHistory(id=1, name='b1', type='base', version=1)]
)
sc.name ='s1modified'
@@ -258,9 +258,9 @@ class TestVersioning(TestCase):
eq_(
sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
[
- BaseClassHistory(id=1, name=u'b1', type=u'base', version=1),
- BaseClassHistory(id=1, name=u'b1modified', type=u'base', version=2),
- SubClassHistory(id=2, name=u's1', type=u'sub', version=1)
+ BaseClassHistory(id=1, name='b1', type='base', version=1),
+ BaseClassHistory(id=1, name='b1modified', type='base', version=2),
+ SubClassHistory(id=2, name='s1', type='sub', version=1)
]
)
diff --git a/examples/vertical/dictlike-polymorphic.py b/examples/vertical/dictlike-polymorphic.py
index f800eea73..872a7c52e 100644
--- a/examples/vertical/dictlike-polymorphic.py
+++ b/examples/vertical/dictlike-polymorphic.py
@@ -33,7 +33,7 @@ from sqlalchemy.orm import comparable_property
from sqlalchemy.ext.hybrid import hybrid_property
# Using the VerticalPropertyDictMixin from the base example
-from dictlike import VerticalPropertyDictMixin
+from .dictlike import VerticalPropertyDictMixin
class PolymorphicVerticalProperty(object):
"""A key/value pair with polymorphic value storage.
@@ -150,9 +150,9 @@ if __name__ == '__main__':
class AnimalFact(PolymorphicVerticalProperty):
type_map = {
- int: (u'integer', 'int_value'),
- unicode: (u'char', 'char_value'),
- bool: (u'boolean', 'boolean_value'),
+ int: ('integer', 'int_value'),
+ str: ('char', 'char_value'),
+ bool: ('boolean', 'boolean_value'),
type(None): (None, None),
}
@@ -190,42 +190,42 @@ if __name__ == '__main__':
metadata.create_all(engine)
session = Session(engine)
- stoat = Animal(u'stoat')
- stoat[u'color'] = u'red'
- stoat[u'cuteness'] = 7
- stoat[u'weasel-like'] = True
+ stoat = Animal('stoat')
+ stoat['color'] = 'red'
+ stoat['cuteness'] = 7
+ stoat['weasel-like'] = True
session.add(stoat)
session.commit()
- critter = session.query(Animal).filter(Animal.name == u'stoat').one()
- print critter[u'color']
- print critter[u'cuteness']
+ critter = session.query(Animal).filter(Animal.name == 'stoat').one()
+ print(critter['color'])
+ print(critter['cuteness'])
- print "changing cuteness value and type:"
- critter[u'cuteness'] = u'very cute'
+ print("changing cuteness value and type:")
+ critter['cuteness'] = 'very cute'
session.commit()
- marten = Animal(u'marten')
- marten[u'cuteness'] = 5
- marten[u'weasel-like'] = True
- marten[u'poisonous'] = False
+ marten = Animal('marten')
+ marten['cuteness'] = 5
+ marten['weasel-like'] = True
+ marten['poisonous'] = False
session.add(marten)
- shrew = Animal(u'shrew')
- shrew[u'cuteness'] = 5
- shrew[u'weasel-like'] = False
- shrew[u'poisonous'] = True
+ shrew = Animal('shrew')
+ shrew['cuteness'] = 5
+ shrew['weasel-like'] = False
+ shrew['poisonous'] = True
session.add(shrew)
session.commit()
q = (session.query(Animal).
filter(Animal.facts.any(
- and_(AnimalFact.key == u'weasel-like',
+ and_(AnimalFact.key == 'weasel-like',
AnimalFact.value == True))))
- print 'weasel-like animals', q.all()
+ print('weasel-like animals', q.all())
# Save some typing by wrapping that up in a function:
with_characteristic = lambda key, value: and_(AnimalFact.key == key,
@@ -233,24 +233,24 @@ if __name__ == '__main__':
q = (session.query(Animal).
filter(Animal.facts.any(
- with_characteristic(u'weasel-like', True))))
- print 'weasel-like animals again', q.all()
+ with_characteristic('weasel-like', True))))
+ print('weasel-like animals again', q.all())
q = (session.query(Animal).
- filter(Animal.facts.any(with_characteristic(u'poisonous', False))))
- print 'animals with poisonous=False', q.all()
+ filter(Animal.facts.any(with_characteristic('poisonous', False))))
+ print('animals with poisonous=False', q.all())
q = (session.query(Animal).
filter(or_(Animal.facts.any(
- with_characteristic(u'poisonous', False)),
- not_(Animal.facts.any(AnimalFact.key == u'poisonous')))))
- print 'non-poisonous animals', q.all()
+ with_characteristic('poisonous', False)),
+ not_(Animal.facts.any(AnimalFact.key == 'poisonous')))))
+ print('non-poisonous animals', q.all())
q = (session.query(Animal).
filter(Animal.facts.any(AnimalFact.value == 5)))
- print 'any animal with a .value of 5', q.all()
+ print('any animal with a .value of 5', q.all())
# Facts can be queried as well.
q = (session.query(AnimalFact).
- filter(with_characteristic(u'cuteness', u'very cute')))
- print q.all()
+ filter(with_characteristic('cuteness', 'very cute')))
+ print(q.all())
diff --git a/examples/vertical/dictlike.py b/examples/vertical/dictlike.py
index 71ab77342..f17d1acc8 100644
--- a/examples/vertical/dictlike.py
+++ b/examples/vertical/dictlike.py
@@ -176,49 +176,49 @@ if __name__ == '__main__':
metadata.create_all(engine)
session = Session(bind=engine)
- stoat = Animal(u'stoat')
- stoat[u'color'] = u'reddish'
- stoat[u'cuteness'] = u'somewhat'
+ stoat = Animal('stoat')
+ stoat['color'] = 'reddish'
+ stoat['cuteness'] = 'somewhat'
# dict-like assignment transparently creates entries in the
# stoat.facts collection:
- print stoat.facts[u'color']
+ print(stoat.facts['color'])
session.add(stoat)
session.commit()
- critter = session.query(Animal).filter(Animal.name == u'stoat').one()
- print critter[u'color']
- print critter[u'cuteness']
+ critter = session.query(Animal).filter(Animal.name == 'stoat').one()
+ print(critter['color'])
+ print(critter['cuteness'])
- critter[u'cuteness'] = u'very'
+ critter['cuteness'] = 'very'
- print 'changing cuteness:'
+ print('changing cuteness:')
engine.echo = True
session.commit()
engine.echo = False
- marten = Animal(u'marten')
- marten[u'color'] = u'brown'
- marten[u'cuteness'] = u'somewhat'
+ marten = Animal('marten')
+ marten['color'] = 'brown'
+ marten['cuteness'] = 'somewhat'
session.add(marten)
- shrew = Animal(u'shrew')
- shrew[u'cuteness'] = u'somewhat'
- shrew[u'poisonous-part'] = u'saliva'
+ shrew = Animal('shrew')
+ shrew['cuteness'] = 'somewhat'
+ shrew['poisonous-part'] = 'saliva'
session.add(shrew)
- loris = Animal(u'slow loris')
- loris[u'cuteness'] = u'fairly'
- loris[u'poisonous-part'] = u'elbows'
+ loris = Animal('slow loris')
+ loris['cuteness'] = 'fairly'
+ loris['poisonous-part'] = 'elbows'
session.add(loris)
session.commit()
q = (session.query(Animal).
filter(Animal.facts.any(
- and_(AnimalFact.key == u'color',
- AnimalFact.value == u'reddish'))))
- print 'reddish animals', q.all()
+ and_(AnimalFact.key == 'color',
+ AnimalFact.value == 'reddish'))))
+ print('reddish animals', q.all())
# Save some typing by wrapping that up in a function:
with_characteristic = lambda key, value: and_(AnimalFact.key == key,
@@ -226,21 +226,21 @@ if __name__ == '__main__':
q = (session.query(Animal).
filter(Animal.facts.any(
- with_characteristic(u'color', u'brown'))))
- print 'brown animals', q.all()
+ with_characteristic('color', 'brown'))))
+ print('brown animals', q.all())
q = (session.query(Animal).
filter(not_(Animal.facts.any(
- with_characteristic(u'poisonous-part', u'elbows')))))
- print 'animals without poisonous-part == elbows', q.all()
+ with_characteristic('poisonous-part', 'elbows')))))
+ print('animals without poisonous-part == elbows', q.all())
q = (session.query(Animal).
- filter(Animal.facts.any(AnimalFact.value == u'somewhat')))
- print 'any animal with any .value of "somewhat"', q.all()
+ filter(Animal.facts.any(AnimalFact.value == 'somewhat')))
+ print('any animal with any .value of "somewhat"', q.all())
# Facts can be queried as well.
q = (session.query(AnimalFact).
- filter(with_characteristic(u'cuteness', u'very')))
- print 'just the facts', q.all()
+ filter(with_characteristic('cuteness', 'very')))
+ print('just the facts', q.all())
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 21e06f548..2c805e607 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -120,7 +120,7 @@ from .engine import create_engine, engine_from_config
__all__ = sorted(name for name, obj in locals().items()
if not (name.startswith('_') or _inspect.ismodule(obj)))
-__version__ = '0.8.2'
+__version__ = '0.9.0'
del _inspect, sys
diff --git a/lib/sqlalchemy/connectors/mxodbc.py b/lib/sqlalchemy/connectors/mxodbc.py
index db297c9ab..ebdcd2758 100644
--- a/lib/sqlalchemy/connectors/mxodbc.py
+++ b/lib/sqlalchemy/connectors/mxodbc.py
@@ -82,7 +82,7 @@ class MxODBCConnector(Connector):
category=errorclass,
stacklevel=2)
else:
- raise errorclass, errorvalue
+ raise errorclass(errorvalue)
return error_handler
def create_connect_args(self, url):
diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py
index f1a979286..6b4e3036d 100644
--- a/lib/sqlalchemy/connectors/pyodbc.py
+++ b/lib/sqlalchemy/connectors/pyodbc.py
@@ -5,20 +5,23 @@
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from . import Connector
-from ..util import asbool
+from .. import util
+
import sys
import re
-import urllib
class PyODBCConnector(Connector):
driver = 'pyodbc'
supports_sane_multi_rowcount = False
- # PyODBC unicode is broken on UCS-4 builds
- supports_unicode = sys.maxunicode == 65535
- supports_unicode_statements = supports_unicode
+
+ if util.py2k:
+ # PyODBC unicode is broken on UCS-4 builds
+ supports_unicode = sys.maxunicode == 65535
+ supports_unicode_statements = supports_unicode
+
supports_native_decimal = True
default_paramstyle = 'named'
@@ -56,10 +59,10 @@ class PyODBCConnector(Connector):
connect_args = {}
for param in ('ansi', 'unicode_results', 'autocommit'):
if param in keys:
- connect_args[param] = asbool(keys.pop(param))
+ connect_args[param] = util.asbool(keys.pop(param))
if 'odbc_connect' in keys:
- connectors = [urllib.unquote_plus(keys.pop('odbc_connect'))]
+ connectors = [util.unquote_plus(keys.pop('odbc_connect'))]
else:
dsn_connection = 'dsn' in keys or \
('host' in keys and 'database' not in keys)
@@ -91,7 +94,7 @@ class PyODBCConnector(Connector):
connectors.append("AutoTranslate=%s" %
keys.pop("odbc_autotranslate"))
- connectors.extend(['%s=%s' % (k, v) for k, v in keys.iteritems()])
+ connectors.extend(['%s=%s' % (k, v) for k, v in keys.items()])
return [[";".join(connectors)], connect_args]
def is_disconnect(self, e, connection, cursor):
@@ -121,18 +124,19 @@ class PyODBCConnector(Connector):
self.freetds_driver_version = dbapi_con.getinfo(
pyodbc.SQL_DRIVER_VER)
- # the "Py2K only" part here is theoretical.
- # have not tried pyodbc + python3.1 yet.
- # Py2K
self.supports_unicode_statements = (
- not self.freetds and not self.easysoft)
+ not util.py2k or
+ (not self.freetds and not self.easysoft)
+ )
+
if self._user_supports_unicode_binds is not None:
self.supports_unicode_binds = self._user_supports_unicode_binds
- else:
+ elif util.py2k:
self.supports_unicode_binds = (
not self.freetds or self.freetds_driver_version >= '0.91'
) and not self.easysoft
- # end Py2K
+ else:
+ self.supports_unicode_binds = True
# run other initialization which asks for user name, etc.
super(PyODBCConnector, self).initialize(connection)
diff --git a/lib/sqlalchemy/dialects/firebird/base.py b/lib/sqlalchemy/dialects/firebird/base.py
index 95196f44c..bb60a591e 100644
--- a/lib/sqlalchemy/dialects/firebird/base.py
+++ b/lib/sqlalchemy/dialects/firebird/base.py
@@ -685,7 +685,7 @@ class FBDialect(default.DefaultDialect):
self.normalize_name(row['fname']))
fk['referred_columns'].append(
self.normalize_name(row['targetfname']))
- return fks.values()
+ return list(fks.values())
@reflection.cache
def get_indexes(self, connection, table_name, schema=None, **kw):
@@ -716,7 +716,7 @@ class FBDialect(default.DefaultDialect):
indexrec['column_names'].append(
self.normalize_name(row['field_name']))
- return indexes.values()
+ return list(indexes.values())
def do_execute(self, cursor, statement, parameters, context=None):
# kinterbase does not accept a None, but wants an empty list
diff --git a/lib/sqlalchemy/dialects/informix/base.py b/lib/sqlalchemy/dialects/informix/base.py
index 77361a5d0..e13ea8819 100644
--- a/lib/sqlalchemy/dialects/informix/base.py
+++ b/lib/sqlalchemy/dialects/informix/base.py
@@ -24,6 +24,7 @@ from sqlalchemy import sql, schema, exc, pool, util
from sqlalchemy.sql import compiler, text
from sqlalchemy.engine import default, reflection
from sqlalchemy import types as sqltypes
+from functools import reduce
RESERVED_WORDS = set(
["abs", "absolute", "access", "access_method", "acos", "active", "add",
@@ -298,7 +299,7 @@ class InfoDDLCompiler(compiler.DDLCompiler):
def get_column_default_string(self, column):
if (isinstance(column.server_default, schema.DefaultClause) and
- isinstance(column.server_default.arg, basestring)):
+ isinstance(column.server_default.arg, util.string_types)):
if isinstance(column.type, (sqltypes.Integer, sqltypes.Numeric)):
return self.sql_compiler.process(text(column.server_default.arg))
@@ -506,7 +507,7 @@ class InformixDialect(default.DefaultDialect):
if remote_column not in remote_cols:
remote_cols.append(remote_column)
- return fkeys.values()
+ return list(fkeys.values())
@reflection.cache
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index fc952f4b5..3c329fe5e 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -295,7 +295,7 @@ class _MSDate(sqltypes.Date):
def process(value):
if isinstance(value, datetime.datetime):
return value.date()
- elif isinstance(value, basestring):
+ elif isinstance(value, util.string_types):
return datetime.date(*[
int(x or 0)
for x in self._reg.match(value).groups()
@@ -328,7 +328,7 @@ class TIME(sqltypes.TIME):
def process(value):
if isinstance(value, datetime.datetime):
return value.time()
- elif isinstance(value, basestring):
+ elif isinstance(value, util.string_types):
return datetime.time(*[
int(x or 0)
for x in self._reg.match(value).groups()])
@@ -1008,7 +1008,7 @@ class MSDDLCompiler(compiler.DDLCompiler):
# handle other included columns
if index.kwargs.get("mssql_include"):
inclusions = [index.table.c[col]
- if isinstance(col, basestring) else col
+ if isinstance(col, util.string_types) else col
for col in index.kwargs["mssql_include"]]
text += " INCLUDE (%s)" \
@@ -1109,7 +1109,7 @@ class MSDialect(default.DefaultDialect):
query_timeout=None,
use_scope_identity=True,
max_identifier_length=None,
- schema_name=u"dbo", **opts):
+ schema_name="dbo", **opts):
self.query_timeout = int(query_timeout or 0)
self.schema_name = schema_name
@@ -1129,7 +1129,7 @@ class MSDialect(default.DefaultDialect):
def initialize(self, connection):
super(MSDialect, self).initialize(connection)
- if self.server_version_info[0] not in range(8, 17):
+ if self.server_version_info[0] not in list(range(8, 17)):
# FreeTDS with version 4.2 seems to report here
# a number like "95.10.255". Don't know what
# that is. So emit warning.
@@ -1156,7 +1156,7 @@ class MSDialect(default.DefaultDialect):
try:
default_schema_name = connection.scalar(query, name=user_name)
if default_schema_name is not None:
- return unicode(default_schema_name)
+ return util.text_type(default_schema_name)
except:
pass
return self.schema_name
@@ -1172,6 +1172,7 @@ class MSDialect(default.DefaultDialect):
columns = ischema.columns
whereclause = self._unicode_cast(columns.c.table_name) == tablename
+
if owner:
whereclause = sql.and_(whereclause,
columns.c.table_schema == owner)
@@ -1194,7 +1195,7 @@ class MSDialect(default.DefaultDialect):
s = sql.select([tables.c.table_name],
sql.and_(
tables.c.table_schema == owner,
- tables.c.table_type == u'BASE TABLE'
+ tables.c.table_type == 'BASE TABLE'
),
order_by=[tables.c.table_name]
)
@@ -1208,7 +1209,7 @@ class MSDialect(default.DefaultDialect):
s = sql.select([tables.c.table_name],
sql.and_(
tables.c.table_schema == owner,
- tables.c.table_type == u'VIEW'
+ tables.c.table_type == 'VIEW'
),
order_by=[tables.c.table_name]
)
@@ -1273,7 +1274,7 @@ class MSDialect(default.DefaultDialect):
if row['index_id'] in indexes:
indexes[row['index_id']]['column_names'].append(row['name'])
- return indexes.values()
+ return list(indexes.values())
@reflection.cache
@_db_plus_owner
@@ -1480,4 +1481,4 @@ class MSDialect(default.DefaultDialect):
local_cols.append(scol)
remote_cols.append(rcol)
- return fkeys.values()
+ return list(fkeys.values())
diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py
index 80e59d323..a7628f213 100644
--- a/lib/sqlalchemy/dialects/mssql/information_schema.py
+++ b/lib/sqlalchemy/dialects/mssql/information_schema.py
@@ -9,6 +9,7 @@
from ... import Table, MetaData, Column
from ...types import String, Unicode, Integer, TypeDecorator
from ... import cast
+from ... import util
ischema = MetaData()
@@ -17,10 +18,8 @@ class CoerceUnicode(TypeDecorator):
impl = Unicode
def process_bind_param(self, value, dialect):
- # Py2K
- if isinstance(value, str):
+ if util.py2k and isinstance(value, util.binary_type):
value = value.decode(dialect.encoding)
- # end Py2K
return value
def bind_expression(self, bindvalue):
diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py
index beb6066f5..5a359d179 100644
--- a/lib/sqlalchemy/dialects/mssql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py
@@ -219,7 +219,7 @@ class MSExecutionContext_pyodbc(MSExecutionContext):
# without closing it (FreeTDS particularly)
row = self.cursor.fetchall()[0]
break
- except self.dialect.dbapi.Error, e:
+ except self.dialect.dbapi.Error as e:
# no way around this - nextset() consumes the previous set
# so we need to just keep flipping
self.cursor.nextset()
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index 076fa2517..ad4650f6d 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -640,7 +640,7 @@ class BIT(sqltypes.TypeEngine):
def process(value):
if value is not None:
- v = 0L
+ v = 0
for i in map(ord, value):
v = v << 8 | i
return v
@@ -1139,14 +1139,10 @@ class SET(_StringType):
# No ',' quoting issues- commas aren't allowed in SET values
# The bad news:
# Plenty of driver inconsistencies here.
- if isinstance(value, util.set_types):
+ if isinstance(value, set):
# ..some versions convert '' to an empty set
if not value:
value.add('')
- # ..some return sets.Set, even for pythons
- # that have __builtin__.set
- if not isinstance(value, set):
- value = set(value)
return value
# ...and some versions return strings
if value is not None:
@@ -1159,7 +1155,7 @@ class SET(_StringType):
super_convert = super(SET, self).bind_processor(dialect)
def process(value):
- if value is None or isinstance(value, (int, long, basestring)):
+ if value is None or isinstance(value, util.int_types + util.string_types):
pass
else:
if None in value:
@@ -1340,7 +1336,7 @@ class MySQLCompiler(compiler.SQLCompiler):
of a SELECT.
"""
- if isinstance(select._distinct, basestring):
+ if isinstance(select._distinct, util.string_types):
return select._distinct.upper() + " "
elif select._distinct:
return "DISTINCT "
@@ -1429,7 +1425,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
MySQLDDLCompiler, self).create_table_constraints(table)
engine_key = '%s_engine' % self.dialect.name
- is_innodb = table.kwargs.has_key(engine_key) and \
+ is_innodb = engine_key in table.kwargs and \
table.kwargs[engine_key].lower() == 'innodb'
auto_inc_column = table._autoincrement_column
@@ -2034,7 +2030,7 @@ class MySQLDialect(default.DefaultDialect):
have = rs.fetchone() is not None
rs.close()
return have
- except exc.DBAPIError, e:
+ except exc.DBAPIError as e:
if self._extract_error_code(e.orig) == 1146:
return False
raise
@@ -2317,7 +2313,7 @@ class MySQLDialect(default.DefaultDialect):
rp = None
try:
rp = connection.execute(st)
- except exc.DBAPIError, e:
+ except exc.DBAPIError as e:
if self._extract_error_code(e.orig) == 1146:
raise exc.NoSuchTableError(full_name)
else:
@@ -2341,7 +2337,7 @@ class MySQLDialect(default.DefaultDialect):
try:
try:
rp = connection.execute(st)
- except exc.DBAPIError, e:
+ except exc.DBAPIError as e:
if self._extract_error_code(e.orig) == 1146:
raise exc.NoSuchTableError(full_name)
else:
@@ -2791,11 +2787,8 @@ class _DecodingRowProxy(object):
item = self.rowproxy[index]
if isinstance(item, _array):
item = item.tostring()
- # Py2K
- if self.charset and isinstance(item, str):
- # end Py2K
- # Py3K
- #if self.charset and isinstance(item, bytes):
+
+ if self.charset and isinstance(item, util.binary_type):
return item.decode(self.charset)
else:
return item
@@ -2804,11 +2797,7 @@ class _DecodingRowProxy(object):
item = getattr(self.rowproxy, attr)
if isinstance(item, _array):
item = item.tostring()
- # Py2K
- if self.charset and isinstance(item, str):
- # end Py2K
- # Py3K
- #if self.charset and isinstance(item, bytes):
+ if self.charset and isinstance(item, util.binary_type):
return item.decode(self.charset)
else:
return item
diff --git a/lib/sqlalchemy/dialects/mysql/cymysql.py b/lib/sqlalchemy/dialects/mysql/cymysql.py
index 0806f63b4..deb2de449 100644
--- a/lib/sqlalchemy/dialects/mysql/cymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/cymysql.py
@@ -25,15 +25,9 @@ class _cymysqlBIT(BIT):
def process(value):
if value is not None:
- # Py2K
- v = 0L
- for i in map(ord, value):
+ v = 0
+ for i in util.iterbytes(value):
v = v << 8 | i
- # end Py2K
- # Py3K
- #v = 0
- #for i in value:
- # v = v << 8 | i
return v
return value
return process
diff --git a/lib/sqlalchemy/dialects/mysql/oursql.py b/lib/sqlalchemy/dialects/mysql/oursql.py
index db24adf03..77370f91d 100644
--- a/lib/sqlalchemy/dialects/mysql/oursql.py
+++ b/lib/sqlalchemy/dialects/mysql/oursql.py
@@ -55,10 +55,10 @@ class MySQLExecutionContext_oursql(MySQLExecutionContext):
class MySQLDialect_oursql(MySQLDialect):
driver = 'oursql'
-# Py2K
- supports_unicode_binds = True
- supports_unicode_statements = True
-# end Py2K
+
+ if util.py2k:
+ supports_unicode_binds = True
+ supports_unicode_statements = True
supports_native_decimal = True
@@ -90,12 +90,11 @@ class MySQLDialect_oursql(MySQLDialect):
connection.cursor().execute('BEGIN', plain_query=True)
def _xa_query(self, connection, query, xid):
-# Py2K
- arg = connection.connection._escape_string(xid)
-# end Py2K
-# Py3K
-# charset = self._connection_charset
-# arg = connection.connection._escape_string(xid.encode(charset)).decode(charset)
+ if util.py2k:
+ arg = connection.connection._escape_string(xid)
+ else:
+ charset = self._connection_charset
+ arg = connection.connection._escape_string(xid.encode(charset)).decode(charset)
arg = "'%s'" % arg
connection.execution_options(_oursql_plain_query=True).execute(query % arg)
diff --git a/lib/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
index ea01da21c..20f2e7359 100644
--- a/lib/sqlalchemy/dialects/mysql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
@@ -37,7 +37,7 @@ class _ZxJDBCBit(BIT):
return value
if isinstance(value, bool):
return int(value)
- v = 0L
+ v = 0
for i in value:
v = v << 8 | (i & 0xff)
value = v
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 831ba5f1b..6b6c32ae0 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -654,14 +654,14 @@ class OracleDDLCompiler(compiler.DDLCompiler):
class OracleIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = set([x.lower() for x in RESERVED_WORDS])
- illegal_initial_characters = set(xrange(0, 10)).union(["_", "$"])
+ illegal_initial_characters = set(range(0, 10)).union(["_", "$"])
def _bindparam_requires_quotes(self, value):
"""Return True if the given identifier requires quoting."""
lc_value = value.lower()
return (lc_value in self.reserved_words
or value[0] in self.illegal_initial_characters
- or not self.legal_characters.match(unicode(value))
+ or not self.legal_characters.match(util.text_type(value))
)
def format_savepoint(self, savepoint):
@@ -765,10 +765,9 @@ class OracleDialect(default.DefaultDialect):
def normalize_name(self, name):
if name is None:
return None
- # Py2K
- if isinstance(name, str):
- name = name.decode(self.encoding)
- # end Py2K
+ if util.py2k:
+ if isinstance(name, str):
+ name = name.decode(self.encoding)
if name.upper() == name and \
not self.identifier_preparer._requires_quotes(name.lower()):
return name.lower()
@@ -780,16 +779,15 @@ class OracleDialect(default.DefaultDialect):
return None
elif name.lower() == name and not self.identifier_preparer._requires_quotes(name.lower()):
name = name.upper()
- # Py2K
- if not self.supports_unicode_binds:
- name = name.encode(self.encoding)
- else:
- name = unicode(name)
- # end Py2K
+ if util.py2k:
+ if not self.supports_unicode_binds:
+ name = name.encode(self.encoding)
+ else:
+ name = unicode(name)
return name
def _get_default_schema_name(self, connection):
- return self.normalize_name(connection.execute(u'SELECT USER FROM DUAL').scalar())
+ return self.normalize_name(connection.execute('SELECT USER FROM DUAL').scalar())
def _resolve_synonym(self, connection, desired_owner=None, desired_synonym=None, desired_table=None):
"""search for a local synonym matching the given desired owner/name.
@@ -1167,7 +1165,7 @@ class OracleDialect(default.DefaultDialect):
local_cols.append(local_column)
remote_cols.append(remote_column)
- return fkeys.values()
+ return list(fkeys.values())
@reflection.cache
def get_view_definition(self, connection, view_name, schema=None,
@@ -1187,7 +1185,9 @@ class OracleDialect(default.DefaultDialect):
rp = connection.execute(sql.text(text), **params).scalar()
if rp:
- return rp.decode(self.encoding)
+ if util.py2k:
+ rp = rp.decode(self.encoding)
+ return rp
else:
return None
diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
index b8f7439f5..e013799db 100644
--- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py
+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
@@ -268,20 +268,17 @@ class _LOBMixin(object):
class _NativeUnicodeMixin(object):
- # Py3K
- #pass
- # Py2K
- def bind_processor(self, dialect):
- if dialect._cx_oracle_with_unicode:
- def process(value):
- if value is None:
- return value
- else:
- return unicode(value)
- return process
- else:
- return super(_NativeUnicodeMixin, self).bind_processor(dialect)
- # end Py2K
+ if util.py2k:
+ def bind_processor(self, dialect):
+ if dialect._cx_oracle_with_unicode:
+ def process(value):
+ if value is None:
+ return value
+ else:
+ return unicode(value)
+ return process
+ else:
+ return super(_NativeUnicodeMixin, self).bind_processor(dialect)
# we apply a connection output handler that returns
# unicode in all cases, so the "native_unicode" flag
@@ -493,11 +490,11 @@ class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_or
"""
def __init__(self, *arg, **kw):
OracleExecutionContext_cx_oracle.__init__(self, *arg, **kw)
- self.statement = unicode(self.statement)
+ self.statement = util.text_type(self.statement)
def _execute_scalar(self, stmt):
return super(OracleExecutionContext_cx_oracle_with_unicode, self).\
- _execute_scalar(unicode(stmt))
+ _execute_scalar(util.text_type(stmt))
class ReturningResultProxy(_result.FullyBufferedResultProxy):
@@ -607,19 +604,23 @@ class OracleDialect_cx_oracle(OracleDialect):
self.supports_unicode_statements = True
self.supports_unicode_binds = True
self._cx_oracle_with_unicode = True
- # Py2K
- # There's really no reason to run with WITH_UNICODE under Python 2.x.
- # Give the user a hint.
- util.warn("cx_Oracle is compiled under Python 2.xx using the "
- "WITH_UNICODE flag. Consider recompiling cx_Oracle without "
- "this flag, which is in no way necessary for full support of Unicode. "
- "Otherwise, all string-holding bind parameters must "
- "be explicitly typed using SQLAlchemy's String type or one of its subtypes,"
- "or otherwise be passed as Python unicode. Plain Python strings "
- "passed as bind parameters will be silently corrupted by cx_Oracle."
- )
- self.execution_ctx_cls = OracleExecutionContext_cx_oracle_with_unicode
- # end Py2K
+
+ if util.py2k:
+ # There's really no reason to run with WITH_UNICODE under Python 2.x.
+ # Give the user a hint.
+ util.warn(
+ "cx_Oracle is compiled under Python 2.xx using the "
+ "WITH_UNICODE flag. Consider recompiling cx_Oracle "
+ "without this flag, which is in no way necessary for full "
+ "support of Unicode. Otherwise, all string-holding bind "
+ "parameters must be explicitly typed using SQLAlchemy's "
+ "String type or one of its subtypes,"
+ "or otherwise be passed as Python unicode. "
+ "Plain Python strings passed as bind parameters will be "
+ "silently corrupted by cx_Oracle."
+ )
+ self.execution_ctx_cls = \
+ OracleExecutionContext_cx_oracle_with_unicode
else:
self._cx_oracle_with_unicode = False
@@ -731,7 +732,7 @@ class OracleDialect_cx_oracle(OracleDialect):
arraysize=cursor.arraysize)
# allow all strings to come back natively as Unicode
elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
- return cursor.var(unicode, size, cursor.arraysize)
+ return cursor.var(util.text_type, size, cursor.arraysize)
def on_connect(conn):
conn.outputtypehandler = output_type_handler
@@ -766,20 +767,19 @@ class OracleDialect_cx_oracle(OracleDialect):
twophase=self.allow_twophase,
)
- # Py2K
- if self._cx_oracle_with_unicode:
- for k, v in opts.items():
- if isinstance(v, str):
- opts[k] = unicode(v)
- else:
- for k, v in opts.items():
- if isinstance(v, unicode):
- opts[k] = str(v)
- # end Py2K
+ if util.py2k:
+ if self._cx_oracle_with_unicode:
+ for k, v in opts.items():
+ if isinstance(v, str):
+ opts[k] = unicode(v)
+ else:
+ for k, v in opts.items():
+ if isinstance(v, unicode):
+ opts[k] = str(v)
if 'mode' in url.query:
opts['mode'] = url.query['mode']
- if isinstance(opts['mode'], basestring):
+ if isinstance(opts['mode'], util.string_types):
mode = opts['mode'].upper()
if mode == 'SYSDBA':
opts['mode'] = self.dbapi.SYSDBA
@@ -819,6 +819,11 @@ class OracleDialect_cx_oracle(OracleDialect):
id = random.randint(0, 2 ** 128)
return (0x1234, "%032x" % id, "%032x" % 9)
+ def do_executemany(self, cursor, statement, parameters, context=None):
+ if isinstance(parameters, tuple):
+ parameters = list(parameters)
+ cursor.executemany(statement, parameters)
+
def do_begin_twophase(self, connection, xid):
connection.connection.begin(*xid)
diff --git a/lib/sqlalchemy/dialects/oracle/zxjdbc.py b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
index d74f21aca..ad53b89a1 100644
--- a/lib/sqlalchemy/dialects/oracle/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
@@ -95,8 +95,8 @@ class OracleExecutionContext_zxjdbc(OracleExecutionContext):
try:
try:
rrs = self.statement.__statement__.getReturnResultSet()
- rrs.next()
- except SQLException, sqle:
+ next(rrs)
+ except SQLException as sqle:
msg = '%s [SQLCode: %d]' % (sqle.getMessage(), sqle.getErrorCode())
if sqle.getSQLState() is not None:
msg += ' [SQLState: %s]' % sqle.getSQLState()
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 1acdb57b9..00d0acc2c 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -188,7 +188,6 @@ underlying CREATE INDEX command, so it *must* be a valid index type for your
version of PostgreSQL.
"""
-
import re
from ... import sql, schema, exc, util
@@ -333,7 +332,7 @@ class UUID(sqltypes.TypeEngine):
if self.as_uuid:
def process(value):
if value is not None:
- value = str(value)
+ value = util.text_type(value)
return value
return process
else:
@@ -1419,7 +1418,7 @@ class PGDialect(default.DefaultDialect):
query,
bindparams=[
sql.bindparam(
- 'schema', unicode(schema.lower()),
+ 'schema', util.text_type(schema.lower()),
type_=sqltypes.Unicode)]
)
)
@@ -1435,7 +1434,7 @@ class PGDialect(default.DefaultDialect):
"n.oid=c.relnamespace where n.nspname=current_schema() and "
"relname=:name",
bindparams=[
- sql.bindparam('name', unicode(table_name),
+ sql.bindparam('name', util.text_type(table_name),
type_=sqltypes.Unicode)]
)
)
@@ -1447,9 +1446,9 @@ class PGDialect(default.DefaultDialect):
"relname=:name",
bindparams=[
sql.bindparam('name',
- unicode(table_name), type_=sqltypes.Unicode),
+ util.text_type(table_name), type_=sqltypes.Unicode),
sql.bindparam('schema',
- unicode(schema), type_=sqltypes.Unicode)]
+ util.text_type(schema), type_=sqltypes.Unicode)]
)
)
return bool(cursor.first())
@@ -1463,7 +1462,7 @@ class PGDialect(default.DefaultDialect):
"n.nspname=current_schema() "
"and relname=:name",
bindparams=[
- sql.bindparam('name', unicode(sequence_name),
+ sql.bindparam('name', util.text_type(sequence_name),
type_=sqltypes.Unicode)
]
)
@@ -1475,10 +1474,10 @@ class PGDialect(default.DefaultDialect):
"n.oid=c.relnamespace where relkind='S' and "
"n.nspname=:schema and relname=:name",
bindparams=[
- sql.bindparam('name', unicode(sequence_name),
+ sql.bindparam('name', util.text_type(sequence_name),
type_=sqltypes.Unicode),
sql.bindparam('schema',
- unicode(schema), type_=sqltypes.Unicode)
+ util.text_type(schema), type_=sqltypes.Unicode)
]
)
)
@@ -1488,9 +1487,9 @@ class PGDialect(default.DefaultDialect):
def has_type(self, connection, type_name, schema=None):
bindparams = [
sql.bindparam('typname',
- unicode(type_name), type_=sqltypes.Unicode),
+ util.text_type(type_name), type_=sqltypes.Unicode),
sql.bindparam('nspname',
- unicode(schema), type_=sqltypes.Unicode),
+ util.text_type(schema), type_=sqltypes.Unicode),
]
if schema is not None:
query = """
@@ -1546,9 +1545,9 @@ class PGDialect(default.DefaultDialect):
""" % schema_where_clause
# Since we're binding to unicode, table_name and schema_name must be
# unicode.
- table_name = unicode(table_name)
+ table_name = util.text_type(table_name)
if schema is not None:
- schema = unicode(schema)
+ schema = util.text_type(schema)
s = sql.text(query, bindparams=[
sql.bindparam('table_name', type_=sqltypes.Unicode),
sql.bindparam('schema', type_=sqltypes.Unicode)
@@ -1570,13 +1569,13 @@ class PGDialect(default.DefaultDialect):
"""
rp = connection.execute(s)
# what about system tables?
- # Py3K
- #schema_names = [row[0] for row in rp \
- # if not row[0].startswith('pg_')]
- # Py2K
- schema_names = [row[0].decode(self.encoding) for row in rp \
+
+ if util.py2k:
+ schema_names = [row[0].decode(self.encoding) for row in rp \
+ if not row[0].startswith('pg_')]
+ else:
+ schema_names = [row[0] for row in rp \
if not row[0].startswith('pg_')]
- # end Py2K
return schema_names
@reflection.cache
@@ -1587,7 +1586,7 @@ class PGDialect(default.DefaultDialect):
current_schema = self.default_schema_name
result = connection.execute(
- sql.text(u"SELECT relname FROM pg_class c "
+ sql.text("SELECT relname FROM pg_class c "
"WHERE relkind = 'r' "
"AND '%s' = (select nspname from pg_namespace n "
"where n.oid = c.relnamespace) " %
@@ -1610,12 +1609,12 @@ class PGDialect(default.DefaultDialect):
AND '%(schema)s' = (select nspname from pg_namespace n
where n.oid = c.relnamespace)
""" % dict(schema=current_schema)
- # Py3K
- #view_names = [row[0] for row in connection.execute(s)]
- # Py2K
- view_names = [row[0].decode(self.encoding)
+
+ if util.py2k:
+ view_names = [row[0].decode(self.encoding)
for row in connection.execute(s)]
- # end Py2K
+ else:
+ view_names = [row[0] for row in connection.execute(s)]
return view_names
@reflection.cache
@@ -1632,11 +1631,10 @@ class PGDialect(default.DefaultDialect):
rp = connection.execute(sql.text(s),
view_name=view_name, schema=current_schema)
if rp:
- # Py3K
- #view_def = rp.scalar()
- # Py2K
- view_def = rp.scalar().decode(self.encoding)
- # end Py2K
+ if util.py2k:
+ view_def = rp.scalar().decode(self.encoding)
+ else:
+ view_def = rp.scalar()
return view_def
@reflection.cache
diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py
index e555a1afd..d7368ff42 100644
--- a/lib/sqlalchemy/dialects/postgresql/hstore.py
+++ b/lib/sqlalchemy/dialects/postgresql/hstore.py
@@ -10,6 +10,7 @@ from .base import ARRAY, ischema_names
from ... import types as sqltypes
from ...sql import functions as sqlfunc
from ...sql.operators import custom_op
+from ... import util
__all__ = ('HSTORE', 'hstore')
@@ -96,14 +97,14 @@ def _serialize_hstore(val):
def esc(s, position):
if position == 'value' and s is None:
return 'NULL'
- elif isinstance(s, basestring):
+ elif isinstance(s, util.string_types):
return '"%s"' % s.replace('"', r'\"')
else:
raise ValueError("%r in %s position is not a string." %
(s, position))
return ', '.join('%s=>%s' % (esc(k, 'key'), esc(v, 'value'))
- for k, v in val.iteritems())
+ for k, v in val.items())
class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
@@ -260,19 +261,35 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
_adapt_expression(self, op, other_comparator)
def bind_processor(self, dialect):
- def process(value):
- if isinstance(value, dict):
- return _serialize_hstore(value)
- else:
- return value
+ if util.py2k:
+ encoding = dialect.encoding
+ def process(value):
+ if isinstance(value, dict):
+ return _serialize_hstore(value).encode(encoding)
+ else:
+ return value
+ else:
+ def process(value):
+ if isinstance(value, dict):
+ return _serialize_hstore(value)
+ else:
+ return value
return process
def result_processor(self, dialect, coltype):
- def process(value):
- if value is not None:
- return _parse_hstore(value)
- else:
- return value
+ if util.py2k:
+ encoding = dialect.encoding
+ def process(value):
+ if value is not None:
+ return _parse_hstore(value.decode(encoding))
+ else:
+ return value
+ else:
+ def process(value):
+ if value is not None:
+ return _parse_hstore(value)
+ else:
+ return value
return process
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 805fc72af..fcc1946ff 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -143,6 +143,7 @@ effect for other DBAPIs.
"""
from __future__ import absolute_import
+
import re
import logging
@@ -190,22 +191,20 @@ class _PGNumeric(sqltypes.Numeric):
class _PGEnum(ENUM):
def __init__(self, *arg, **kw):
super(_PGEnum, self).__init__(*arg, **kw)
- # Py2K
- if self.convert_unicode:
- self.convert_unicode = "force"
- # end Py2K
+ if util.py2k:
+ if self.convert_unicode:
+ self.convert_unicode = "force"
class _PGArray(ARRAY):
def __init__(self, *arg, **kw):
super(_PGArray, self).__init__(*arg, **kw)
- # Py2K
- # FIXME: this check won't work for setups that
- # have convert_unicode only on their create_engine().
- if isinstance(self.item_type, sqltypes.String) and \
- self.item_type.convert_unicode:
- self.item_type.convert_unicode = "force"
- # end Py2K
+ if util.py2k:
+ # FIXME: this check won't work for setups that
+ # have convert_unicode only on their create_engine().
+ if isinstance(self.item_type, sqltypes.String) and \
+ self.item_type.convert_unicode:
+ self.item_type.convert_unicode = "force"
class _PGHStore(HSTORE):
@@ -294,9 +293,9 @@ class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer):
class PGDialect_psycopg2(PGDialect):
driver = 'psycopg2'
- # Py2K
- supports_unicode_statements = False
- # end Py2K
+ if util.py2k:
+ supports_unicode_statements = False
+
default_paramstyle = 'pyformat'
supports_sane_multi_rowcount = False
execution_ctx_cls = PGExecutionContext_psycopg2
@@ -393,7 +392,13 @@ class PGDialect_psycopg2(PGDialect):
hstore_oids = self._hstore_oids(conn)
if hstore_oids is not None:
oid, array_oid = hstore_oids
- extras.register_hstore(conn, oid=oid, array_oid=array_oid)
+ if util.py2k:
+ extras.register_hstore(conn, oid=oid,
+ array_oid=array_oid,
+ unicode=True)
+ else:
+ extras.register_hstore(conn, oid=oid,
+ array_oid=array_oid)
fns.append(on_connect)
if fns:
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index f21a81d0c..1ca8f4e64 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -508,7 +508,7 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
def visit_foreign_key_constraint(self, constraint):
- local_table = constraint._elements.values()[0].parent.table
+ local_table = list(constraint._elements.values())[0].parent.table
remote_table = list(constraint._elements.values())[0].column.table
if local_table.schema != remote_table.schema:
@@ -812,7 +812,7 @@ class SQLiteDialect(default.DefaultDialect):
coltype = sqltypes.NullType()
if default is not None:
- default = unicode(default)
+ default = util.text_type(default)
return {
'name': name,
diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
index d827607ba..ad0dd5292 100644
--- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py
+++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
@@ -267,8 +267,8 @@ class SQLiteDialect_pysqlite(SQLiteDialect):
}
)
- # Py3K
- #description_encoding = None
+ if not util.py2k:
+ description_encoding = None
driver = 'pysqlite'
@@ -288,7 +288,7 @@ class SQLiteDialect_pysqlite(SQLiteDialect):
def dbapi(cls):
try:
from pysqlite2 import dbapi2 as sqlite
- except ImportError, e:
+ except ImportError as e:
try:
from sqlite3 import dbapi2 as sqlite # try 2.5+ stdlib name.
except ImportError:
diff --git a/lib/sqlalchemy/dialects/sybase/__init__.py b/lib/sqlalchemy/dialects/sybase/__init__.py
index 7d504e54e..f61352ceb 100644
--- a/lib/sqlalchemy/dialects/sybase/__init__.py
+++ b/lib/sqlalchemy/dialects/sybase/__init__.py
@@ -9,7 +9,7 @@ from sqlalchemy.dialects.sybase import base, pysybase, pyodbc
# default dialect
base.dialect = pyodbc.dialect
-from base import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
+from .base import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
TEXT, DATE, DATETIME, FLOAT, NUMERIC,\
BIGINT, INT, INTEGER, SMALLINT, BINARY,\
VARBINARY, UNITEXT, UNICHAR, UNIVARCHAR,\
diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py
index a9e5c5fda..6770ed8e7 100644
--- a/lib/sqlalchemy/dialects/sybase/base.py
+++ b/lib/sqlalchemy/dialects/sybase/base.py
@@ -475,12 +475,12 @@ class SybaseDialect(default.DefaultDialect):
AND o.type in ('U', 'V')
""")
- # Py2K
- if isinstance(schema, unicode):
- schema = schema.encode("ascii")
- if isinstance(table_name, unicode):
- table_name = table_name.encode("ascii")
- # end Py2K
+# start Py2K
+# if isinstance(schema, unicode):
+# schema = schema.encode("ascii")
+# if isinstance(table_name, unicode):
+# table_name = table_name.encode("ascii")
+# end Py2K
result = connection.execute(TABLEID_SQL,
schema_name=schema,
table_name=table_name)
@@ -759,10 +759,10 @@ class SybaseDialect(default.DefaultDialect):
AND o.type = 'U'
""")
- # Py2K
- if isinstance(schema, unicode):
- schema = schema.encode("ascii")
- # end Py2K
+# start Py2K
+# if isinstance(schema, unicode):
+# schema = schema.encode("ascii")
+# end Py2K
tables = connection.execute(TABLE_SQL, schema_name=schema)
return [t["name"] for t in tables]
@@ -779,10 +779,10 @@ class SybaseDialect(default.DefaultDialect):
AND o.type = 'V'
""")
- # Py2K
- if isinstance(view_name, unicode):
- view_name = view_name.encode("ascii")
- # end Py2K
+# start Py2K
+# if isinstance(view_name, unicode):
+# view_name = view_name.encode("ascii")
+# end Py2K
view = connection.execute(VIEW_DEF_SQL, view_name=view_name)
return view.scalar()
@@ -799,10 +799,10 @@ class SybaseDialect(default.DefaultDialect):
AND o.type = 'V'
""")
- # Py2K
- if isinstance(schema, unicode):
- schema = schema.encode("ascii")
- # end Py2K
+# start Py2K
+# if isinstance(schema, unicode):
+# schema = schema.encode("ascii")
+# end Py2K
views = connection.execute(VIEW_SQL, schema_name=schema)
return [v["name"] for v in views]
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index b4c9b1e1c..2d9f3af94 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -3,13 +3,13 @@
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
+from __future__ import with_statement
"""Defines :class:`.Connection` and :class:`.Engine`.
"""
-from __future__ import with_statement
+
import sys
from .. import exc, schema, util, log, interfaces
from ..sql import expression, util as sql_util
@@ -460,7 +460,7 @@ class Connection(Connectable):
try:
self.engine.dialect.do_begin(self.connection)
- except Exception, e:
+ except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
def _rollback_impl(self):
@@ -473,7 +473,7 @@ class Connection(Connectable):
try:
self.engine.dialect.do_rollback(self.connection)
self.__transaction = None
- except Exception, e:
+ except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
else:
self.__transaction = None
@@ -487,7 +487,7 @@ class Connection(Connectable):
try:
self.engine.dialect.do_commit(self.connection)
self.__transaction = None
- except Exception, e:
+ except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
def _savepoint_impl(self, name=None):
@@ -688,7 +688,7 @@ class Connection(Connectable):
dialect = self.dialect
ctx = dialect.execution_ctx_cls._init_default(
dialect, self, conn)
- except Exception, e:
+ except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
ret = ctx._exec_default(default, None)
@@ -734,6 +734,8 @@ class Connection(Connectable):
distilled_params = _distill_params(multiparams, params)
if distilled_params:
+ # note this is usually dict but we support RowProxy
+ # as well; but dict.keys() as an iterator is OK
keys = distilled_params[0].keys()
else:
keys = []
@@ -822,7 +824,7 @@ class Connection(Connectable):
conn = self._revalidate_connection()
context = constructor(dialect, self, conn, *args)
- except Exception, e:
+ except Exception as e:
self._handle_dbapi_exception(e,
str(statement), parameters,
None, None)
@@ -865,7 +867,7 @@ class Connection(Connectable):
statement,
parameters,
context)
- except Exception, e:
+ except Exception as e:
self._handle_dbapi_exception(
e,
statement,
@@ -939,7 +941,7 @@ class Connection(Connectable):
cursor,
statement,
parameters)
- except Exception, e:
+ except Exception as e:
self._handle_dbapi_exception(
e,
statement,
@@ -954,17 +956,11 @@ class Connection(Connectable):
"""
try:
cursor.close()
- except Exception, e:
- try:
- ex_text = str(e)
- except TypeError:
- ex_text = repr(e)
- if not self.closed:
- self.connection._logger.warn(
- "Error closing cursor: %s", ex_text)
-
- if isinstance(e, (SystemExit, KeyboardInterrupt)):
- raise
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except Exception:
+ self.connection._logger.error(
+ "Error closing cursor", exc_info=True)
_reentrant_error = False
_is_disconnect = False
@@ -1045,7 +1041,7 @@ class Connection(Connectable):
Compiled: _execute_compiled,
schema.SchemaItem: _execute_default,
schema.DDLElement: _execute_ddl,
- basestring: _execute_text
+ util.string_types[0]: _execute_text
}
def default_schema_name(self):
diff --git a/lib/sqlalchemy/engine/ddl.py b/lib/sqlalchemy/engine/ddl.py
index c61a9d59c..6daa9be6b 100644
--- a/lib/sqlalchemy/engine/ddl.py
+++ b/lib/sqlalchemy/engine/ddl.py
@@ -52,7 +52,7 @@ class SchemaGenerator(DDLBase):
if self.tables is not None:
tables = self.tables
else:
- tables = metadata.tables.values()
+ tables = list(metadata.tables.values())
collection = [t for t in sql_util.sort_tables(tables)
if self._can_create_table(t)]
seq_coll = [s for s in metadata._sequences.values()
@@ -120,7 +120,7 @@ class SchemaDropper(DDLBase):
if self.tables is not None:
tables = self.tables
else:
- tables = metadata.tables.values()
+ tables = list(metadata.tables.values())
collection = [
t
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index dc45e12b1..91869ab75 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -59,17 +59,16 @@ class DefaultDialect(interfaces.Dialect):
# *not* the FLOAT type however.
supports_native_decimal = False
- # Py3K
- #supports_unicode_statements = True
- #supports_unicode_binds = True
- #returns_unicode_strings = True
- #description_encoding = None
- # Py2K
- supports_unicode_statements = False
- supports_unicode_binds = False
- returns_unicode_strings = False
- description_encoding = 'use_encoding'
- # end Py2K
+ if util.py3k:
+ supports_unicode_statements = True
+ supports_unicode_binds = True
+ returns_unicode_strings = True
+ description_encoding = None
+ else:
+ supports_unicode_statements = False
+ supports_unicode_binds = False
+ returns_unicode_strings = False
+ description_encoding = 'use_encoding'
name = 'default'
@@ -203,14 +202,10 @@ class DefaultDialect(interfaces.Dialect):
return None
def _check_unicode_returns(self, connection):
- # Py2K
- if self.supports_unicode_statements:
- cast_to = unicode
+ if util.py2k and not self.supports_unicode_statements:
+ cast_to = util.binary_type
else:
- cast_to = str
- # end Py2K
- # Py3K
- #cast_to = str
+ cast_to = util.text_type
def check_unicode(formatstr, type_):
cursor = connection.connection.cursor()
@@ -219,8 +214,8 @@ class DefaultDialect(interfaces.Dialect):
cursor.execute(
cast_to(
expression.select(
- [expression.cast(
- expression.literal_column(
+ [expression.cast(
+ expression.literal_column(
"'test %s returns'" % formatstr),
type_)
]).compile(dialect=self)
@@ -228,8 +223,8 @@ class DefaultDialect(interfaces.Dialect):
)
row = cursor.fetchone()
- return isinstance(row[0], unicode)
- except self.dbapi.Error, de:
+ return isinstance(row[0], util.text_type)
+ except self.dbapi.Error as de:
util.warn("Exception attempting to "
"detect unicode returns: %r" % de)
return False
@@ -375,10 +370,10 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.execution_options.update(connection._execution_options)
if not dialect.supports_unicode_statements:
- self.unicode_statement = unicode(compiled)
+ self.unicode_statement = util.text_type(compiled)
self.statement = dialect._encoder(self.unicode_statement)[0]
else:
- self.statement = self.unicode_statement = unicode(compiled)
+ self.statement = self.unicode_statement = util.text_type(compiled)
self.cursor = self.create_cursor()
self.compiled_parameters = []
@@ -416,7 +411,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.result_map = compiled.result_map
- self.unicode_statement = unicode(compiled)
+ self.unicode_statement = util.text_type(compiled)
if not dialect.supports_unicode_statements:
self.statement = self.unicode_statement.encode(
self.dialect.encoding)
@@ -521,7 +516,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.executemany = len(parameters) > 1
if not dialect.supports_unicode_statements and \
- isinstance(statement, unicode):
+ isinstance(statement, util.text_type):
self.unicode_statement = statement
self.statement = dialect._encoder(statement)[0]
else:
@@ -575,8 +570,8 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
"""
conn = self.root_connection
- if isinstance(stmt, unicode) and \
- not self.dialect.supports_unicode_statements:
+ if isinstance(stmt, util.text_type) and \
+ not self.dialect.supports_unicode_statements:
stmt = self.dialect._encoder(stmt)[0]
if self.dialect.positional:
@@ -736,7 +731,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
inputsizes.append(dbtype)
try:
self.cursor.setinputsizes(*inputsizes)
- except Exception, e:
+ except Exception as e:
self.root_connection._handle_dbapi_exception(
e, None, None, None, self)
else:
@@ -754,7 +749,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
inputsizes[key] = dbtype
try:
self.cursor.setinputsizes(**inputsizes)
- except Exception, e:
+ except Exception as e:
self.root_connection._handle_dbapi_exception(
e, None, None, None, self)
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 90f21db09..cf2caf679 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -41,8 +41,12 @@ def cache(fn, self, con, *args, **kw):
return fn(self, con, *args, **kw)
key = (
fn.__name__,
- tuple(a for a in args if isinstance(a, basestring)),
- tuple((k, v) for k, v in kw.iteritems() if isinstance(v, (basestring, int, float)))
+ tuple(a for a in args if isinstance(a, util.string_types)),
+ tuple((k, v) for k, v in kw.items() if
+ isinstance(v,
+ util.string_types + util.int_types + (float, )
+ )
+ )
)
ret = info_cache.get(key)
if ret is None:
@@ -381,16 +385,15 @@ class Inspector(object):
# table.kwargs will need to be passed to each reflection method. Make
# sure keywords are strings.
tblkw = table.kwargs.copy()
- for (k, v) in tblkw.items():
+ for (k, v) in list(tblkw.items()):
del tblkw[k]
tblkw[str(k)] = v
- # Py2K
- if isinstance(schema, str):
- schema = schema.decode(dialect.encoding)
- if isinstance(table_name, str):
- table_name = table_name.decode(dialect.encoding)
- # end Py2K
+ if util.py2k:
+ if isinstance(schema, str):
+ schema = schema.decode(dialect.encoding)
+ if isinstance(table_name, str):
+ table_name = table_name.decode(dialect.encoding)
# columns
found_table = False
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index 88930081e..65ce3b742 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -8,7 +8,7 @@
and :class:`.RowProxy."""
-from itertools import izip
+
from .. import exc, types, util
from ..sql import expression
import collections
@@ -55,7 +55,7 @@ except ImportError:
return list(self)
def __iter__(self):
- for processor, value in izip(self._processors, self._row):
+ for processor, value in zip(self._processors, self._row):
if processor is None:
yield value
else:
@@ -72,7 +72,7 @@ except ImportError:
except TypeError:
if isinstance(key, slice):
l = []
- for processor, value in izip(self._processors[key],
+ for processor, value in zip(self._processors[key],
self._row[key]):
if processor is None:
l.append(value)
@@ -93,7 +93,7 @@ except ImportError:
def __getattr__(self, name):
try:
return self[name]
- except KeyError, e:
+ except KeyError as e:
raise AttributeError(e.args[0])
@@ -142,7 +142,7 @@ class RowProxy(BaseRowProxy):
def items(self):
"""Return a list of tuples, each tuple containing a key/value pair."""
# TODO: no coverage here
- return [(key, self[key]) for key in self.iterkeys()]
+ return [(key, self[key]) for key in self.keys()]
def keys(self):
"""Return the list of keys as strings represented by this RowProxy."""
@@ -274,7 +274,7 @@ class ResultMetaData(object):
def _key_fallback(self, key, raiseerr=True):
map = self._keymap
result = None
- if isinstance(key, basestring):
+ if isinstance(key, util.string_types):
result = map.get(key if self.case_sensitive else key.lower())
# fallback for targeting a ColumnElement to a textual expression
# this is a rare use case which only occurs when matching text()
@@ -328,8 +328,8 @@ class ResultMetaData(object):
return {
'_pickled_keymap': dict(
(key, index)
- for key, (processor, obj, index) in self._keymap.iteritems()
- if isinstance(key, (basestring, int))
+ for key, (processor, obj, index) in self._keymap.items()
+ if isinstance(key, util.string_types + util.int_types)
),
'keys': self.keys,
"case_sensitive": self.case_sensitive,
@@ -338,9 +338,9 @@ class ResultMetaData(object):
def __setstate__(self, state):
# the row has been processed at pickling time so we don't need any
# processor anymore
- self._processors = [None for _ in xrange(len(state['keys']))]
+ self._processors = [None for _ in range(len(state['keys']))]
self._keymap = keymap = {}
- for key, index in state['_pickled_keymap'].iteritems():
+ for key, index in state['_pickled_keymap'].items():
# not preserving "obj" here, unfortunately our
# proxy comparison fails with the unpickle
keymap[key] = (None, None, index)
@@ -440,7 +440,7 @@ class ResultProxy(object):
"""
try:
return self.context.rowcount
- except Exception, e:
+ except Exception as e:
self.connection._handle_dbapi_exception(
e, None, None, self.cursor, self.context)
@@ -462,7 +462,7 @@ class ResultProxy(object):
"""
try:
return self._saved_cursor.lastrowid
- except Exception, e:
+ except Exception as e:
self.connection._handle_dbapi_exception(
e, None, None,
self._saved_cursor, self.context)
@@ -746,7 +746,7 @@ class ResultProxy(object):
l = self.process_rows(self._fetchall_impl())
self.close()
return l
- except Exception, e:
+ except Exception as e:
self.connection._handle_dbapi_exception(
e, None, None,
self.cursor, self.context)
@@ -765,7 +765,7 @@ class ResultProxy(object):
if len(l) == 0:
self.close()
return l
- except Exception, e:
+ except Exception as e:
self.connection._handle_dbapi_exception(
e, None, None,
self.cursor, self.context)
@@ -784,7 +784,7 @@ class ResultProxy(object):
else:
self.close()
return None
- except Exception, e:
+ except Exception as e:
self.connection._handle_dbapi_exception(
e, None, None,
self.cursor, self.context)
@@ -800,7 +800,7 @@ class ResultProxy(object):
try:
row = self._fetchone_impl()
- except Exception, e:
+ except Exception as e:
self.connection._handle_dbapi_exception(
e, None, None,
self.cursor, self.context)
@@ -966,9 +966,9 @@ class BufferedColumnResultProxy(ResultProxy):
# constructed.
metadata._orig_processors = metadata._processors
# replace the all type processors by None processors.
- metadata._processors = [None for _ in xrange(len(metadata.keys))]
+ metadata._processors = [None for _ in range(len(metadata.keys))]
keymap = {}
- for k, (func, obj, index) in metadata._keymap.iteritems():
+ for k, (func, obj, index) in metadata._keymap.items():
keymap[k] = (None, obj, index)
self._metadata._keymap = keymap
@@ -989,7 +989,7 @@ class BufferedColumnResultProxy(ResultProxy):
if size is None:
return self.fetchall()
l = []
- for i in xrange(size):
+ for i in range(size):
row = self.fetchone()
if row is None:
break
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index 4c81df8f0..c65986ca2 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -78,20 +78,14 @@ class DefaultEngineStrategy(EngineStrategy):
def connect():
try:
return dialect.connect(*cargs, **cparams)
- except Exception, e:
+ except Exception as e:
invalidated = dialect.is_disconnect(e, None, None)
- # Py3K
- #raise exc.DBAPIError.instance(None, None,
- # e, dialect.dbapi.Error,
- # connection_invalidated=invalidated
- #) from e
- # Py2K
- import sys
- raise exc.DBAPIError.instance(
- None, None, e, dialect.dbapi.Error,
- connection_invalidated=invalidated
- ), None, sys.exc_info()[2]
- # end Py2K
+ util.raise_from_cause(
+ exc.DBAPIError.instance(None, None,
+ e, dialect.dbapi.Error,
+ connection_invalidated=invalidated
+ )
+ )
creator = kwargs.pop('creator', connect)
diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py
index c4931b48c..ed5729eea 100644
--- a/lib/sqlalchemy/engine/url.py
+++ b/lib/sqlalchemy/engine/url.py
@@ -14,7 +14,6 @@ be used directly and is also accepted directly by ``create_engine()``.
"""
import re
-import urllib
from .. import exc, util
from . import Dialect
@@ -67,7 +66,7 @@ class URL(object):
if self.username is not None:
s += self.username
if self.password is not None:
- s += ':' + urllib.quote_plus(self.password)
+ s += ':' + util.quote_plus(self.password)
s += "@"
if self.host is not None:
s += self.host
@@ -76,7 +75,7 @@ class URL(object):
if self.database is not None:
s += '/' + self.database
if self.query:
- keys = self.query.keys()
+ keys = list(self.query)
keys.sort()
s += '?' + "&".join("%s=%s" % (k, self.query[k]) for k in keys)
return s
@@ -150,7 +149,7 @@ def make_url(name_or_url):
existing URL object is passed, just returns the object.
"""
- if isinstance(name_or_url, basestring):
+ if isinstance(name_or_url, util.string_types):
return _parse_rfc1738_args(name_or_url)
else:
return name_or_url
@@ -177,17 +176,15 @@ def _parse_rfc1738_args(name):
tokens = components['database'].split('?', 2)
components['database'] = tokens[0]
query = (len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None
- # Py2K
- if query is not None:
+ if util.py2k and query is not None:
query = dict((k.encode('ascii'), query[k]) for k in query)
- # end Py2K
else:
query = None
components['query'] = query
if components['password'] is not None:
components['password'] = \
- urllib.unquote_plus(components['password'])
+ util.unquote_plus(components['password'])
name = components.pop('name')
return URL(name, **components)
diff --git a/lib/sqlalchemy/event.py b/lib/sqlalchemy/event.py
index f28f19ee9..bfd027ead 100644
--- a/lib/sqlalchemy/event.py
+++ b/lib/sqlalchemy/event.py
@@ -200,12 +200,9 @@ def _remove_dispatcher(cls):
if not _registrars[k]:
del _registrars[k]
-
-class Events(object):
+class Events(util.with_metaclass(_EventMeta, object)):
"""Define event listening functions for a particular target type."""
- __metaclass__ = _EventMeta
-
@classmethod
def _accept_with(cls, target):
# Mapper, ClassManager, Session override this to
@@ -377,9 +374,11 @@ class _EmptyListener(object):
def __iter__(self):
return iter(self.parent_listeners)
- def __nonzero__(self):
+ def __bool__(self):
return bool(self.parent_listeners)
+ __nonzero__ = __bool__
+
class _CompoundListener(object):
_exec_once = False
@@ -414,9 +413,10 @@ class _CompoundListener(object):
def __iter__(self):
return chain(self.parent_listeners, self.listeners)
- def __nonzero__(self):
+ def __bool__(self):
return bool(self.listeners or self.parent_listeners)
+ __nonzero__ = __bool__
class _ListenerCollection(_CompoundListener):
"""Instance-level attributes on instances of :class:`._Dispatch`.
diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py
index 0ce5393be..f5dc1119d 100644
--- a/lib/sqlalchemy/exc.py
+++ b/lib/sqlalchemy/exc.py
@@ -285,7 +285,7 @@ class DBAPIError(StatementError):
text = str(orig)
except (KeyboardInterrupt, SystemExit):
raise
- except Exception, e:
+ except Exception as e:
text = 'Error in str() of DB-API-generated exception: ' + str(e)
StatementError.__init__(
self,
diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py
index 252efcb42..0482a9205 100644
--- a/lib/sqlalchemy/ext/associationproxy.py
+++ b/lib/sqlalchemy/ext/associationproxy.py
@@ -475,9 +475,11 @@ class _AssociationCollection(object):
def __len__(self):
return len(self.col)
- def __nonzero__(self):
+ def __bool__(self):
return bool(self.col)
+ __nonzero__ = __bool__
+
def __getstate__(self):
return {'parent': self.parent, 'lazy_collection': self.lazy_collection}
@@ -514,7 +516,7 @@ class _AssociationList(_AssociationCollection):
stop = index.stop
step = index.step or 1
- rng = range(index.start or 0, stop, step)
+ rng = list(range(index.start or 0, stop, step))
if step == 1:
for i in rng:
del self[index.start]
@@ -569,7 +571,7 @@ class _AssociationList(_AssociationCollection):
def count(self, value):
return sum([1 for _ in
- itertools.ifilter(lambda v: v == value, iter(self))])
+ util.itertools_filter(lambda v: v == value, iter(self))])
def extend(self, values):
for v in values:
@@ -668,8 +670,8 @@ class _AssociationList(_AssociationCollection):
def __hash__(self):
raise TypeError("%s objects are unhashable" % type(self).__name__)
- for func_name, func in locals().items():
- if (util.callable(func) and func.func_name == func_name and
+ for func_name, func in list(locals().items()):
+ if (util.callable(func) and func.__name__ == func_name and
not func.__doc__ and hasattr(list, func_name)):
func.__doc__ = getattr(list, func_name).__doc__
del func_name, func
@@ -711,7 +713,7 @@ class _AssociationDict(_AssociationCollection):
return key in self.col
def __iter__(self):
- return self.col.iterkeys()
+ return iter(self.col.keys())
def clear(self):
self.col.clear()
@@ -756,24 +758,27 @@ class _AssociationDict(_AssociationCollection):
def keys(self):
return self.col.keys()
- def iterkeys(self):
- return self.col.iterkeys()
+ if util.py2k:
+ def iteritems(self):
+ return ((key, self._get(self.col[key])) for key in self.col)
- def values(self):
- return [self._get(member) for member in self.col.values()]
+ def itervalues(self):
+ return (self._get(self.col[key]) for key in self.col)
- def itervalues(self):
- for key in self.col:
- yield self._get(self.col[key])
- raise StopIteration
+ def iterkeys(self):
+ return self.col.iterkeys()
- def items(self):
- return [(k, self._get(self.col[k])) for k in self]
+ def values(self):
+ return [self._get(member) for member in self.col.values()]
- def iteritems(self):
- for key in self.col:
- yield (key, self._get(self.col[key]))
- raise StopIteration
+ def items(self):
+ return [(k, self._get(self.col[k])) for k in self]
+ else:
+ def items(self):
+ return ((key, self._get(self.col[key])) for key in self.col)
+
+ def values(self):
+ return (self._get(self.col[key]) for key in self.col)
def pop(self, key, default=_NotProvided):
if default is _NotProvided:
@@ -816,8 +821,8 @@ class _AssociationDict(_AssociationCollection):
def __hash__(self):
raise TypeError("%s objects are unhashable" % type(self).__name__)
- for func_name, func in locals().items():
- if (util.callable(func) and func.func_name == func_name and
+ for func_name, func in list(locals().items()):
+ if (util.callable(func) and func.__name__ == func_name and
not func.__doc__ and hasattr(dict, func_name)):
func.__doc__ = getattr(dict, func_name).__doc__
del func_name, func
@@ -838,12 +843,14 @@ class _AssociationSet(_AssociationCollection):
def __len__(self):
return len(self.col)
- def __nonzero__(self):
+ def __bool__(self):
if self.col:
return True
else:
return False
+ __nonzero__ = __bool__
+
def __contains__(self, value):
for member in self.col:
# testlib.pragma exempt:__eq__
@@ -1014,8 +1021,8 @@ class _AssociationSet(_AssociationCollection):
def __hash__(self):
raise TypeError("%s objects are unhashable" % type(self).__name__)
- for func_name, func in locals().items():
- if (util.callable(func) and func.func_name == func_name and
+ for func_name, func in list(locals().items()):
+ if (util.callable(func) and func.__name__ == func_name and
not func.__doc__ and hasattr(set, func_name)):
func.__doc__ = getattr(set, func_name).__doc__
del func_name, func
diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py
index ee2f0134a..5a2b88db4 100644
--- a/lib/sqlalchemy/ext/declarative/base.py
+++ b/lib/sqlalchemy/ext/declarative/base.py
@@ -173,7 +173,7 @@ def _as_declarative(cls, classname, dict_):
# extract columns from the class dict
declared_columns = set()
- for key, c in our_stuff.iteritems():
+ for key, c in list(our_stuff.items()):
if isinstance(c, (ColumnProperty, CompositeProperty)):
for col in c.columns:
if isinstance(col, Column) and \
@@ -354,7 +354,7 @@ class _MapperConfig(object):
# in which case the mapper makes this combination).
# See if the superclass has a similar column property.
# If so, join them together.
- for k, col in properties.items():
+ for k, col in list(properties.items()):
if not isinstance(col, expression.ColumnElement):
continue
if k in inherited_mapper._props:
diff --git a/lib/sqlalchemy/ext/declarative/clsregistry.py b/lib/sqlalchemy/ext/declarative/clsregistry.py
index 89975716d..95aba93fa 100644
--- a/lib/sqlalchemy/ext/declarative/clsregistry.py
+++ b/lib/sqlalchemy/ext/declarative/clsregistry.py
@@ -255,7 +255,7 @@ def _resolver(cls, prop):
return x.cls
else:
return x
- except NameError, n:
+ except NameError as n:
raise exc.InvalidRequestError(
"When initializing mapper %s, expression %r failed to "
"locate a name (%r). If this is a class name, consider "
@@ -275,14 +275,14 @@ def _deferred_relationship(cls, prop):
for attr in ('argument', 'order_by', 'primaryjoin', 'secondaryjoin',
'secondary', '_user_defined_foreign_keys', 'remote_side'):
v = getattr(prop, attr)
- if isinstance(v, basestring):
+ if isinstance(v, str):
setattr(prop, attr, resolve_arg(v))
if prop.backref and isinstance(prop.backref, tuple):
key, kwargs = prop.backref
for attr in ('primaryjoin', 'secondaryjoin', 'secondary',
'foreign_keys', 'remote_side', 'order_by'):
- if attr in kwargs and isinstance(kwargs[attr], basestring):
+ if attr in kwargs and isinstance(kwargs[attr], str):
kwargs[attr] = resolve_arg(kwargs[attr])
return prop
diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py
index ffdd971a0..24d405e39 100644
--- a/lib/sqlalchemy/ext/orderinglist.py
+++ b/lib/sqlalchemy/ext/orderinglist.py
@@ -324,7 +324,7 @@ class OrderingList(list):
if stop < 0:
stop += len(self)
- for i in xrange(start, stop, step):
+ for i in range(start, stop, step):
self.__setitem__(i, entity[i])
else:
self._order_entity(index, entity, True)
@@ -334,7 +334,6 @@ class OrderingList(list):
super(OrderingList, self).__delitem__(index)
self._reorder()
- # Py2K
def __setslice__(self, start, end, values):
super(OrderingList, self).__setslice__(start, end, values)
self._reorder()
@@ -342,13 +341,12 @@ class OrderingList(list):
def __delslice__(self, start, end):
super(OrderingList, self).__delslice__(start, end)
self._reorder()
- # end Py2K
def __reduce__(self):
return _reconstitute, (self.__class__, self.__dict__, list(self))
- for func_name, func in locals().items():
- if (util.callable(func) and func.func_name == func_name and
+ for func_name, func in list(locals().items()):
+ if (util.callable(func) and func.__name__ == func_name and
not func.__doc__ and hasattr(list, func_name)):
func.__doc__ = getattr(list, func_name).__doc__
del func_name, func
diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py
index 5a3fb5937..8abd1fdf3 100644
--- a/lib/sqlalchemy/ext/serializer.py
+++ b/lib/sqlalchemy/ext/serializer.py
@@ -58,24 +58,9 @@ from ..orm.interfaces import MapperProperty
from ..orm.attributes import QueryableAttribute
from .. import Table, Column
from ..engine import Engine
-from ..util import pickle
+from ..util import pickle, byte_buffer, b64encode, b64decode
import re
-import base64
-# Py3K
-#from io import BytesIO as byte_buffer
-# Py2K
-from cStringIO import StringIO as byte_buffer
-# end Py2K
-
-# Py3K
-#def b64encode(x):
-# return base64.b64encode(x).decode('ascii')
-#def b64decode(x):
-# return base64.b64decode(x.encode('ascii'))
-# Py2K
-b64encode = base64.b64encode
-b64decode = base64.b64decode
-# end Py2K
+
__all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index 35315d5d1..1173d5d09 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -1661,7 +1661,7 @@ def contains_eager(*keys, **kwargs):
alias = kwargs.pop('alias', None)
if kwargs:
raise exc.ArgumentError(
- 'Invalid kwargs for contains_eager: %r' % kwargs.keys())
+ 'Invalid kwargs for contains_eager: %r' % list(kwargs.keys()))
return strategies.EagerLazyOption(keys, lazy='joined',
propagate_to_loaders=False, chained=True), \
strategies.LoadEagerFromAliasOption(keys, alias=alias, chained=True)
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index 3eda127fd..bfba695b8 100644
--- a/lib/sqlalchemy/orm/attributes.py
+++ b/lib/sqlalchemy/orm/attributes.py
@@ -386,8 +386,8 @@ def create_proxied_attribute(descriptor):
return getattr(self.comparator, attribute)
except AttributeError:
raise AttributeError(
- 'Neither %r object nor %r object associated with %s '
- 'has an attribute %r' % (
+ 'Neither %r object nor %r object associated with %s '
+ 'has an attribute %r' % (
type(descriptor).__name__,
type(self.comparator).__name__,
self,
@@ -866,7 +866,7 @@ class CollectionAttributeImpl(AttributeImpl):
self.collection_factory = typecallable
def __copy(self, item):
- return [y for y in list(collections.collection_adapter(item))]
+ return [y for y in collections.collection_adapter(item)]
def get_history(self, state, dict_, passive=PASSIVE_OFF):
current = self.get(state, dict_, passive=passive)
@@ -1214,8 +1214,9 @@ class History(History):
"""
- def __nonzero__(self):
+ def __bool__(self):
return self != HISTORY_BLANK
+ __nonzero__ = __bool__
def empty(self):
"""Return True if this :class:`.History` has no changes
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index 5691acfff..03917d112 100644
--- a/lib/sqlalchemy/orm/collections.py
+++ b/lib/sqlalchemy/orm/collections.py
@@ -657,11 +657,10 @@ class CollectionAdapter(object):
if getattr(obj, '_sa_adapter', None) is not None:
return getattr(obj, '_sa_adapter')
elif setting_type == dict:
- # Py3K
- #return obj.values()
- # Py2K
- return getattr(obj, 'itervalues', getattr(obj, 'values'))()
- # end Py2K
+ if util.py3k:
+ return obj.values()
+ else:
+ return getattr(obj, 'itervalues', getattr(obj, 'values'))()
else:
return iter(obj)
@@ -705,16 +704,17 @@ class CollectionAdapter(object):
def __iter__(self):
"""Iterate over entities in the collection."""
- # Py3K requires iter() here
return iter(getattr(self._data(), '_sa_iterator')())
def __len__(self):
"""Count entities in the collection."""
return len(list(getattr(self._data(), '_sa_iterator')()))
- def __nonzero__(self):
+ def __bool__(self):
return True
+ __nonzero__ = __bool__
+
def fire_append_event(self, item, initiator=None):
"""Notify that a entity has entered the collection.
@@ -1094,14 +1094,14 @@ def _list_decorators():
stop += len(self)
if step == 1:
- for i in xrange(start, stop, step):
+ for i in range(start, stop, step):
if len(self) > start:
del self[start]
for i, item in enumerate(value):
self.insert(i + start, item)
else:
- rng = range(start, stop, step)
+ rng = list(range(start, stop, step))
if len(value) != len(rng):
raise ValueError(
"attempt to assign sequence of size %s to "
@@ -1128,24 +1128,23 @@ def _list_decorators():
_tidy(__delitem__)
return __delitem__
- # Py2K
- def __setslice__(fn):
- def __setslice__(self, start, end, values):
- for value in self[start:end]:
- __del(self, value)
- values = [__set(self, value) for value in values]
- fn(self, start, end, values)
- _tidy(__setslice__)
- return __setslice__
-
- def __delslice__(fn):
- def __delslice__(self, start, end):
- for value in self[start:end]:
- __del(self, value)
- fn(self, start, end)
- _tidy(__delslice__)
- return __delslice__
- # end Py2K
+ if util.py2k:
+ def __setslice__(fn):
+ def __setslice__(self, start, end, values):
+ for value in self[start:end]:
+ __del(self, value)
+ values = [__set(self, value) for value in values]
+ fn(self, start, end, values)
+ _tidy(__setslice__)
+ return __setslice__
+
+ def __delslice__(fn):
+ def __delslice__(self, start, end):
+ for value in self[start:end]:
+ __del(self, value)
+ fn(self, start, end)
+ _tidy(__delslice__)
+ return __delslice__
def extend(fn):
def extend(self, iterable):
@@ -1251,7 +1250,7 @@ def _dict_decorators():
def update(self, __other=Unspecified, **kw):
if __other is not Unspecified:
if hasattr(__other, 'keys'):
- for key in __other.keys():
+ for key in list(__other):
if (key not in self or
self[key] is not __other[key]):
self[key] = __other[key]
@@ -1270,11 +1269,7 @@ def _dict_decorators():
l.pop('Unspecified')
return l
-if util.py3k_warning:
- _set_binop_bases = (set, frozenset)
-else:
- import sets
- _set_binop_bases = (set, frozenset, sets.BaseSet)
+_set_binop_bases = (set, frozenset)
def _set_binops_check_strict(self, obj):
@@ -1467,11 +1462,8 @@ __interfaces = {
),
# decorators are required for dicts and object collections.
- # Py3K
- #dict: ({'iterator': 'values'}, _dict_decorators()),
- # Py2K
- dict: ({'iterator': 'itervalues'}, _dict_decorators()),
- # end Py2K
+ dict: ({'iterator': 'values'}, _dict_decorators()) if util.py3k
+ else ({'iterator': 'itervalues'}, _dict_decorators()),
}
diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py
index 1969bd03b..86b445bb6 100644
--- a/lib/sqlalchemy/orm/descriptor_props.py
+++ b/lib/sqlalchemy/orm/descriptor_props.py
@@ -184,7 +184,7 @@ class CompositeProperty(DescriptorProperty):
def _init_props(self):
self.props = props = []
for attr in self.attrs:
- if isinstance(attr, basestring):
+ if isinstance(attr, str):
prop = self.parent.get_property(attr)
elif isinstance(attr, schema.Column):
prop = self.parent._columntoproperty[attr]
diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py
index 0844e2f72..458eab7a1 100644
--- a/lib/sqlalchemy/orm/evaluator.py
+++ b/lib/sqlalchemy/orm/evaluator.py
@@ -13,9 +13,9 @@ class UnevaluatableError(Exception):
_straight_ops = set(getattr(operators, op)
for op in ('add', 'mul', 'sub',
- # Py2K
- 'div',
- # end Py2K
+# start Py2K
+# 'div',
+# end Py2K
'mod', 'truediv',
'lt', 'le', 'ne', 'gt', 'ge', 'eq'))
@@ -40,6 +40,12 @@ class EvaluatorCompiler(object):
def visit_null(self, clause):
return lambda obj: None
+ def visit_false(self, clause):
+ return lambda obj: False
+
+ def visit_true(self, clause):
+ return lambda obj: True
+
def visit_column(self, clause):
if 'parentmapper' in clause._annotations:
key = clause._annotations['parentmapper'].\
@@ -50,7 +56,7 @@ class EvaluatorCompiler(object):
return lambda obj: get_corresponding_attr(obj)
def visit_clauselist(self, clause):
- evaluators = map(self.process, clause.clauses)
+ evaluators = list(map(self.process, clause.clauses))
if clause.operator is operators.or_:
def evaluate(obj):
has_null = False
@@ -79,8 +85,8 @@ class EvaluatorCompiler(object):
return evaluate
def visit_binary(self, clause):
- eval_left, eval_right = map(self.process,
- [clause.left, clause.right])
+ eval_left, eval_right = list(map(self.process,
+ [clause.left, clause.right]))
operator = clause.operator
if operator is operators.is_:
def evaluate(obj):
diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py
index 01d34428e..d0234a1d3 100644
--- a/lib/sqlalchemy/orm/identity.py
+++ b/lib/sqlalchemy/orm/identity.py
@@ -6,7 +6,7 @@
import weakref
from . import attributes
-
+from .. import util
class IdentityMap(dict):
def __init__(self):
@@ -75,7 +75,7 @@ class WeakInstanceDict(IdentityMap):
state = dict.__getitem__(self, key)
o = state.obj()
if o is None:
- raise KeyError, key
+ raise KeyError(key)
return o
def __contains__(self, key):
@@ -152,30 +152,27 @@ class WeakInstanceDict(IdentityMap):
return result
- # Py3K
- #def items(self):
- # return iter(self._items())
- #
- #def values(self):
- # return iter(self._values())
- # Py2K
- items = _items
+ if util.py2k:
+ items = _items
+ values = _values
- def iteritems(self):
- return iter(self.items())
+ def iteritems(self):
+ return iter(self.items())
- values = _values
+ def itervalues(self):
+ return iter(self.values())
+ else:
+ def items(self):
+ return iter(self._items())
- def itervalues(self):
- return iter(self.values())
- # end Py2K
+ def values(self):
+ return iter(self._values())
def all_states(self):
- # Py3K
- # return list(dict.values(self))
- # Py2K
- return dict.values(self)
- # end Py2K
+ if util.py2k:
+ return dict.values(self)
+ else:
+ return list(dict.values(self))
def discard(self, state):
st = dict.get(self, state.key, None)
@@ -189,7 +186,7 @@ class WeakInstanceDict(IdentityMap):
class StrongInstanceDict(IdentityMap):
def all_states(self):
- return [attributes.instance_state(o) for o in self.itervalues()]
+ return [attributes.instance_state(o) for o in self.values()]
def contains_state(self, state):
return (
diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py
index 0e71494c4..f2d0df43f 100644
--- a/lib/sqlalchemy/orm/instrumentation.py
+++ b/lib/sqlalchemy/orm/instrumentation.py
@@ -279,7 +279,7 @@ class ClassManager(dict):
@property
def attributes(self):
- return self.itervalues()
+ return iter(self.values())
## InstanceState management
@@ -325,10 +325,12 @@ class ClassManager(dict):
"""TODO"""
return self.get_impl(key).hasparent(state, optimistic=optimistic)
- def __nonzero__(self):
+ def __bool__(self):
"""All ClassManagers are non-zero regardless of attribute state."""
return True
+ __nonzero__ = __bool__
+
def __repr__(self):
return '<%s of %r at %x>' % (
self.__class__.__name__, self.class_, id(self))
@@ -444,21 +446,23 @@ def __init__(%(apply_pos)s):
func_vars = util.format_argspec_init(original__init__, grouped=False)
func_text = func_body % func_vars
- # Py3K
- #func_defaults = getattr(original__init__, '__defaults__', None)
- #func_kw_defaults = getattr(original__init__, '__kwdefaults__', None)
- # Py2K
- func = getattr(original__init__, 'im_func', original__init__)
- func_defaults = getattr(func, 'func_defaults', None)
- # end Py2K
+# start Py3K
+ func_defaults = getattr(original__init__, '__defaults__', None)
+ func_kw_defaults = getattr(original__init__, '__kwdefaults__', None)
+# end Py3K
+# start Py2K
+# func = getattr(original__init__, 'im_func', original__init__)
+# func_defaults = getattr(func, 'func_defaults', None)
+# end Py2K
env = locals().copy()
- exec func_text in env
+ exec(func_text, env)
__init__ = env['__init__']
__init__.__doc__ = original__init__.__doc__
if func_defaults:
- __init__.func_defaults = func_defaults
- # Py3K
- #if func_kw_defaults:
- # __init__.__kwdefaults__ = func_kw_defaults
+ __init__.__defaults__ = func_defaults
+# start Py3K
+ if func_kw_defaults:
+ __init__.__kwdefaults__ = func_kw_defaults
+# end Py3K
return __init__
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index 70743624c..396f234c4 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -15,6 +15,7 @@ Other than the deprecated extensions, this module and the
classes within should be considered mostly private.
"""
+
from __future__ import absolute_import
from .. import exc as sa_exc, util, inspect
@@ -659,7 +660,7 @@ class PropertyOption(MapperOption):
tokens = deque(self.key)
while tokens:
token = tokens.popleft()
- if isinstance(token, basestring):
+ if isinstance(token, str):
# wildcard token
if token.endswith(':*'):
return [path.token(token)]
@@ -744,7 +745,7 @@ class PropertyOption(MapperOption):
ext_info.mapper, aliased=True,
_use_mapper_path=True)
ext_info = inspect(ac)
- path.set(query, "path_with_polymorphic", ext_info)
+ path.set(query._attributes, "path_with_polymorphic", ext_info)
else:
path_element = mapper = getattr(prop, 'mapper', None)
if mapper is None and tokens:
@@ -775,13 +776,13 @@ class StrategizedOption(PropertyOption):
if self.chained:
for path in paths:
path.set(
- query,
+ query._attributes,
"loaderstrategy",
strategy
)
else:
paths[-1].set(
- query,
+ query._attributes,
"loaderstrategy",
strategy
)
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index 5937197fd..e1f4d1b7c 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -11,7 +11,7 @@ the functions here are called primarily by Query, Mapper,
as well as some of the attribute loading strategies.
"""
-from __future__ import absolute_import
+
from .. import util
from . import attributes, exc as orm_exc, state as statelib
@@ -47,11 +47,11 @@ def instances(query, cursor, context):
query._entities[0].mapper.dispatch.append_result
(process, labels) = \
- zip(*[
+ list(zip(*[
query_entity.row_processor(query,
context, custom_rows)
for query_entity in query._entities
- ])
+ ]))
while True:
context.progress = {}
@@ -84,11 +84,11 @@ def instances(query, cursor, context):
context.progress.pop(context.refresh_state)
statelib.InstanceState._commit_all_states(
- context.progress.items(),
+ list(context.progress.items()),
session.identity_map
)
- for state, (dict_, attrs) in context.partials.iteritems():
+ for state, (dict_, attrs) in context.partials.items():
state._commit(dict_, attrs)
for row in rows:
@@ -507,7 +507,7 @@ def _populators(mapper, context, path, row, adapter,
pops = (new_populators, existing_populators, delayed_populators,
eager_populators)
- for prop in mapper._props.itervalues():
+ for prop in mapper._props.values():
for i, pop in enumerate(prop.create_row_processor(
context,
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index c08d91b57..285d338de 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -14,6 +14,7 @@ available in :class:`~sqlalchemy.orm.`.
"""
from __future__ import absolute_import
+
import types
import weakref
from itertools import chain
@@ -26,8 +27,8 @@ from . import instrumentation, attributes, \
from .interfaces import MapperProperty, _InspectionAttr, _MappedAttribute
from .util import _INSTRUMENTOR, _class_to_mapper, \
- _state_mapper, class_mapper, \
- PathRegistry
+ _state_mapper, class_mapper, \
+ PathRegistry
import sys
properties = util.importlater("sqlalchemy.orm", "properties")
descriptor_props = util.importlater("sqlalchemy.orm", "descriptor_props")
@@ -581,7 +582,7 @@ class Mapper(_InspectionAttr):
if with_polymorphic == '*':
self.with_polymorphic = ('*', None)
elif isinstance(with_polymorphic, (tuple, list)):
- if isinstance(with_polymorphic[0], (basestring, tuple, list)):
+ if isinstance(with_polymorphic[0], util.string_types + (tuple, list)):
self.with_polymorphic = with_polymorphic
else:
self.with_polymorphic = (with_polymorphic, None)
@@ -626,7 +627,7 @@ class Mapper(_InspectionAttr):
self.inherits._inheriting_mappers.add(self)
self.passive_updates = self.inherits.passive_updates
self._all_tables = self.inherits._all_tables
- for key, prop in mapper._props.iteritems():
+ for key, prop in mapper._props.items():
if key not in self._props and \
not self._should_exclude(key, key, local=False,
column=None):
@@ -866,12 +867,12 @@ class Mapper(_InspectionAttr):
# load custom properties
if self._init_properties:
- for key, prop in self._init_properties.iteritems():
+ for key, prop in self._init_properties.items():
self._configure_property(key, prop, False)
# pull properties from the inherited mapper if any.
if self.inherits:
- for key, prop in self.inherits._props.iteritems():
+ for key, prop in self.inherits._props.items():
if key not in self._props and \
not self._should_exclude(key, key, local=False,
column=None):
@@ -919,7 +920,7 @@ class Mapper(_InspectionAttr):
if self.polymorphic_on is not None:
setter = True
- if isinstance(self.polymorphic_on, basestring):
+ if isinstance(self.polymorphic_on, util.string_types):
# polymorphic_on specified as as string - link
# it to mapped ColumnProperty
try:
@@ -1235,7 +1236,7 @@ class Mapper(_InspectionAttr):
"""
self._log("_post_configure_properties() started")
- l = [(key, prop) for key, prop in self._props.iteritems()]
+ l = [(key, prop) for key, prop in self._props.items()]
for key, prop in l:
self._log("initialize prop %s", key)
@@ -1253,7 +1254,7 @@ class Mapper(_InspectionAttr):
using `add_property`.
"""
- for key, value in dict_of_properties.iteritems():
+ for key, value in dict_of_properties.items():
self.add_property(key, value)
def add_property(self, key, prop):
@@ -1350,7 +1351,7 @@ class Mapper(_InspectionAttr):
"""return an iterator of all MapperProperty objects."""
if _new_mappers:
configure_mappers()
- return self._props.itervalues()
+ return iter(self._props.values())
def _mappers_from_spec(self, spec, selectable):
"""given a with_polymorphic() argument, return the set of mappers it
@@ -1623,7 +1624,7 @@ class Mapper(_InspectionAttr):
if _new_mappers:
configure_mappers()
return util.ImmutableProperties(util.OrderedDict(
- (k, v) for k, v in self._props.iteritems()
+ (k, v) for k, v in self._props.items()
if isinstance(v, type_)
))
@@ -2040,7 +2041,7 @@ class Mapper(_InspectionAttr):
return fk.parent not in cols
return False
- sorted_ = sql_util.sort_tables(table_to_mapper.iterkeys(),
+ sorted_ = sql_util.sort_tables(table_to_mapper,
skip_fn=skip,
extra_dependencies=extra_dependencies)
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index e225a7c83..a773786c4 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -19,6 +19,7 @@ from .. import sql, util, exc as sa_exc, schema
from . import attributes, sync, exc as orm_exc, evaluator
from .util import _state_mapper, state_str, _attr_as_key
from ..sql import expression
+from . import loading
def save_obj(base_mapper, states, uowtransaction, single=False):
@@ -45,7 +46,7 @@ def save_obj(base_mapper, states, uowtransaction, single=False):
cached_connections = _cached_connection_dict(base_mapper)
- for table, mapper in base_mapper._sorted_tables.iteritems():
+ for table, mapper in base_mapper._sorted_tables.items():
insert = _collect_insert_commands(base_mapper, uowtransaction,
table, states_to_insert)
@@ -77,7 +78,7 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols):
base_mapper,
states, uowtransaction)
- for table, mapper in base_mapper._sorted_tables.iteritems():
+ for table, mapper in base_mapper._sorted_tables.items():
update = _collect_post_update_commands(base_mapper, uowtransaction,
table, states_to_update,
post_update_cols)
@@ -105,7 +106,7 @@ def delete_obj(base_mapper, states, uowtransaction):
table_to_mapper = base_mapper._sorted_tables
- for table in reversed(table_to_mapper.keys()):
+ for table in reversed(list(table_to_mapper.keys())):
delete = _collect_delete_commands(base_mapper, uowtransaction,
table, states_to_delete)
@@ -318,7 +319,7 @@ def _collect_update_commands(base_mapper, uowtransaction,
# history is only
# in a different table than the one
# where the version_id_col is.
- for prop in mapper._columntoproperty.itervalues():
+ for prop in mapper._columntoproperty.values():
history = attributes.get_state_history(
state, prop.key,
attributes.PASSIVE_NO_INITIALIZE)
@@ -526,7 +527,7 @@ def _emit_insert_statements(base_mapper, uowtransaction,
for (connection, pkeys, hasvalue, has_all_pks), \
records in groupby(insert,
lambda rec: (rec[4],
- rec[2].keys(),
+ list(rec[2].keys()),
bool(rec[5]),
rec[6])
):
@@ -612,7 +613,7 @@ def _emit_post_update_statements(base_mapper, uowtransaction,
# also group them into common (connection, cols) sets
# to support executemany().
for key, grouper in groupby(
- update, lambda rec: (rec[4], rec[2].keys())
+ update, lambda rec: (rec[4], list(rec[2].keys()))
):
connection = key[0]
multiparams = [params for state, state_dict,
@@ -646,7 +647,7 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
return table.delete(clause)
- for connection, del_objects in delete.iteritems():
+ for connection, del_objects in delete.items():
statement = base_mapper._memo(('delete', table), delete_stmt)
connection = cached_connections[connection]
@@ -699,7 +700,6 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
# refresh whatever has been expired.
if base_mapper.eager_defaults and state.unloaded:
state.key = base_mapper._identity_key_from_state(state)
- from . import loading
loading.load_on_ident(
uowtransaction.session.query(base_mapper),
state.key, refresh_state=state,
@@ -803,7 +803,7 @@ class BulkUD(object):
raise sa_exc.ArgumentError(
"Valid strategies for session synchronization "
"are %s" % (", ".join(sorted(repr(x)
- for x in lookup.keys()))))
+ for x in lookup))))
else:
return klass(*arg)
@@ -868,7 +868,7 @@ class BulkEvaluate(BulkUD):
#TODO: detect when the where clause is a trivial primary key match
self.matched_objects = [
obj for (cls, pk), obj in
- query.session.identity_map.iteritems()
+ query.session.identity_map.items()
if issubclass(cls, target_cls) and
eval_condition(obj)]
@@ -951,7 +951,7 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
def _additional_evaluators(self, evaluator_compiler):
self.value_evaluators = {}
- for key, value in self.values.iteritems():
+ for key, value in self.values.items():
key = _attr_as_key(key)
self.value_evaluators[key] = evaluator_compiler.process(
expression._literal_as_binds(value))
@@ -959,7 +959,7 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
def _do_post_synchronize(self):
session = self.query.session
states = set()
- evaluated_keys = self.value_evaluators.keys()
+ evaluated_keys = list(self.value_evaluators.keys())
for obj in self.matched_objects:
state, dict_ = attributes.instance_state(obj),\
attributes.instance_dict(obj)
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 9f8721de9..8c0576527 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -27,7 +27,7 @@ from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY,\
mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
NoneType = type(None)
-from descriptor_props import CompositeProperty, SynonymProperty, \
+from .descriptor_props import CompositeProperty, SynonymProperty, \
ComparableProperty, ConcreteInheritedProperty
__all__ = ['ColumnProperty', 'CompositeProperty', 'SynonymProperty',
@@ -1204,7 +1204,7 @@ class RelationshipProperty(StrategizedProperty):
if not self.is_primary():
return
if self.backref is not None and not self.back_populates:
- if isinstance(self.backref, basestring):
+ if isinstance(self.backref, str):
backref_key, kwargs = self.backref, {}
else:
backref_key, kwargs = self.backref
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index c9f3a2699..beae7aba0 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -47,7 +47,7 @@ def _generative(*assertions):
def generate(fn, *args, **kw):
self = args[0]._clone()
for assertion in assertions:
- assertion(self, fn.func_name)
+ assertion(self, fn.__name__)
fn(self, *args[1:], **kw)
return self
return generate
@@ -162,15 +162,19 @@ class Query(object):
for m in m2.iterate_to_root():
self._polymorphic_adapters[m.local_table] = adapter
- def _set_select_from(self, *obj):
+ def _set_select_from(self, obj, set_base_alias):
fa = []
select_from_alias = None
+
for from_obj in obj:
info = inspect(from_obj)
if hasattr(info, 'mapper') and \
(info.is_mapper or info.is_aliased_class):
- self._select_from_entity = from_obj
+ if set_base_alias:
+ raise sa_exc.ArgumentError(
+ "A selectable (FromClause) instance is "
+ "expected when the base alias is being set.")
fa.append(info.selectable)
elif not info.is_selectable:
raise sa_exc.ArgumentError(
@@ -179,12 +183,14 @@ class Query(object):
else:
if isinstance(from_obj, expression.SelectBase):
from_obj = from_obj.alias()
- select_from_alias = from_obj
+ if set_base_alias:
+ select_from_alias = from_obj
fa.append(from_obj)
self._from_obj = tuple(fa)
- if len(self._from_obj) == 1 and \
+ if set_base_alias and \
+ len(self._from_obj) == 1 and \
isinstance(select_from_alias, expression.Alias):
equivs = self.__all_equivs()
self._from_obj_alias = sql_util.ColumnAdapter(
@@ -953,7 +959,7 @@ class Query(object):
'_prefixes',
):
self.__dict__.pop(attr, None)
- self._set_select_from(fromclause)
+ self._set_select_from([fromclause], True)
# this enables clause adaptation for non-ORM
# expressions.
@@ -981,11 +987,7 @@ class Query(object):
"""Return a scalar result corresponding to the given
column expression."""
try:
- # Py3K
- #return self.values(column).__next__()[0]
- # Py2K
- return self.values(column).next()[0]
- # end Py2K
+ return next(self.values(column))[0]
except StopIteration:
return None
@@ -1231,7 +1233,7 @@ class Query(object):
"""
clauses = [_entity_descriptor(self._joinpoint_zero(), key) == value
- for key, value in kwargs.iteritems()]
+ for key, value in kwargs.items()]
return self.filter(sql.and_(*clauses))
@_generative(_no_statement_condition, _no_limit_offset)
@@ -1296,7 +1298,7 @@ class Query(object):
"""
- if isinstance(criterion, basestring):
+ if isinstance(criterion, util.string_types):
criterion = sql.text(criterion)
if criterion is not None and \
@@ -1655,7 +1657,7 @@ class Query(object):
kwargs.pop('from_joinpoint', False)
if kwargs:
raise TypeError("unknown arguments: %s" %
- ','.join(kwargs.iterkeys()))
+                        ','.join(kwargs))
return self._join(props,
outerjoin=False, create_aliases=aliased,
from_joinpoint=from_joinpoint)
@@ -1671,7 +1673,7 @@ class Query(object):
kwargs.pop('from_joinpoint', False)
if kwargs:
raise TypeError("unknown arguments: %s" %
- ','.join(kwargs.iterkeys()))
+ ','.join(kwargs))
return self._join(props,
outerjoin=True, create_aliases=aliased,
from_joinpoint=from_joinpoint)
@@ -1701,7 +1703,7 @@ class Query(object):
if len(keys) == 2 and \
isinstance(keys[0], (expression.FromClause,
type, AliasedClass)) and \
- isinstance(keys[1], (basestring, expression.ClauseElement,
+ isinstance(keys[1], (str, expression.ClauseElement,
interfaces.PropComparator)):
# detect 2-arg form of join and
# convert to a tuple.
@@ -1721,14 +1723,14 @@ class Query(object):
# is a little bit of legacy behavior still at work here
# which means they might be in either order. may possibly
# lock this down to (right_entity, onclause) in 0.6.
- if isinstance(arg1, (interfaces.PropComparator, basestring)):
+ if isinstance(arg1, (interfaces.PropComparator, util.string_types)):
right_entity, onclause = arg2, arg1
else:
right_entity, onclause = arg1, arg2
left_entity = prop = None
- if isinstance(onclause, basestring):
+ if isinstance(onclause, util.string_types):
left_entity = self._joinpoint_zero()
descriptor = _entity_descriptor(left_entity, onclause)
@@ -1922,7 +1924,7 @@ class Query(object):
clause = orm_join(clause,
right,
onclause, isouter=outerjoin)
- except sa_exc.ArgumentError, ae:
+ except sa_exc.ArgumentError as ae:
raise sa_exc.InvalidRequestError(
"Could not find a FROM clause to join from. "
"Tried joining to %s, but got: %s" % (right, ae))
@@ -1947,7 +1949,7 @@ class Query(object):
try:
clause = orm_join(clause, right, onclause, isouter=outerjoin)
- except sa_exc.ArgumentError, ae:
+ except sa_exc.ArgumentError as ae:
raise sa_exc.InvalidRequestError(
"Could not find a FROM clause to join from. "
"Tried joining to %s, but got: %s" % (right, ae))
@@ -1974,21 +1976,134 @@ class Query(object):
def select_from(self, *from_obj):
"""Set the FROM clause of this :class:`.Query` explicitly.
- Sending a mapped class or entity here effectively replaces the
+ :meth:`.Query.select_from` is often used in conjunction with
+ :meth:`.Query.join` in order to control which entity is selected
+ from on the "left" side of the join.
+
+ The entity or selectable object here effectively replaces the
"left edge" of any calls to :meth:`~.Query.join`, when no
joinpoint is otherwise established - usually, the default "join
point" is the leftmost entity in the :class:`~.Query` object's
list of entities to be selected.
- Mapped entities or plain :class:`~.Table` or other selectables
- can be sent here which will form the default FROM clause.
+ A typical example::
+
+ q = session.query(Address).select_from(User).\\
+ join(User.addresses).\\
+ filter(User.name == 'ed')
+
+ Which produces SQL equivalent to::
+
+ SELECT address.* FROM user
+ JOIN address ON user.id=address.user_id
+ WHERE user.name = :name_1
+
+ :param \*from_obj: collection of one or more entities to apply
+ to the FROM clause. Entities can be mapped classes,
+ :class:`.AliasedClass` objects, :class:`.Mapper` objects
+ as well as core :class:`.FromClause` elements like subqueries.
+
+ .. versionchanged:: 0.9
+ This method no longer applies the given FROM object
+ to be the selectable from which matching entities
+ select from; the :meth:`.select_entity_from` method
+ now accomplishes this. See that method for a description
+ of this behavior.
+
+ .. seealso::
+
+ :meth:`~.Query.join`
+
+ :meth:`.Query.select_entity_from`
+
+ """
+
+ self._set_select_from(from_obj, False)
+
+ @_generative(_no_clauseelement_condition)
+ def select_entity_from(self, from_obj):
+ """Set the FROM clause of this :class:`.Query` to a
+ core selectable, applying it as a replacement FROM clause
+ for corresponding mapped entities.
+
+ This method is similar to the :meth:`.Query.select_from`
+ method, in that it sets the FROM clause of the query. However,
+ where :meth:`.Query.select_from` only affects what is placed
+ in the FROM, this method also applies the given selectable
+ to replace the FROM which the selected entities would normally
+ select from.
+
+ The given ``from_obj`` must be an instance of a :class:`.FromClause`,
+ e.g. a :func:`.select` or :class:`.Alias` construct.
+
+ An example would be a :class:`.Query` that selects ``User`` entities,
+ but uses :meth:`.Query.select_entity_from` to have the entities
+ selected from a :func:`.select` construct instead of the
+ base ``user`` table::
+
+ select_stmt = select([User]).where(User.id == 7)
+
+ q = session.query(User).\\
+ select_entity_from(select_stmt).\\
+ filter(User.name == 'ed')
+
+ The query generated will select ``User`` entities directly
+ from the given :func:`.select` construct, and will be::
+
+ SELECT anon_1.id AS anon_1_id, anon_1.name AS anon_1_name
+ FROM (SELECT "user".id AS id, "user".name AS name
+ FROM "user"
+ WHERE "user".id = :id_1) AS anon_1
+ WHERE anon_1.name = :name_1
+
+ Notice above that even the WHERE criterion was "adapted" such that
+ the ``anon_1`` subquery effectively replaces all references to the
+ ``user`` table, except for the one that it refers to internally.
+
+ Compare this to :meth:`.Query.select_from`, which as of
+ version 0.9, does not affect existing entities. The
+ statement below::
+
+ q = session.query(User).\\
+ select_from(select_stmt).\\
+ filter(User.name == 'ed')
+
+ Produces SQL where both the ``user`` table as well as the
+ ``select_stmt`` construct are present as separate elements
+ in the FROM clause. No "adaptation" of the ``user`` table
+ is applied::
+
+ SELECT "user".id AS user_id, "user".name AS user_name
+ FROM "user", (SELECT "user".id AS id, "user".name AS name
+ FROM "user"
+ WHERE "user".id = :id_1) AS anon_1
+ WHERE "user".name = :name_1
+
+ :meth:`.Query.select_entity_from` maintains an older
+ behavior of :meth:`.Query.select_from`. In modern usage,
+ similar results can also be achieved using :func:`.aliased`::
+
+ select_stmt = select([User]).where(User.id == 7)
+ user_from_select = aliased(User, select_stmt.alias())
+
+ q = session.query(user_from_select)
+
+ :param from_obj: a :class:`.FromClause` object that will replace
+ the FROM clause of this :class:`.Query`.
+
+ .. seealso::
+
+ :meth:`.Query.select_from`
- See the example in :meth:`~.Query.join` for a typical
- usage of :meth:`~.Query.select_from`.
+ .. versionadded:: 0.8
+ :meth:`.Query.select_entity_from` was added to specify
+ the specific behavior of entity replacement, however
+ the :meth:`.Query.select_from` maintains this behavior
+ as well until 0.9.
"""
- self._set_select_from(*from_obj)
+ self._set_select_from([from_obj], True)
def __getitem__(self, item):
if isinstance(item, slice):
@@ -2115,7 +2230,7 @@ class Query(object):
appropriate to the entity class represented by this ``Query``.
"""
- if isinstance(statement, basestring):
+ if isinstance(statement, util.string_types):
statement = sql.text(statement)
if not isinstance(statement,
@@ -2697,7 +2812,7 @@ class _QueryEntity(object):
def __new__(cls, *args, **kwargs):
if cls is _QueryEntity:
entity = args[1]
- if not isinstance(entity, basestring) and \
+ if not isinstance(entity, util.string_types) and \
_is_mapped_class(entity):
cls = _MapperEntity
else:
@@ -2905,7 +3020,7 @@ class _ColumnEntity(_QueryEntity):
self.expr = column
self.namespace = namespace
- if isinstance(column, basestring):
+ if isinstance(column, util.string_types):
column = sql.literal_column(column)
self._label_name = column.name
elif isinstance(column, (
@@ -3071,7 +3186,7 @@ class QueryContext(object):
self.create_eager_joins = []
self.propagate_options = set(o for o in query._with_options if
o.propagate_to_loaders)
- self.attributes = self._attributes = query._attributes.copy()
+ self.attributes = query._attributes.copy()
class AliasOption(interfaces.MapperOption):
@@ -3080,7 +3195,7 @@ class AliasOption(interfaces.MapperOption):
self.alias = alias
def process_query(self, query):
- if isinstance(self.alias, basestring):
+ if isinstance(self.alias, util.string_types):
alias = query._mapper_zero().mapped_table.alias(self.alias)
else:
alias = self.alias
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index ffb8a4e03..5a4486eef 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -5,7 +5,7 @@
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Provides the Session class and related utilities."""
-from __future__ import with_statement
+
import weakref
from .. import util, sql, engine, exc as sa_exc, event
@@ -328,7 +328,7 @@ class SessionTransaction(object):
subtransaction.commit()
if not self.session._flushing:
- for _flush_guard in xrange(100):
+ for _flush_guard in range(100):
if self.session._is_clean():
break
self.session.flush()
@@ -605,7 +605,7 @@ class Session(_SessionClassMethods):
SessionExtension._adapt_listener(self, ext)
if binds is not None:
- for mapperortable, bind in binds.iteritems():
+ for mapperortable, bind in binds.items():
if isinstance(mapperortable, (type, Mapper)):
self.bind_mapper(mapperortable, bind)
else:
@@ -1776,7 +1776,7 @@ class Session(_SessionClassMethods):
Session.
"""
- return iter(list(self._new.values()) + self.identity_map.values())
+ return iter(list(self._new.values()) + list(self.identity_map.values()))
def _contains_state(self, state):
return state in self._new or self.identity_map.contains_state(state)
@@ -2139,13 +2139,13 @@ class Session(_SessionClassMethods):
def deleted(self):
"The set of all instances marked as 'deleted' within this ``Session``"
- return util.IdentitySet(self._deleted.values())
+ return util.IdentitySet(list(self._deleted.values()))
@property
def new(self):
"The set of all instances marked as 'new' within this ``Session``."
- return util.IdentitySet(self._new.values())
+ return util.IdentitySet(list(self._new.values()))
class sessionmaker(_SessionClassMethods):
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index 6660a39ef..cabfb35b9 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -359,7 +359,7 @@ class LazyLoader(AbstractRelationshipLoader):
)
if self.use_get:
- for col in self._equated_columns.keys():
+ for col in list(self._equated_columns):
if col in self.mapper._equivalent_columns:
for c in self.mapper._equivalent_columns[col]:
self._equated_columns[c] = self._equated_columns[col]
@@ -688,7 +688,8 @@ class SubqueryLoader(AbstractRelationshipLoader):
# build up a path indicating the path from the leftmost
# entity to the thing we're subquery loading.
- with_poly_info = path.get(context, "path_with_polymorphic", None)
+ with_poly_info = path.get(context.attributes,
+ "path_with_polymorphic", None)
if with_poly_info is not None:
effective_entity = with_poly_info.entity
else:
@@ -701,7 +702,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
# if not via query option, check for
# a cycle
- if not path.contains(context, "loaderstrategy"):
+ if not path.contains(context.attributes, "loaderstrategy"):
if self.join_depth:
if path.length / 2 > self.join_depth:
return
@@ -747,7 +748,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
# add new query to attributes to be picked up
# by create_row_processor
- path.set(context, "subquery", q)
+ path.set(context.attributes, "subquery", q)
def _get_leftmost(self, subq_path):
subq_path = subq_path.path
@@ -781,7 +782,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
# set a real "from" if not present, as this is more
# accurate than just going off of the column expression
if not q._from_obj and entity_mapper.isa(leftmost_mapper):
- q._set_select_from(entity_mapper)
+ q._set_select_from([entity_mapper], False)
# select from the identity columns of the outer
q._set_entities(q._adapt_col_list(leftmost_attr))
@@ -924,7 +925,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
path = path[self.parent_property]
- subq = path.get(context, 'subquery')
+ subq = path.get(context.attributes, 'subquery')
if subq is None:
return None, None, None
@@ -934,7 +935,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
# cache the loaded collections in the context
# so that inheriting mappers don't re-load when they
# call upon create_row_processor again
- collections = path.get(context, "collections")
+ collections = path.get(context.attributes, "collections")
if collections is None:
collections = dict(
(k, [v[0] for v in v])
@@ -942,7 +943,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
subq,
lambda x: x[1:]
))
- path.set(context, 'collections', collections)
+ path.set(context.attributes, 'collections', collections)
if adapter:
local_cols = [adapter.columns[c] for c in local_cols]
@@ -1011,7 +1012,7 @@ class JoinedLoader(AbstractRelationshipLoader):
with_polymorphic = None
- user_defined_adapter = path.get(context,
+ user_defined_adapter = path.get(context.attributes,
"user_defined_eager_row_processor",
False)
if user_defined_adapter is not False:
@@ -1023,7 +1024,7 @@ class JoinedLoader(AbstractRelationshipLoader):
else:
# if not via query option, check for
# a cycle
- if not path.contains(context, "loaderstrategy"):
+ if not path.contains(context.attributes, "loaderstrategy"):
if self.join_depth:
if path.length / 2 > self.join_depth:
return
@@ -1037,7 +1038,7 @@ class JoinedLoader(AbstractRelationshipLoader):
)
with_poly_info = path.get(
- context,
+ context.attributes,
"path_with_polymorphic",
None
)
@@ -1065,11 +1066,11 @@ class JoinedLoader(AbstractRelationshipLoader):
adapter = entity._get_entity_clauses(context.query, context)
if adapter and user_defined_adapter:
user_defined_adapter = user_defined_adapter.wrap(adapter)
- path.set(context, "user_defined_eager_row_processor",
+ path.set(context.attributes, "user_defined_eager_row_processor",
user_defined_adapter)
elif adapter:
user_defined_adapter = adapter
- path.set(context, "user_defined_eager_row_processor",
+ path.set(context.attributes, "user_defined_eager_row_processor",
user_defined_adapter)
add_to_collection = context.primary_columns
@@ -1080,7 +1081,7 @@ class JoinedLoader(AbstractRelationshipLoader):
column_collection, parentmapper, allow_innerjoin
):
with_poly_info = path.get(
- context,
+ context.attributes,
"path_with_polymorphic",
None
)
@@ -1098,7 +1099,7 @@ class JoinedLoader(AbstractRelationshipLoader):
if self.parent_property.direction != interfaces.MANYTOONE:
context.multi_row_eager_loaders = True
- innerjoin = allow_innerjoin and path.get(context,
+ innerjoin = allow_innerjoin and path.get(context.attributes,
"eager_join_type",
self.parent_property.innerjoin)
if not innerjoin:
@@ -1113,7 +1114,7 @@ class JoinedLoader(AbstractRelationshipLoader):
)
add_to_collection = context.secondary_columns
- path.set(context, "eager_row_processor", clauses)
+ path.set(context.attributes, "eager_row_processor", clauses)
return clauses, adapter, add_to_collection, allow_innerjoin
@@ -1208,7 +1209,7 @@ class JoinedLoader(AbstractRelationshipLoader):
)
def _create_eager_adapter(self, context, row, adapter, path):
- user_defined_adapter = path.get(context,
+ user_defined_adapter = path.get(context.attributes,
"user_defined_eager_row_processor",
False)
if user_defined_adapter is not False:
@@ -1221,7 +1222,7 @@ class JoinedLoader(AbstractRelationshipLoader):
elif context.adapter:
decorator = context.adapter
else:
- decorator = path.get(context, "eager_row_processor")
+ decorator = path.get(context.attributes, "eager_row_processor")
if decorator is None:
return False
@@ -1332,7 +1333,7 @@ class EagerLazyOption(StrategizedOption):
def __init__(self, key, lazy=True, chained=False,
propagate_to_loaders=True
):
- if isinstance(key[0], basestring) and key[0] == '*':
+ if isinstance(key[0], str) and key[0] == '*':
if len(key) != 1:
raise sa_exc.ArgumentError(
"Wildcard identifier '*' must "
@@ -1374,9 +1375,9 @@ class EagerJoinOption(PropertyOption):
def process_query_property(self, query, paths):
if self.chained:
for path in paths:
- path.set(query, "eager_join_type", self.innerjoin)
+ path.set(query._attributes, "eager_join_type", self.innerjoin)
else:
- paths[-1].set(query, "eager_join_type", self.innerjoin)
+ paths[-1].set(query._attributes, "eager_join_type", self.innerjoin)
class LoadEagerFromAliasOption(PropertyOption):
@@ -1384,7 +1385,7 @@ class LoadEagerFromAliasOption(PropertyOption):
def __init__(self, key, alias=None, chained=False):
super(LoadEagerFromAliasOption, self).__init__(key)
if alias is not None:
- if not isinstance(alias, basestring):
+ if not isinstance(alias, str):
info = inspect(alias)
alias = info.selectable
self.alias = alias
@@ -1395,29 +1396,32 @@ class LoadEagerFromAliasOption(PropertyOption):
for path in paths[0:-1]:
(root_mapper, prop) = path.path[-2:]
adapter = query._polymorphic_adapters.get(prop.mapper, None)
- path.setdefault(query,
+ path.setdefault(query._attributes,
"user_defined_eager_row_processor",
adapter)
root_mapper, prop = paths[-1].path[-2:]
if self.alias is not None:
- if isinstance(self.alias, basestring):
+ if isinstance(self.alias, str):
self.alias = prop.target.alias(self.alias)
- paths[-1].set(query, "user_defined_eager_row_processor",
- sql_util.ColumnAdapter(self.alias,
+ paths[-1].set(query._attributes,
+ "user_defined_eager_row_processor",
+ sql_util.ColumnAdapter(self.alias,
equivalents=prop.mapper._equivalent_columns)
)
else:
- if paths[-1].contains(query, "path_with_polymorphic"):
- with_poly_info = paths[-1].get(query, "path_with_polymorphic")
+ if paths[-1].contains(query._attributes, "path_with_polymorphic"):
+ with_poly_info = paths[-1].get(query._attributes,
+ "path_with_polymorphic")
adapter = orm_util.ORMAdapter(
with_poly_info.entity,
equivalents=prop.mapper._equivalent_columns,
adapt_required=True)
else:
adapter = query._polymorphic_adapters.get(prop.mapper, None)
- paths[-1].set(query, "user_defined_eager_row_processor",
- adapter)
+ paths[-1].set(query._attributes,
+ "user_defined_eager_row_processor",
+ adapter)
def single_parent_validator(desc, prop):
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index 1f5115c41..aa5f7836c 100644
--- a/lib/sqlalchemy/orm/unitofwork.py
+++ b/lib/sqlalchemy/orm/unitofwork.py
@@ -315,7 +315,7 @@ class UOWTransaction(object):
# see if the graph of mapper dependencies has cycles.
self.cycles = cycles = topological.find_cycles(
self.dependencies,
- self.postsort_actions.values())
+ list(self.postsort_actions.values()))
if cycles:
# if yes, break the per-mapper actions into
@@ -381,7 +381,7 @@ class UOWTransaction(object):
"""
states = set(self.states)
isdel = set(
- s for (s, (isdelete, listonly)) in self.states.iteritems()
+ s for (s, (isdelete, listonly)) in self.states.items()
if isdelete
)
other = states.difference(isdel)
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index 390e83538..bd8228f2c 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -120,7 +120,7 @@ def polymorphic_union(table_map, typecolname,
colnames = util.OrderedSet()
colnamemaps = {}
types = {}
- for key in table_map.keys():
+ for key in table_map:
table = table_map[key]
# MySQL doesn't like selecting from a select;
@@ -146,7 +146,7 @@ def polymorphic_union(table_map, typecolname,
return sql.type_coerce(sql.null(), types[name]).label(name)
result = []
- for type, table in table_map.iteritems():
+ for type, table in table_map.items():
if typecolname is not None:
result.append(
sql.select([col(name, table) for name in colnames] +
@@ -203,7 +203,7 @@ def identity_key(*args, **kwargs):
"positional arguments, got %s" % len(args))
if kwargs:
raise sa_exc.ArgumentError("unknown keyword arguments: %s"
- % ", ".join(kwargs.keys()))
+ % ", ".join(kwargs))
mapper = class_mapper(class_)
if "ident" in locals():
return mapper.identity_key_from_primary_key(util.to_list(ident))
@@ -211,7 +211,7 @@ def identity_key(*args, **kwargs):
instance = kwargs.pop("instance")
if kwargs:
raise sa_exc.ArgumentError("unknown keyword arguments: %s"
- % ", ".join(kwargs.keys()))
+ % ", ".join(kwargs.keys))
mapper = object_mapper(instance)
return mapper.identity_key_from_instance(instance)
@@ -278,16 +278,16 @@ class PathRegistry(object):
return other is not None and \
self.path == other.path
- def set(self, reg, key, value):
- reg._attributes[(key, self.path)] = value
+ def set(self, attributes, key, value):
+ attributes[(key, self.path)] = value
- def setdefault(self, reg, key, value):
- reg._attributes.setdefault((key, self.path), value)
+ def setdefault(self, attributes, key, value):
+ attributes.setdefault((key, self.path), value)
- def get(self, reg, key, value=None):
+ def get(self, attributes, key, value=None):
key = (key, self.path)
- if key in reg._attributes:
- return reg._attributes[key]
+ if key in attributes:
+ return attributes[key]
else:
return value
@@ -300,7 +300,7 @@ class PathRegistry(object):
def pairs(self):
path = self.path
- for i in xrange(0, len(path), 2):
+ for i in range(0, len(path), 2):
yield path[i], path[i + 1]
def contains_mapper(self, mapper):
@@ -313,18 +313,18 @@ class PathRegistry(object):
else:
return False
- def contains(self, reg, key):
- return (key, self.path) in reg._attributes
+ def contains(self, attributes, key):
+ return (key, self.path) in attributes
def __reduce__(self):
return _unreduce_path, (self.serialize(), )
def serialize(self):
path = self.path
- return zip(
+ return list(zip(
[m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
[path[i].key for i in range(1, len(path), 2)] + [None]
- )
+ ))
@classmethod
def deserialize(cls, path):
@@ -418,8 +418,9 @@ class EntityRegistry(PathRegistry, dict):
self.path = parent.path + (entity,)
- def __nonzero__(self):
+ def __bool__(self):
return True
+ __nonzero__ = __bool__
def __getitem__(self, entity):
if isinstance(entity, (int, slice)):
@@ -440,8 +441,8 @@ class EntityRegistry(PathRegistry, dict):
"""
path = dict.__getitem__(self, prop)
path_key = (key, path.path)
- if path_key in context._attributes:
- return context._attributes[path_key]
+ if path_key in context.attributes:
+ return context.attributes[path_key]
else:
return None
@@ -596,8 +597,8 @@ class AliasedClass(object):
return self.__adapt_prop(attr, key)
elif hasattr(attr, 'func_code'):
is_method = getattr(self.__target, key, None)
- if is_method and is_method.im_self is not None:
- return util.types.MethodType(attr.im_func, self, self)
+ if is_method and is_method.__self__ is not None:
+ return util.types.MethodType(attr.__func__, self, self)
else:
return None
elif hasattr(attr, '__get__'):
@@ -887,7 +888,7 @@ class _ORMJoin(expression.Join):
self._joined_from_info = right_info
- if isinstance(onclause, basestring):
+ if isinstance(onclause, util.string_types):
onclause = getattr(left_orm_info.entity, onclause)
if isinstance(onclause, attributes.QueryableAttribute):
@@ -1008,7 +1009,7 @@ def with_parent(instance, prop):
parent/child relationship.
"""
- if isinstance(prop, basestring):
+ if isinstance(prop, util.string_types):
mapper = object_mapper(instance)
prop = getattr(mapper.class_, prop).property
elif isinstance(prop, attributes.QueryableAttribute):
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index 501b6d2a0..0470e9e48 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -57,7 +57,7 @@ def clear_managers():
All pools and connections are disposed.
"""
- for manager in proxies.itervalues():
+ for manager in proxies.values():
manager.close()
proxies.clear()
@@ -368,7 +368,7 @@ class _ConnectionRecord(object):
connection = self.__pool._creator()
self.__pool.logger.debug("Created new connection %r", connection)
return connection
- except Exception, e:
+ except Exception as e:
self.__pool.logger.debug("Error on connect(): %s", e)
raise
@@ -391,7 +391,7 @@ def _finalize_fairy(connection, connection_record, pool, ref, echo):
# Immediately close detached instances
if connection_record is None:
pool._close_connection(connection)
- except Exception, e:
+ except Exception as e:
if connection_record is not None:
connection_record.invalidate(e=e)
if isinstance(e, (SystemExit, KeyboardInterrupt)):
@@ -499,7 +499,7 @@ class _ConnectionFairy(object):
self._connection_record,
self)
return self
- except exc.DisconnectionError, e:
+ except exc.DisconnectionError as e:
self._pool.logger.info(
"Disconnection detected on checkout: %s", e)
self._connection_record.invalidate(e)
@@ -755,7 +755,7 @@ class QueuePool(Pool):
wait = self._max_overflow > -1 and \
self._overflow >= self._max_overflow
return self._pool.get(wait, self._timeout)
- except sqla_queue.SAAbort, aborted:
+ except sqla_queue.SAAbort as aborted:
return aborted.context._do_get()
except sqla_queue.Empty:
if self._max_overflow > -1 and \
@@ -1004,7 +1004,7 @@ class _DBProxy(object):
self._create_pool_mutex = threading.Lock()
def close(self):
- for key in self.pools.keys():
+ for key in list(self.pools):
del self.pools[key]
def __del__(self):
diff --git a/lib/sqlalchemy/processors.py b/lib/sqlalchemy/processors.py
index 9963ca9a3..bf95d146b 100644
--- a/lib/sqlalchemy/processors.py
+++ b/lib/sqlalchemy/processors.py
@@ -38,10 +38,10 @@ def str_to_datetime_processor_factory(regexp, type_):
"'%s'" % (type_.__name__, value))
if has_named_groups:
groups = m.groupdict(0)
- return type_(**dict(zip(groups.iterkeys(),
- map(int, groups.itervalues()))))
+ return type_(**dict(list(zip(iter(groups.keys()),
+ list(map(int, iter(groups.values())))))))
else:
- return type_(*map(int, m.groups(0)))
+ return type_(*list(map(int, m.groups(0))))
return process
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index f894e6c31..3a74cbd59 100644
--- a/lib/sqlalchemy/schema.py
+++ b/lib/sqlalchemy/schema.py
@@ -27,7 +27,7 @@ Since these objects are part of the SQL expression language, they are usable
as components in SQL expressions.
"""
-from __future__ import with_statement
+
import re
import inspect
from . import exc, util, dialects, event, events, inspection
@@ -679,7 +679,7 @@ class Table(SchemaItem, expression.TableClause):
# skip indexes that would be generated
# by the 'index' flag on Column
if len(index.columns) == 1 and \
- list(index.columns)[0].index:
+ list(index.columns)[0].index:
continue
Index(index.name,
unique=index.unique,
@@ -898,7 +898,7 @@ class Column(SchemaItem, expression.ColumnClause):
type_ = kwargs.pop('type_', None)
args = list(args)
if args:
- if isinstance(args[0], basestring):
+ if isinstance(args[0], util.string_types):
if name is not None:
raise exc.ArgumentError(
"May not pass name positionally and as a keyword.")
@@ -944,11 +944,7 @@ class Column(SchemaItem, expression.ColumnClause):
args.append(self.default)
else:
if getattr(self.type, '_warn_on_bytestring', False):
- # Py3K
- #if isinstance(self.default, bytes):
- # Py2K
- if isinstance(self.default, str):
- # end Py2K
+ if isinstance(self.default, util.binary_type):
util.warn("Unicode column received non-unicode "
"default value.")
args.append(ColumnDefault(self.default))
@@ -983,7 +979,7 @@ class Column(SchemaItem, expression.ColumnClause):
if kwargs:
raise exc.ArgumentError(
- "Unknown arguments passed to Column: " + repr(kwargs.keys()))
+ "Unknown arguments passed to Column: " + repr(list(kwargs)))
def __str__(self):
if self.name is None:
@@ -1048,7 +1044,7 @@ class Column(SchemaItem, expression.ColumnClause):
if self.key in table._columns:
col = table._columns.get(self.key)
if col is not self:
- for fk in list(col.foreign_keys):
+ for fk in col.foreign_keys:
table.foreign_keys.remove(fk)
if fk.constraint in table.constraints:
# this might have been removed
@@ -1069,7 +1065,7 @@ class Column(SchemaItem, expression.ColumnClause):
self.table = table
if self.index:
- if isinstance(self.index, basestring):
+ if isinstance(self.index, util.string_types):
raise exc.ArgumentError(
"The 'index' keyword argument on Column is boolean only. "
"To create indexes with a specific name, create an "
@@ -1077,7 +1073,7 @@ class Column(SchemaItem, expression.ColumnClause):
Index(expression._truncated_label('ix_%s' % self._label),
self, unique=self.unique)
elif self.unique:
- if isinstance(self.unique, basestring):
+ if isinstance(self.unique, util.string_types):
raise exc.ArgumentError(
"The 'unique' keyword argument on Column is boolean "
"only. To create unique constraints or indexes with a "
@@ -1153,23 +1149,15 @@ class Column(SchemaItem, expression.ColumnClause):
nullable=self.nullable,
quote=self.quote,
_proxies=[self], *fk)
- except TypeError, e:
- # Py3K
- #raise TypeError(
- # "Could not create a copy of this %r object. "
- # "Ensure the class includes a _constructor() "
- # "attribute or method which accepts the "
- # "standard Column constructor arguments, or "
- # "references the Column class itself." % self.__class__) from e
- # Py2K
- raise TypeError(
- "Could not create a copy of this %r object. "
- "Ensure the class includes a _constructor() "
- "attribute or method which accepts the "
- "standard Column constructor arguments, or "
- "references the Column class itself. "
- "Original error: %s" % (self.__class__, e))
- # end Py2K
+ except TypeError:
+ util.raise_from_cause(
+ TypeError(
+ "Could not create a copy of this %r object. "
+ "Ensure the class includes a _constructor() "
+ "attribute or method which accepts the "
+ "standard Column constructor arguments, or "
+ "references the Column class itself." % self.__class__)
+ )
c.table = selectable
selectable._columns.add(c)
@@ -1345,7 +1333,7 @@ class ForeignKey(SchemaItem):
if schema:
return schema + "." + self.column.table.name + \
"." + self.column.key
- elif isinstance(self._colspec, basestring):
+ elif isinstance(self._colspec, util.string_types):
return self._colspec
elif hasattr(self._colspec, '__clause_element__'):
_column = self._colspec.__clause_element__()
@@ -1390,7 +1378,7 @@ class ForeignKey(SchemaItem):
"""
# ForeignKey inits its remote column as late as possible, so tables
# can be defined without dependencies
- if isinstance(self._colspec, basestring):
+ if isinstance(self._colspec, util.string_types):
# locate the parent table this foreign key is attached to. we
# use the "original" column which our parent column represents
# (its a list of columns/other ColumnElements if the parent
@@ -1919,7 +1907,7 @@ class DefaultClause(FetchedValue):
has_argument = True
def __init__(self, arg, for_update=False, _reflected=False):
- util.assert_arg_type(arg, (basestring,
+ util.assert_arg_type(arg, (util.string_types[0],
expression.ClauseElement,
expression.TextClause), 'arg')
super(DefaultClause, self).__init__(for_update)
@@ -2029,7 +2017,7 @@ class ColumnCollectionMixin(object):
def _set_parent(self, table):
for col in self._pending_colargs:
- if isinstance(col, basestring):
+ if isinstance(col, util.string_types):
col = table.c[col]
self.columns.add(col)
@@ -2247,7 +2235,7 @@ class ForeignKeyConstraint(Constraint):
self._set_parent_with_dispatch(table)
elif columns and \
isinstance(columns[0], Column) and \
- columns[0].table is not None:
+ columns[0].table is not None:
self._set_parent_with_dispatch(columns[0].table)
@property
@@ -2256,19 +2244,19 @@ class ForeignKeyConstraint(Constraint):
@property
def columns(self):
- return self._elements.keys()
+ return list(self._elements)
@property
def elements(self):
- return self._elements.values()
+ return list(self._elements.values())
def _set_parent(self, table):
super(ForeignKeyConstraint, self)._set_parent(table)
- for col, fk in self._elements.iteritems():
+ for col, fk in self._elements.items():
# string-specified column names now get
# resolved to Column objects
- if isinstance(col, basestring):
+ if isinstance(col, util.string_types):
try:
col = table.c[col]
except KeyError:
@@ -2278,7 +2266,7 @@ class ForeignKeyConstraint(Constraint):
"named '%s' is present." % (table.description, col))
if not hasattr(fk, 'parent') or \
- fk.parent is not col:
+ fk.parent is not col:
fk._set_parent_with_dispatch(col)
if self.use_alter:
@@ -2569,7 +2557,7 @@ class MetaData(SchemaItem):
return 'MetaData(bind=%r)' % self.bind
def __contains__(self, table_or_key):
- if not isinstance(table_or_key, basestring):
+ if not isinstance(table_or_key, util.string_types):
table_or_key = table_or_key.key
return table_or_key in self.tables
@@ -2629,7 +2617,7 @@ class MetaData(SchemaItem):
def _bind_to(self, bind):
"""Bind this MetaData to an Engine, Connection, string or URL."""
- if isinstance(bind, (basestring, url.URL)):
+ if isinstance(bind, util.string_types + (url.URL, )):
from sqlalchemy import create_engine
self._bind = create_engine(bind)
else:
@@ -2662,7 +2650,7 @@ class MetaData(SchemaItem):
:meth:`.Inspector.sorted_tables`
"""
- return sqlutil.sort_tables(self.tables.itervalues())
+ return sqlutil.sort_tables(self.tables.values())
def reflect(self, bind=None, schema=None, views=False, only=None):
"""Load all available table definitions from the database.
@@ -2723,7 +2711,7 @@ class MetaData(SchemaItem):
bind.dialect.get_view_names(conn, schema)
)
- current = set(self.tables.iterkeys())
+ current = set(self.tables)
if only is None:
load = [name for name in available if name not in current]
@@ -2845,7 +2833,7 @@ class ThreadLocalMetaData(MetaData):
def _bind_to(self, bind):
"""Bind to a Connectable in the caller's thread."""
- if isinstance(bind, (basestring, url.URL)):
+ if isinstance(bind, util.string_types + (url.URL, )):
try:
self.context._engine = self.__engines[bind]
except KeyError:
@@ -2870,7 +2858,7 @@ class ThreadLocalMetaData(MetaData):
def dispose(self):
"""Dispose all bound engines, in all thread contexts."""
- for e in self.__engines.itervalues():
+ for e in self.__engines.values():
if hasattr(e, 'dispose'):
e.dispose()
@@ -3075,7 +3063,7 @@ class DDLElement(expression.Executable, _DDLCompiles):
not self._should_execute_deprecated(None, target, bind, **kw):
return False
- if isinstance(self.dialect, basestring):
+ if isinstance(self.dialect, util.string_types):
if self.dialect != bind.engine.name:
return False
elif isinstance(self.dialect, (tuple, list, set)):
@@ -3090,7 +3078,7 @@ class DDLElement(expression.Executable, _DDLCompiles):
def _should_execute_deprecated(self, event, target, bind, **kw):
if self.on is None:
return True
- elif isinstance(self.on, basestring):
+ elif isinstance(self.on, util.string_types):
return self.on == bind.engine.name
elif isinstance(self.on, (tuple, list, set)):
return bind.engine.name in self.on
@@ -3105,7 +3093,7 @@ class DDLElement(expression.Executable, _DDLCompiles):
def _check_ddl_on(self, on):
if (on is not None and
- (not isinstance(on, (basestring, tuple, list, set)) and
+ (not isinstance(on, util.string_types + (tuple, list, set)) and
not util.callable(on))):
raise exc.ArgumentError(
"Expected the name of a database dialect, a tuple "
@@ -3230,7 +3218,7 @@ class DDL(DDLElement):
"""
- if not isinstance(statement, basestring):
+ if not isinstance(statement, util.string_types):
raise exc.ArgumentError(
"Expected a string or unicode SQL statement, got '%r'" %
statement)
@@ -3262,7 +3250,7 @@ def _to_schema_column(element):
def _to_schema_column_or_string(element):
if hasattr(element, '__clause_element__'):
element = element.__clause_element__()
- if not isinstance(element, (basestring, expression.ColumnElement)):
+ if not isinstance(element, util.string_types + (expression.ColumnElement, )):
msg = "Element %r is not a string name or column element"
raise exc.ArgumentError(msg % element)
return element
diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py
index 1b81a18c5..9700f26a0 100644
--- a/lib/sqlalchemy/sql/__init__.py
+++ b/lib/sqlalchemy/sql/__init__.py
@@ -64,5 +64,5 @@ from .expression import (
from .visitors import ClauseVisitor
-__tmp = locals().keys()
+__tmp = list(locals().keys())
__all__ = sorted([i for i in __tmp if not i.startswith('__')])
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index 5dd7ec564..d475f54ac 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -51,7 +51,7 @@ RESERVED_WORDS = set([
'using', 'verbose', 'when', 'where'])
LEGAL_CHARACTERS = re.compile(r'^[A-Z0-9_$]+$', re.I)
-ILLEGAL_INITIAL_CHARACTERS = set([str(x) for x in xrange(0, 10)]).union(['$'])
+ILLEGAL_INITIAL_CHARACTERS = set([str(x) for x in range(0, 10)]).union(['$'])
BIND_PARAMS = re.compile(r'(?<![:\w\$\x5c]):([\w\$]+)(?![:\w\$])', re.UNICODE)
BIND_PARAMS_ESC = re.compile(r'\x5c(:[\w\$]+)(?![:\w\$])', re.UNICODE)
@@ -83,9 +83,7 @@ OPERATORS = {
operators.add: ' + ',
operators.mul: ' * ',
operators.sub: ' - ',
- # Py2K
operators.div: ' / ',
- # end Py2K
operators.mod: ' % ',
operators.truediv: ' / ',
operators.neg: '-',
@@ -334,7 +332,7 @@ class SQLCompiler(engine.Compiled):
if params:
pd = {}
- for bindparam, name in self.bind_names.iteritems():
+ for bindparam, name in self.bind_names.items():
if bindparam.key in params:
pd[name] = params[bindparam.key]
elif name in params:
@@ -488,7 +486,7 @@ class SQLCompiler(engine.Compiled):
def visit_textclause(self, textclause, **kwargs):
if textclause.typemap is not None:
- for colname, type_ in textclause.typemap.iteritems():
+ for colname, type_ in textclause.typemap.items():
self.result_map[colname
if self.dialect.case_sensitive
else colname.lower()] = \
@@ -862,12 +860,12 @@ class SQLCompiler(engine.Compiled):
of the DBAPI.
"""
- if isinstance(value, basestring):
+ if isinstance(value, util.string_types):
value = value.replace("'", "''")
return "'%s'" % value
elif value is None:
return "NULL"
- elif isinstance(value, (float, int, long)):
+ elif isinstance(value, (float, ) + util.int_types):
return repr(value)
elif isinstance(value, decimal.Decimal):
return str(value)
@@ -1172,7 +1170,7 @@ class SQLCompiler(engine.Compiled):
self, ashint=True)
})
for (from_, dialect), hinttext in
- select._hints.iteritems()
+ select._hints.items()
if dialect in ('*', self.dialect.name)
])
hint_text = self.get_select_hint_text(byfrom)
@@ -1570,7 +1568,7 @@ class SQLCompiler(engine.Compiled):
values = []
if stmt_parameters is not None:
- for k, v in stmt_parameters.iteritems():
+ for k, v in stmt_parameters.items():
colkey = sql._column_as_key(k)
if colkey is not None:
parameters.setdefault(colkey, v)
@@ -1910,22 +1908,13 @@ class DDLCompiler(engine.Compiled):
and not first_pk)
if column.primary_key:
first_pk = True
- except exc.CompileError, ce:
- # Py3K
- #raise exc.CompileError("(in table '%s', column '%s'): %s"
- # % (
- # table.description,
- # column.name,
- # ce.args[0]
- # )) from ce
- # Py2K
- raise exc.CompileError("(in table '%s', column '%s'): %s"
- % (
+ except exc.CompileError as ce:
+ util.raise_from_cause(
+ exc.CompileError(util.u("(in table '%s', column '%s'): %s") % (
table.description,
column.name,
ce.args[0]
- )), None, sys.exc_info()[2]
- # end Py2K
+ )))
const = self.create_table_constraints(table)
if const:
@@ -2078,7 +2067,7 @@ class DDLCompiler(engine.Compiled):
def get_column_default_string(self, column):
if isinstance(column.server_default, schema.DefaultClause):
- if isinstance(column.server_default.arg, basestring):
+ if isinstance(column.server_default.arg, util.string_types):
return "'%s'" % column.server_default.arg
else:
return self.sql_compiler.process(column.server_default.arg)
@@ -2397,7 +2386,7 @@ class IdentifierPreparer(object):
lc_value = value.lower()
return (lc_value in self.reserved_words
or value[0] in self.illegal_initial_characters
- or not self.legal_characters.match(unicode(value))
+ or not self.legal_characters.match(util.text_type(value))
or (lc_value != value))
def quote_schema(self, schema, force):
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index 402e52272..6dc134d98 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -26,7 +26,7 @@ to stay the same in future releases.
"""
-
+from __future__ import unicode_literals
import itertools
import re
from operator import attrgetter
@@ -1375,7 +1375,7 @@ func = _FunctionGenerator()
modifier = _FunctionGenerator(group=False)
-class _truncated_label(unicode):
+class _truncated_label(util.text_type):
"""A unicode subclass used to identify symbolic "
"names that may require truncation."""
@@ -1395,13 +1395,13 @@ class _anonymous_label(_truncated_label):
def __add__(self, other):
return _anonymous_label(
- unicode(self) +
- unicode(other))
+ util.text_type(self) +
+ util.text_type(other))
def __radd__(self, other):
return _anonymous_label(
- unicode(other) +
- unicode(self))
+ util.text_type(other) +
+ util.text_type(self))
def apply_map(self, map_):
return self % map_
@@ -1422,7 +1422,7 @@ def _as_truncated(value):
def _string_or_unprintable(element):
- if isinstance(element, basestring):
+ if isinstance(element, util.string_types):
return element
else:
try:
@@ -1486,7 +1486,7 @@ def _labeled(element):
def _column_as_key(element):
- if isinstance(element, basestring):
+ if isinstance(element, util.string_types):
return element
if hasattr(element, '__clause_element__'):
element = element.__clause_element__()
@@ -1508,8 +1508,8 @@ def _literal_as_text(element):
return element
elif hasattr(element, '__clause_element__'):
return element.__clause_element__()
- elif isinstance(element, basestring):
- return TextClause(unicode(element))
+ elif isinstance(element, util.string_types):
+ return TextClause(util.text_type(element))
elif isinstance(element, (util.NoneType, bool)):
return _const_expr(element)
else:
@@ -1583,8 +1583,8 @@ def _interpret_as_column_or_from(element):
def _interpret_as_from(element):
insp = inspection.inspect(element, raiseerr=False)
if insp is None:
- if isinstance(element, basestring):
- return TextClause(unicode(element))
+ if isinstance(element, util.string_types):
+ return TextClause(util.text_type(element))
elif hasattr(insp, "selectable"):
return insp.selectable
raise exc.ArgumentError("FROM expression expected")
@@ -1717,17 +1717,6 @@ class ClauseElement(Visitable):
d.pop('_is_clone_of', None)
return d
- if util.jython:
- def __hash__(self):
- """Return a distinct hash code.
-
- ClauseElements may have special equality comparisons which
- makes us rely on them having unique hash codes for use in
- hash-based collections. Stock __hash__ doesn't guarantee
- unique values on platforms with moving GCs.
- """
- return id(self)
-
def _annotate(self, values):
"""return a copy of this ClauseElement with annotations
updated by the given dictionary.
@@ -1916,11 +1905,10 @@ class ClauseElement(Visitable):
return dialect.statement_compiler(dialect, self, **kw)
def __str__(self):
- # Py3K
- #return unicode(self.compile())
- # Py2K
- return unicode(self.compile()).encode('ascii', 'backslashreplace')
- # end Py2K
+ if util.py3k:
+ return str(self.compile())
+ else:
+ return unicode(self.compile()).encode('ascii', 'backslashreplace')
def __and__(self, other):
return and_(self, other)
@@ -1931,9 +1919,11 @@ class ClauseElement(Visitable):
def __invert__(self):
return self._negate()
- def __nonzero__(self):
+ def __bool__(self):
raise TypeError("Boolean value of this clause is not defined")
+ __nonzero__ = __bool__
+
def _negate(self):
if hasattr(self, 'negation_clause'):
return self.negation_clause
@@ -2205,7 +2195,7 @@ class _DefaultColumnComparator(operators.ColumnOperators):
def _check_literal(self, expr, operator, other):
if isinstance(other, (ColumnElement, TextClause)):
if isinstance(other, BindParameter) and \
- isinstance(other.type, sqltypes.NullType):
+ isinstance(other.type, sqltypes.NullType):
# TODO: perhaps we should not mutate the incoming
# bindparam() here and instead make a copy of it.
# this might be the only place that we're mutating
@@ -2525,7 +2515,7 @@ class ColumnCollection(util.OrderedProperties):
return and_(*l)
def __contains__(self, other):
- if not isinstance(other, basestring):
+ if not isinstance(other, util.string_types):
raise exc.ArgumentError("__contains__ requires a string argument")
return util.OrderedProperties.__contains__(self, other)
@@ -3117,7 +3107,6 @@ class Executable(Generative):
def execute(self, *multiparams, **params):
"""Compile and execute this :class:`.Executable`."""
-
e = self.bind
if e is None:
label = getattr(self, 'description', self.__class__.__name__)
@@ -3186,13 +3175,13 @@ class TextClause(Executable, ClauseElement):
_hide_froms = []
def __init__(
- self,
- text='',
- bind=None,
- bindparams=None,
- typemap=None,
- autocommit=None,
- ):
+ self,
+ text='',
+ bind=None,
+ bindparams=None,
+ typemap=None,
+ autocommit=None):
+
self._bind = bind
self.bindparams = {}
self.typemap = typemap
@@ -3202,9 +3191,9 @@ class TextClause(Executable, ClauseElement):
'e)')
self._execution_options = \
self._execution_options.union(
- {'autocommit': autocommit})
+ {'autocommit': autocommit})
if typemap is not None:
- for key in typemap.keys():
+ for key in typemap:
typemap[key] = sqltypes.to_instance(typemap[key])
def repl(m):
@@ -3241,7 +3230,7 @@ class TextClause(Executable, ClauseElement):
for b in self.bindparams.values())
def get_children(self, **kwargs):
- return self.bindparams.values()
+ return list(self.bindparams.values())
class Null(ColumnElement):
@@ -3759,7 +3748,7 @@ class BinaryExpression(ColumnElement):
negate=None, modifiers=None):
# allow compatibility with libraries that
# refer to BinaryExpression directly and pass strings
- if isinstance(operator, basestring):
+ if isinstance(operator, util.string_types):
operator = operators.custom_op(operator)
self._orig = (left, right)
self.left = _literal_as_text(left).self_group(against=operator)
@@ -3773,12 +3762,14 @@ class BinaryExpression(ColumnElement):
else:
self.modifiers = modifiers
- def __nonzero__(self):
+ def __bool__(self):
if self.operator in (operator.eq, operator.ne):
return self.operator(hash(self._orig[0]), hash(self._orig[1]))
else:
raise TypeError("Boolean value of this clause is not defined")
+ __nonzero__ = __bool__
+
@property
def is_comparison(self):
return operators.is_comparison(self.operator)
@@ -4066,11 +4057,10 @@ class Alias(FromClause):
@property
def description(self):
- # Py3K
- #return self.name
- # Py2K
- return self.name.encode('ascii', 'backslashreplace')
- # end Py2K
+ if util.py3k:
+ return self.name
+ else:
+ return self.name.encode('ascii', 'backslashreplace')
def as_scalar(self):
try:
@@ -4484,11 +4474,10 @@ class ColumnClause(Immutable, ColumnElement):
@util.memoized_property
def description(self):
- # Py3K
- #return self.name
- # Py2K
- return self.name.encode('ascii', 'backslashreplace')
- # end Py2K
+ if util.py3k:
+ return self.name
+ else:
+ return self.name.encode('ascii', 'backslashreplace')
@_memoized_property
def _key_label(self):
@@ -4615,11 +4604,10 @@ class TableClause(Immutable, FromClause):
@util.memoized_property
def description(self):
- # Py3K
- #return self.name
- # Py2K
- return self.name.encode('ascii', 'backslashreplace')
- # end Py2K
+ if util.py3k:
+ return self.name
+ else:
+ return self.name.encode('ascii', 'backslashreplace')
def append_column(self, c):
self._columns[c.key] = c
diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py
index c1c07dfb6..5e2d0792c 100644
--- a/lib/sqlalchemy/sql/functions.py
+++ b/lib/sqlalchemy/sql/functions.py
@@ -41,7 +41,7 @@ class _GenericMeta(VisitableType):
super(_GenericMeta, cls).__init__(clsname, bases, clsdict)
-class GenericFunction(Function):
+class GenericFunction(util.with_metaclass(_GenericMeta, Function)):
"""Define a 'generic' function.
A generic function is a pre-established :class:`.Function`
@@ -112,7 +112,6 @@ class GenericFunction(Function):
name is still recognized for backwards-compatibility.
"""
- __metaclass__ = _GenericMeta
coerce_arguments = True
diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py
index a7e6af116..4afb3db48 100644
--- a/lib/sqlalchemy/sql/operators.py
+++ b/lib/sqlalchemy/sql/operators.py
@@ -9,16 +9,19 @@
"""Defines operators used in SQL expressions."""
+from .. import util
+
+
from operator import (
and_, or_, inv, add, mul, sub, mod, truediv, lt, le, ne, gt, ge, eq, neg,
getitem, lshift, rshift
)
-# Py2K
-from operator import (div,)
-# end Py2K
+if util.py2k:
+ from operator import div
+else:
+ div = truediv
-from ..util import symbol
class Operators(object):
@@ -781,17 +784,15 @@ parenthesize (a op b).
"""
-_smallest = symbol('_smallest', canonical=-100)
-_largest = symbol('_largest', canonical=100)
+_smallest = util.symbol('_smallest', canonical=-100)
+_largest = util.symbol('_largest', canonical=100)
_PRECEDENCE = {
from_: 15,
getitem: 15,
mul: 8,
truediv: 8,
- # Py2K
div: 8,
- # end Py2K
mod: 8,
neg: 8,
add: 7,
diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py
index 4aa2d7496..91740dc16 100644
--- a/lib/sqlalchemy/sql/util.py
+++ b/lib/sqlalchemy/sql/util.py
@@ -232,7 +232,7 @@ def bind_values(clause):
def _quote_ddl_expr(element):
- if isinstance(element, basestring):
+ if isinstance(element, util.string_types):
element = element.replace("'", "''")
return "'%s'" % element
else:
@@ -349,7 +349,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False,
continue
try:
col = fk.get_referent(left)
- except exc.NoReferenceError, nrte:
+ except exc.NoReferenceError as nrte:
if nrte.table_name == left.name:
raise
else:
@@ -367,7 +367,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False,
continue
try:
col = fk.get_referent(b)
- except exc.NoReferenceError, nrte:
+ except exc.NoReferenceError as nrte:
if nrte.table_name == b.name:
raise
else:
@@ -518,15 +518,15 @@ class AnnotatedColumnElement(Annotated):
# so that the resulting objects are pickleable.
annotated_classes = {}
-for cls in expression.__dict__.values() + [schema.Column, schema.Table]:
+for cls in list(expression.__dict__.values()) + [schema.Column, schema.Table]:
if isinstance(cls, type) and issubclass(cls, expression.ClauseElement):
if issubclass(cls, expression.ColumnElement):
annotation_cls = "AnnotatedColumnElement"
else:
annotation_cls = "Annotated"
- exec "class Annotated%s(%s, cls):\n" \
- " pass" % (cls.__name__, annotation_cls) in locals()
- exec "annotated_classes[cls] = Annotated%s" % (cls.__name__,)
+ exec("class Annotated%s(%s, cls):\n" \
+ " pass" % (cls.__name__, annotation_cls), locals())
+ exec("annotated_classes[cls] = Annotated%s" % (cls.__name__,))
def _deep_annotate(element, annotations, exclude=None):
diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py
index f1dbb9e32..62f46ab64 100644
--- a/lib/sqlalchemy/sql/visitors.py
+++ b/lib/sqlalchemy/sql/visitors.py
@@ -49,11 +49,9 @@ class VisitableType(type):
Classes having no __visit_name__ attribute will remain unaffected.
"""
def __init__(cls, clsname, bases, clsdict):
- if cls.__name__ == 'Visitable' or not hasattr(cls, '__visit_name__'):
- super(VisitableType, cls).__init__(clsname, bases, clsdict)
- return
-
- _generate_dispatch(cls)
+ if clsname != 'Visitable' and \
+ hasattr(cls, '__visit_name__'):
+ _generate_dispatch(cls)
super(VisitableType, cls).__init__(clsname, bases, clsdict)
@@ -87,14 +85,12 @@ def _generate_dispatch(cls):
cls._compiler_dispatch = _compiler_dispatch
-class Visitable(object):
+class Visitable(util.with_metaclass(VisitableType, object)):
"""Base class for visitable objects, applies the
``VisitableType`` metaclass.
"""
- __metaclass__ = VisitableType
-
class ClauseVisitor(object):
"""Base class for visitor objects which can traverse using
diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py
index e571a5045..d5522213d 100644
--- a/lib/sqlalchemy/testing/__init__.py
+++ b/lib/sqlalchemy/testing/__init__.py
@@ -1,4 +1,4 @@
-from __future__ import absolute_import
+
from .warnings import testing_warn, assert_warnings, resetwarnings
diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py
index ebd10b130..c04153961 100644
--- a/lib/sqlalchemy/testing/assertions.py
+++ b/lib/sqlalchemy/testing/assertions.py
@@ -63,7 +63,7 @@ def emits_warning_on(db, *warnings):
@decorator
def decorate(fn, *args, **kw):
- if isinstance(db, basestring):
+ if isinstance(db, util.string_types):
if not spec(config.db):
return fn(*args, **kw)
else:
@@ -171,9 +171,9 @@ def assert_raises_message(except_cls, msg, callable_, *args, **kwargs):
try:
callable_(*args, **kwargs)
assert False, "Callable did not raise an exception"
- except except_cls, e:
- assert re.search(msg, unicode(e), re.UNICODE), u"%r !~ %s" % (msg, e)
- print unicode(e).encode('utf-8')
+ except except_cls as e:
+ assert re.search(msg, util.text_type(e), re.UNICODE), "%r !~ %s" % (msg, e)
+ print(util.text_type(e).encode('utf-8'))
class AssertsCompiledSQL(object):
@@ -190,12 +190,12 @@ class AssertsCompiledSQL(object):
dialect = default.DefaultDialect()
elif dialect is None:
dialect = config.db.dialect
- elif isinstance(dialect, basestring):
+ elif isinstance(dialect, util.string_types):
dialect = create_engine("%s://" % dialect).dialect
kw = {}
if params is not None:
- kw['column_keys'] = params.keys()
+ kw['column_keys'] = list(params)
if isinstance(clause, orm.Query):
context = clause._compile_context()
@@ -205,12 +205,13 @@ class AssertsCompiledSQL(object):
c = clause.compile(dialect=dialect, **kw)
param_str = repr(getattr(c, 'params', {}))
- # Py3K
- #param_str = param_str.encode('utf-8').decode('ascii', 'ignore')
- print "\nSQL String:\n" + str(c) + param_str
+ if util.py3k:
+ param_str = param_str.encode('utf-8').decode('ascii', 'ignore')
- cc = re.sub(r'[\n\t]', '', str(c))
+ print("\nSQL String:\n" + util.text_type(c) + param_str)
+
+ cc = re.sub(r'[\n\t]', '', util.text_type(c))
eq_(cc, result, "%r != %r on dialect %r" % (cc, result, dialect))
@@ -262,7 +263,7 @@ class ComparesTables(object):
class AssertsExecutionResults(object):
def assert_result(self, result, class_, *objects):
result = list(result)
- print repr(result)
+ print(repr(result))
self.assert_list(result, class_, objects)
def assert_list(self, result, class_, list):
@@ -275,7 +276,7 @@ class AssertsExecutionResults(object):
def assert_row(self, class_, rowobj, desc):
self.assert_(rowobj.__class__ is class_,
"item class is not " + repr(class_))
- for key, value in desc.iteritems():
+ for key, value in desc.items():
if isinstance(value, tuple):
if isinstance(value[1], list):
self.assert_list(getattr(rowobj, key), value[0], value[1])
@@ -300,7 +301,7 @@ class AssertsExecutionResults(object):
found = util.IdentitySet(result)
expected = set([immutabledict(e) for e in expected])
- for wrong in itertools.ifilterfalse(lambda o: type(o) == cls, found):
+ for wrong in util.itertools_filterfalse(lambda o: type(o) == cls, found):
fail('Unexpected type "%s", expected "%s"' % (
type(wrong).__name__, cls.__name__))
@@ -311,7 +312,7 @@ class AssertsExecutionResults(object):
NOVALUE = object()
def _compare_item(obj, spec):
- for key, value in spec.iteritems():
+ for key, value in spec.items():
if isinstance(value, tuple):
try:
self.assert_unordered_result(
@@ -352,7 +353,7 @@ class AssertsExecutionResults(object):
for rule in rules:
if isinstance(rule, dict):
newrule = assertsql.AllOf(*[
- assertsql.ExactSQL(k, v) for k, v in rule.iteritems()
+ assertsql.ExactSQL(k, v) for k, v in rule.items()
])
else:
newrule = assertsql.ExactSQL(*rule)
diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py
index 0e250f356..a6b63b2c3 100644
--- a/lib/sqlalchemy/testing/assertsql.py
+++ b/lib/sqlalchemy/testing/assertsql.py
@@ -127,7 +127,7 @@ class RegexSQL(SQLMatchRule):
# do a positive compare only
for param, received in zip(params, _received_parameters):
- for k, v in param.iteritems():
+ for k, v in param.items():
if k not in received or received[k] != v:
equivalent = False
break
@@ -180,7 +180,7 @@ class CompiledSQL(SQLMatchRule):
all_received = list(_received_parameters)
while params:
param = dict(params.pop(0))
- for k, v in context.compiled.params.iteritems():
+ for k, v in context.compiled.params.items():
param.setdefault(k, v)
if param not in _received_parameters:
equivalent = False
@@ -195,9 +195,9 @@ class CompiledSQL(SQLMatchRule):
all_received = []
self._result = equivalent
if not self._result:
- print 'Testing for compiled statement %r partial params '\
+ print('Testing for compiled statement %r partial params '\
'%r, received %r with params %r' % (self.statement,
- all_params, _received_statement, all_received)
+ all_params, _received_statement, all_received))
self._errmsg = \
'Testing for compiled statement %r partial params %r, '\
'received %r with params %r' % (self.statement,
@@ -262,7 +262,7 @@ def _process_engine_statement(query, context):
# oracle+zxjdbc passes a PyStatement when returning into
- query = unicode(query)
+ query = str(query)
if context.engine.name == 'mssql' \
and query.endswith('; select scope_identity()'):
query = query[:-25]
diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py
index 26f561016..efc0103f2 100644
--- a/lib/sqlalchemy/testing/engines.py
+++ b/lib/sqlalchemy/testing/engines.py
@@ -8,7 +8,7 @@ from .util import decorator
from .. import event, pool
import re
import warnings
-
+from .. import util
class ConnectionKiller(object):
@@ -31,18 +31,18 @@ class ConnectionKiller(object):
fn()
except (SystemExit, KeyboardInterrupt):
raise
- except Exception, e:
+ except Exception as e:
warnings.warn(
"testing_reaper couldn't "
"rollback/close connection: %s" % e)
def rollback_all(self):
- for rec in self.proxy_refs.keys():
+ for rec in list(self.proxy_refs):
if rec is not None and rec.is_valid:
self._safe(rec.rollback)
def close_all(self):
- for rec in self.proxy_refs.keys():
+ for rec in list(self.proxy_refs):
if rec is not None:
self._safe(rec._close)
@@ -66,7 +66,7 @@ class ConnectionKiller(object):
self.conns = set()
- for rec in self.testing_engines.keys():
+ for rec in list(self.testing_engines):
if rec is not config.db:
rec.dispose()
@@ -75,7 +75,7 @@ class ConnectionKiller(object):
for conn in self.conns:
self._safe(conn.close)
self.conns = set()
- for rec in self.testing_engines.keys():
+ for rec in list(self.testing_engines):
rec.dispose()
def assert_all_closed(self):
@@ -160,7 +160,7 @@ class ReconnectFixture(object):
fn()
except (SystemExit, KeyboardInterrupt):
raise
- except Exception, e:
+ except Exception as e:
warnings.warn(
"ReconnectFixture couldn't "
"close connection: %s" % e)
@@ -353,23 +353,22 @@ class ReplayableSession(object):
Callable = object()
NoAttribute = object()
- # Py3K
- #Natives = set([getattr(types, t)
- # for t in dir(types) if not t.startswith('_')]). \
- # union([type(t) if not isinstance(t, type)
- # else t for t in __builtins__.values()]).\
- # difference([getattr(types, t)
- # for t in ('FunctionType', 'BuiltinFunctionType',
- # 'MethodType', 'BuiltinMethodType',
- # 'LambdaType', )])
- # Py2K
- Natives = set([getattr(types, t)
- for t in dir(types) if not t.startswith('_')]). \
+ if util.py2k:
+ Natives = set([getattr(types, t)
+ for t in dir(types) if not t.startswith('_')]).\
difference([getattr(types, t)
for t in ('FunctionType', 'BuiltinFunctionType',
'MethodType', 'BuiltinMethodType',
'LambdaType', 'UnboundMethodType',)])
- # end Py2K
+ else:
+ Natives = set([getattr(types, t)
+ for t in dir(types) if not t.startswith('_')]).\
+ union([type(t) if not isinstance(t, type)
+ else t for t in __builtins__.values()]).\
+ difference([getattr(types, t)
+ for t in ('FunctionType', 'BuiltinFunctionType',
+ 'MethodType', 'BuiltinMethodType',
+ 'LambdaType', )])
def __init__(self):
self.buffer = deque()
diff --git a/lib/sqlalchemy/testing/entities.py b/lib/sqlalchemy/testing/entities.py
index 5c5e69154..c0dd58650 100644
--- a/lib/sqlalchemy/testing/entities.py
+++ b/lib/sqlalchemy/testing/entities.py
@@ -7,7 +7,7 @@ _repr_stack = set()
class BasicEntity(object):
def __init__(self, **kw):
- for key, value in kw.iteritems():
+ for key, value in kw.items():
setattr(self, key, value)
def __repr__(self):
@@ -67,7 +67,7 @@ class ComparableEntity(BasicEntity):
a = self
b = other
- for attr in a.__dict__.keys():
+ for attr in list(a.__dict__):
if attr.startswith('_'):
continue
value = getattr(a, attr)
diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py
index 2c0679e1d..f580f3fde 100644
--- a/lib/sqlalchemy/testing/exclusions.py
+++ b/lib/sqlalchemy/testing/exclusions.py
@@ -1,4 +1,4 @@
-from __future__ import with_statement
+
import operator
from nose import SkipTest
@@ -23,10 +23,10 @@ class skip_if(object):
def fail_if(self, name='block'):
try:
yield
- except Exception, ex:
+ except Exception as ex:
if self.predicate():
- print ("%s failed as expected (%s): %s " % (
- name, self.predicate, str(ex)))
+ print(("%s failed as expected (%s): %s " % (
+ name, self.predicate, str(ex))))
else:
raise
else:
@@ -92,7 +92,7 @@ class Predicate(object):
return OrPredicate([cls.as_predicate(pred) for pred in predicate])
elif isinstance(predicate, tuple):
return SpecPredicate(*predicate)
- elif isinstance(predicate, basestring):
+ elif isinstance(predicate, util.string_types):
return SpecPredicate(predicate, None, None)
elif util.callable(predicate):
return LambdaPredicate(predicate)
diff --git a/lib/sqlalchemy/testing/fixtures.py b/lib/sqlalchemy/testing/fixtures.py
index 5c587cb2f..daa779ae3 100644
--- a/lib/sqlalchemy/testing/fixtures.py
+++ b/lib/sqlalchemy/testing/fixtures.py
@@ -1,6 +1,7 @@
from . import config
from . import assertions, schema
from .util import adict
+from .. import util
from .engines import drop_all_tables
from .entities import BasicEntity, ComparableEntity
import sys
@@ -125,9 +126,10 @@ class TablesTest(TestBase):
for table in reversed(self.metadata.sorted_tables):
try:
table.delete().execute().close()
- except sa.exc.DBAPIError, ex:
- print >> sys.stderr, "Error emptying table %s: %r" % (
- table, ex)
+ except sa.exc.DBAPIError as ex:
+ util.print_(
+ ("Error emptying table %s: %r" % (table, ex)),
+ file=sys.stderr)
def setup(self):
self._setup_each_tables()
@@ -187,10 +189,10 @@ class TablesTest(TestBase):
def _load_fixtures(cls):
"""Insert rows as represented by the fixtures() method."""
headers, rows = {}, {}
- for table, data in cls.fixtures().iteritems():
+ for table, data in cls.fixtures().items():
if len(data) < 2:
continue
- if isinstance(table, basestring):
+ if isinstance(table, util.string_types):
table = cls.tables[table]
headers[table] = data[0]
rows[table] = data[1:]
@@ -284,8 +286,8 @@ class MappedTest(_ORMTest, TablesTest, assertions.AssertsExecutionResults):
cls_registry[classname] = cls
return type.__init__(cls, classname, bases, dict_)
- class _Base(object):
- __metaclass__ = FindFixture
+ class _Base(util.with_metaclass(FindFixture, object)):
+ pass
class Basic(BasicEntity, _Base):
pass
diff --git a/lib/sqlalchemy/testing/plugin/noseplugin.py b/lib/sqlalchemy/testing/plugin/noseplugin.py
index 5bd7ff3cd..b3cd3a4e3 100644
--- a/lib/sqlalchemy/testing/plugin/noseplugin.py
+++ b/lib/sqlalchemy/testing/plugin/noseplugin.py
@@ -9,14 +9,20 @@ When third party libraries use this plugin, it can be imported
normally as "from sqlalchemy.testing.plugin import noseplugin".
"""
+
from __future__ import absolute_import
import os
-import ConfigParser
+import sys
+py3k = sys.version_info >= (3, 0)
+
+if py3k:
+ import configparser
+else:
+ import ConfigParser as configparser
from nose.plugins import Plugin
from nose import SkipTest
-import time
import sys
import re
@@ -55,9 +61,9 @@ def _log(option, opt_str, value, parser):
def _list_dbs(*args):
- print "Available --db options (use --dburi to override)"
+ print("Available --db options (use --dburi to override)")
for macro in sorted(file_config.options('db')):
- print "%20s\t%s" % (macro, file_config.get('db', macro))
+ print("%20s\t%s" % (macro, file_config.get('db', macro)))
sys.exit(0)
@@ -318,7 +324,7 @@ class NoseSQLAlchemy(Plugin):
opt("--write-profiles", action="store_true", dest="write_profiles", default=False,
help="Write/update profiling data.")
global file_config
- file_config = ConfigParser.ConfigParser()
+ file_config = configparser.ConfigParser()
file_config.read(['setup.cfg', 'test.cfg'])
def configure(self, options, conf):
diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py
index 19a9731be..bda44d80c 100644
--- a/lib/sqlalchemy/testing/profiling.py
+++ b/lib/sqlalchemy/testing/profiling.py
@@ -60,9 +60,9 @@ def profiled(target=None, **target_opts):
if report:
sort_ = target_opts.get('sort', profile_config['sort'])
limit = target_opts.get('limit', profile_config['limit'])
- print ("Profile report for target '%s' (%s)" % (
+ print(("Profile report for target '%s' (%s)" % (
target, filename)
- )
+ ))
stats = load_stats()
stats.sort_stats(*sort_)
@@ -198,7 +198,7 @@ class ProfileStatsFile(object):
profile_f.close()
def _write(self):
- print("Writing profile file %s" % self.fname)
+ print(("Writing profile file %s" % self.fname))
profile_f = open(self.fname, "w")
profile_f.write(self._header())
for test_key in sorted(self.data):
@@ -253,11 +253,11 @@ def function_call_count(variance=0.05):
else:
line_no, expected_count = expected
- print("Pstats calls: %d Expected %s" % (
+ print(("Pstats calls: %d Expected %s" % (
callcount,
expected_count
)
- )
+ ))
stats.print_stats()
#stats.print_callers()
diff --git a/lib/sqlalchemy/testing/schema.py b/lib/sqlalchemy/testing/schema.py
index 325d74f1e..025bbaabe 100644
--- a/lib/sqlalchemy/testing/schema.py
+++ b/lib/sqlalchemy/testing/schema.py
@@ -11,7 +11,7 @@ table_options = {}
def Table(*args, **kw):
"""A schema.Table wrapper/hook for dialect-specific tweaks."""
- test_opts = dict([(k, kw.pop(k)) for k in kw.keys()
+ test_opts = dict([(k, kw.pop(k)) for k in list(kw)
if k.startswith('test_')])
kw.update(table_options)
@@ -58,7 +58,7 @@ def Table(*args, **kw):
def Column(*args, **kw):
"""A schema.Column wrapper/hook for dialect-specific tweaks."""
- test_opts = dict([(k, kw.pop(k)) for k in kw.keys()
+ test_opts = dict([(k, kw.pop(k)) for k in list(kw)
if k.startswith('test_')])
if not config.requirements.foreign_key_ddl.enabled:
diff --git a/lib/sqlalchemy/testing/suite/test_ddl.py b/lib/sqlalchemy/testing/suite/test_ddl.py
index fc1c19362..28251b807 100644
--- a/lib/sqlalchemy/testing/suite/test_ddl.py
+++ b/lib/sqlalchemy/testing/suite/test_ddl.py
@@ -1,4 +1,4 @@
-from __future__ import with_statement
+
from .. import fixtures, config, util
from ..config import requirements
diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py
index 7cae48572..ff76bd2cd 100644
--- a/lib/sqlalchemy/testing/suite/test_reflection.py
+++ b/lib/sqlalchemy/testing/suite/test_reflection.py
@@ -1,4 +1,4 @@
-from __future__ import with_statement
+
import sqlalchemy as sa
from sqlalchemy import exc as sa_exc
@@ -386,7 +386,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
self.tables.email_addresses, self.tables.dingalings
insp = inspect(meta.bind)
oid = insp.get_table_oid(table_name, schema)
- self.assert_(isinstance(oid, (int, long)))
+ self.assert_(isinstance(oid, int))
def test_get_table_oid(self):
self._test_get_table_oid('users')
diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py
index 0716b1b91..0d8bfdc0f 100644
--- a/lib/sqlalchemy/testing/suite/test_types.py
+++ b/lib/sqlalchemy/testing/suite/test_types.py
@@ -10,14 +10,15 @@ from ..schema import Table, Column
from ... import testing
import decimal
import datetime
-
+from ...util import u
+from ... import util
class _UnicodeFixture(object):
__requires__ = 'unicode_data',
- data = u"Alors vous imaginez ma surprise, au lever du jour, "\
- u"quand une drôle de petite voix m’a réveillé. Elle "\
- u"disait: « S’il vous plaît… dessine-moi un mouton! »"
+ data = u("Alors vous imaginez ma surprise, au lever du jour, "\
+ "quand une drôle de petite voix m’a réveillé. Elle "\
+ "disait: « S’il vous plaît… dessine-moi un mouton! »")
@classmethod
def define_tables(cls, metadata):
@@ -47,7 +48,7 @@ class _UnicodeFixture(object):
row,
(self.data, )
)
- assert isinstance(row[0], unicode)
+ assert isinstance(row[0], util.text_type)
def test_round_trip_executemany(self):
unicode_table = self.tables.unicode_table
@@ -58,7 +59,7 @@ class _UnicodeFixture(object):
{
'unicode_data': self.data,
}
- for i in xrange(3)
+ for i in range(3)
]
)
@@ -69,22 +70,22 @@ class _UnicodeFixture(object):
).fetchall()
eq_(
rows,
- [(self.data, ) for i in xrange(3)]
+ [(self.data, ) for i in range(3)]
)
for row in rows:
- assert isinstance(row[0], unicode)
+ assert isinstance(row[0], util.text_type)
def _test_empty_strings(self):
unicode_table = self.tables.unicode_table
config.db.execute(
unicode_table.insert(),
- {"unicode_data": u''}
+ {"unicode_data": u('')}
)
row = config.db.execute(
select([unicode_table.c.unicode_data])
).first()
- eq_(row, (u'',))
+ eq_(row, (u(''),))
class UnicodeVarcharTest(_UnicodeFixture, fixtures.TablesTest):
diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py
index d9ff14eaf..5facd2f06 100644
--- a/lib/sqlalchemy/testing/util.py
+++ b/lib/sqlalchemy/testing/util.py
@@ -32,13 +32,13 @@ else:
def picklers():
picklers = set()
- # Py2K
- try:
- import cPickle
- picklers.add(cPickle)
- except ImportError:
- pass
- # end Py2K
+# start Py2K
+# try:
+# import cPickle
+# picklers.add(cPickle)
+# except ImportError:
+# pass
+# end Py2K
import pickle
picklers.add(pickle)
@@ -130,8 +130,8 @@ def function_named(fn, name):
try:
fn.__name__ = name
except TypeError:
- fn = types.FunctionType(fn.func_code, fn.func_globals, name,
- fn.func_defaults, fn.func_closure)
+ fn = types.FunctionType(fn.__code__, fn.__globals__, name,
+ fn.__defaults__, fn.__closure__)
return fn
diff --git a/lib/sqlalchemy/testing/warnings.py b/lib/sqlalchemy/testing/warnings.py
index 41f3dbfed..6193acd88 100644
--- a/lib/sqlalchemy/testing/warnings.py
+++ b/lib/sqlalchemy/testing/warnings.py
@@ -10,7 +10,7 @@ def testing_warn(msg, stacklevel=3):
filename = "sqlalchemy.testing.warnings"
lineno = 1
- if isinstance(msg, basestring):
+ if isinstance(msg, util.string_types):
warnings.warn_explicit(msg, sa_exc.SAWarning, filename, lineno)
else:
warnings.warn_explicit(msg, filename, lineno)
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index 46cf9e2a1..bfff05362 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -156,8 +156,8 @@ class TypeEngine(AbstractType):
"""
- return self.__class__.column_expression.func_code \
- is not TypeEngine.column_expression.func_code
+ return self.__class__.column_expression.__code__ \
+ is not TypeEngine.column_expression.__code__
def bind_expression(self, bindvalue):
""""Given a bind value (i.e. a :class:`.BindParameter` instance),
@@ -194,8 +194,8 @@ class TypeEngine(AbstractType):
"""
- return self.__class__.bind_expression.func_code \
- is not TypeEngine.bind_expression.func_code
+ return self.__class__.bind_expression.__code__ \
+ is not TypeEngine.bind_expression.__code__
def compare_values(self, x, y):
"""Compare two values for equality."""
@@ -392,12 +392,11 @@ class TypeEngine(AbstractType):
return default.DefaultDialect()
def __str__(self):
- # Py3K
- #return unicode(self.compile())
- # Py2K
- return unicode(self.compile()).\
+ if util.py2k:
+ return unicode(self.compile()).\
encode('ascii', 'backslashreplace')
- # end Py2K
+ else:
+ return str(self.compile())
def __init__(self, *args, **kwargs):
"""Support implementations that were passing arguments"""
@@ -723,8 +722,8 @@ class TypeDecorator(TypeEngine):
"""
- return self.__class__.process_bind_param.func_code \
- is not TypeDecorator.process_bind_param.func_code
+ return self.__class__.process_bind_param.__code__ \
+ is not TypeDecorator.process_bind_param.__code__
def bind_processor(self, dialect):
"""Provide a bound value processing function for the
@@ -769,8 +768,8 @@ class TypeDecorator(TypeEngine):
exception throw.
"""
- return self.__class__.process_result_value.func_code \
- is not TypeDecorator.process_result_value.func_code
+ return self.__class__.process_result_value.__code__ \
+ is not TypeDecorator.process_result_value.__code__
def result_processor(self, dialect, coltype):
"""Provide a result value processing function for the given
@@ -1114,11 +1113,7 @@ class String(Concatenable, TypeEngine):
self.convert_unicode != 'force':
if self._warn_on_bytestring:
def process(value):
- # Py3K
- #if isinstance(value, bytes):
- # Py2K
- if isinstance(value, str):
- # end Py2K
+ if isinstance(value, util.binary_type):
util.warn("Unicode type received non-unicode bind "
"param value.")
return value
@@ -1130,7 +1125,7 @@ class String(Concatenable, TypeEngine):
warn_on_bytestring = self._warn_on_bytestring
def process(value):
- if isinstance(value, unicode):
+ if isinstance(value, util.text_type):
return encoder(value, self.unicode_error)[0]
elif warn_on_bytestring and value is not None:
util.warn("Unicode type received non-unicode bind "
@@ -1156,7 +1151,7 @@ class String(Concatenable, TypeEngine):
# habits. since we will be getting back unicode
# in most cases, we check for it (decode will fail).
def process(value):
- if isinstance(value, unicode):
+ if isinstance(value, util.text_type):
return value
else:
return to_unicode(value)
@@ -1171,7 +1166,7 @@ class String(Concatenable, TypeEngine):
@property
def python_type(self):
if self.convert_unicode:
- return unicode
+ return util.text_type
else:
return str
@@ -1318,12 +1313,10 @@ class Integer(_DateAffinity, TypeEngine):
Integer: self.__class__,
Numeric: Numeric,
},
- # Py2K
operators.div: {
Integer: self.__class__,
Numeric: Numeric,
},
- # end Py2K
operators.truediv: {
Integer: self.__class__,
Numeric: Numeric,
@@ -1488,12 +1481,10 @@ class Numeric(_DateAffinity, TypeEngine):
Numeric: self.__class__,
Integer: self.__class__,
},
- # Py2K
operators.div: {
Numeric: self.__class__,
Integer: self.__class__,
},
- # end Py2K
operators.truediv: {
Numeric: self.__class__,
Integer: self.__class__,
@@ -1558,11 +1549,9 @@ class Float(Numeric):
Interval: Interval,
Numeric: self.__class__,
},
- # Py2K
operators.div: {
Numeric: self.__class__,
},
- # end Py2K
operators.truediv: {
Numeric: self.__class__,
},
@@ -1693,11 +1682,7 @@ class _Binary(TypeEngine):
@property
def python_type(self):
- # Py3K
- #return bytes
- # Py2K
- return str
- # end Py2K
+ return util.binary_type
# Python 3 - sqlite3 doesn't need the `Binary` conversion
# here, though pg8000 does to indicate "bytea"
@@ -1715,32 +1700,31 @@ class _Binary(TypeEngine):
# Python 3 has native bytes() type
# both sqlite3 and pg8000 seem to return it,
# psycopg2 as of 2.5 returns 'memoryview'
- # Py3K
- #def result_processor(self, dialect, coltype):
- # def process(value):
- # if value is not None:
- # value = bytes(value)
- # return value
- # return process
- # Py2K
- def result_processor(self, dialect, coltype):
- if util.jython:
+ if util.py2k:
+ def result_processor(self, dialect, coltype):
+ if util.jython:
+ def process(value):
+ if value is not None:
+ if isinstance(value, array.array):
+ return value.tostring()
+ return str(value)
+ else:
+ return None
+ else:
+ process = processors.to_str
+ return process
+ else:
+ def result_processor(self, dialect, coltype):
def process(value):
if value is not None:
- if isinstance(value, array.array):
- return value.tostring()
- return str(value)
- else:
- return None
- else:
- process = processors.to_str
- return process
- # end Py2K
+ value = bytes(value)
+ return value
+ return process
def coerce_compared_value(self, op, value):
"""See :meth:`.TypeEngine.coerce_compared_value` for a description."""
- if isinstance(value, basestring):
+ if isinstance(value, util.string_types):
return self
else:
return super(_Binary, self).coerce_compared_value(op, value)
@@ -1997,7 +1981,7 @@ class Enum(String, SchemaType):
convert_unicode = kw.pop('convert_unicode', None)
if convert_unicode is None:
for e in enums:
- if isinstance(e, unicode):
+ if isinstance(e, util.text_type):
convert_unicode = True
break
else:
@@ -2296,11 +2280,9 @@ class Interval(_DateAffinity, TypeDecorator):
operators.truediv: {
Numeric: self.__class__
},
- # Py2K
operators.div: {
Numeric: self.__class__
}
- # end Py2K
}
@property
@@ -2450,12 +2432,6 @@ BOOLEANTYPE = Boolean()
STRINGTYPE = String()
_type_map = {
- str: String(),
- # Py3K
- #bytes: LargeBinary(),
- # Py2K
- unicode: Unicode(),
- # end Py2K
int: Integer(),
float: Numeric(),
bool: BOOLEANTYPE,
@@ -2466,3 +2442,12 @@ _type_map = {
dt.timedelta: Interval(),
NoneType: NULLTYPE
}
+
+if util.py3k:
+ _type_map[bytes] = LargeBinary()
+ _type_map[str] = Unicode()
+else:
+ _type_map[unicode] = Unicode()
+ _type_map[str] = String()
+
+
diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py
index 3fa06c793..687abb39a 100644
--- a/lib/sqlalchemy/util/__init__.py
+++ b/lib/sqlalchemy/util/__init__.py
@@ -5,9 +5,12 @@
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from .compat import callable, cmp, reduce, \
- threading, py3k, py3k_warning, jython, pypy, cpython, win32, set_types, \
+ threading, py3k, py2k, jython, pypy, cpython, win32, \
pickle, dottedgetter, parse_qsl, namedtuple, next, WeakSet, reraise, \
- raise_from_cause
+ raise_from_cause, text_type, string_types, int_types, binary_type, \
+ quote_plus, with_metaclass, print_, itertools_filterfalse, u, ue, b,\
+ unquote_plus, b64decode, b64encode, byte_buffer, itertools_filter,\
+ StringIO
from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \
Properties, OrderedProperties, ImmutableProperties, OrderedDict, \
diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py
index 8e61275e7..86a90828a 100644
--- a/lib/sqlalchemy/util/_collections.py
+++ b/lib/sqlalchemy/util/_collections.py
@@ -9,7 +9,8 @@
import itertools
import weakref
import operator
-from .compat import threading
+from .compat import threading, itertools_filterfalse
+from . import py2k
EMPTY_SET = frozenset()
@@ -142,7 +143,7 @@ class Properties(object):
return len(self._data)
def __iter__(self):
- return self._data.itervalues()
+ return iter(list(self._data.values()))
def __add__(self, other):
return list(self) + list(other)
@@ -189,13 +190,13 @@ class Properties(object):
return default
def keys(self):
- return self._data.keys()
+ return list(self._data)
def values(self):
- return self._data.values()
+ return list(self._data.values())
def items(self):
- return self._data.items()
+ return list(self._data.items())
def has_key(self, key):
return key in self._data
@@ -260,23 +261,55 @@ class OrderedDict(dict):
def __iter__(self):
return iter(self._list)
- def values(self):
- return [self[key] for key in self._list]
- def itervalues(self):
- return iter([self[key] for key in self._list])
+ if py2k:
+ def values(self):
+ return [self[key] for key in self._list]
- def keys(self):
- return list(self._list)
+ def keys(self):
+ return self._list
- def iterkeys(self):
- return iter(self.keys())
+ def itervalues(self):
+ return iter([self[key] for key in self._list])
- def items(self):
- return [(key, self[key]) for key in self.keys()]
+ def iterkeys(self):
+ return iter(self)
+
+ def iteritems(self):
+ return iter(self.items())
+
+ def items(self):
+ return [(key, self[key]) for key in self._list]
+ else:
+ def values(self):
+ #return (self[key] for key in self)
+ return (self[key] for key in self._list)
+
+ def keys(self):
+ #return iter(self)
+ return iter(self._list)
+
+ def items(self):
+ #return ((key, self[key]) for key in self)
+ return ((key, self[key]) for key in self._list)
+
+ _debug_iter = False
+ if _debug_iter:
+ # normally disabled to reduce function call
+ # overhead
+ def __iter__(self):
+ len_ = len(self._list)
+ for item in self._list:
+ yield item
+ assert len_ == len(self._list), \
+ "Dictionary changed size during iteration"
+ def values(self):
+ return (self[key] for key in self)
+ def keys(self):
+ return iter(self)
+ def items(self):
+ return ((key, self[key]) for key in self)
- def iteritems(self):
- return iter(self.items())
def __setitem__(self, key, object):
if key not in self:
@@ -470,8 +503,8 @@ class IdentitySet(object):
if len(self) > len(other):
return False
- for m in itertools.ifilterfalse(other._members.__contains__,
- self._members.iterkeys()):
+ for m in itertools_filterfalse(other._members.__contains__,
+ iter(self._members.keys())):
return False
return True
@@ -491,8 +524,8 @@ class IdentitySet(object):
if len(self) < len(other):
return False
- for m in itertools.ifilterfalse(self._members.__contains__,
- other._members.iterkeys()):
+ for m in itertools_filterfalse(self._members.__contains__,
+ iter(other._members.keys())):
return False
return True
@@ -582,7 +615,7 @@ class IdentitySet(object):
return result
def _member_id_tuples(self):
- return ((id(v), v) for v in self._members.itervalues())
+ return ((id(v), v) for v in self._members.values())
def __xor__(self, other):
if not isinstance(other, IdentitySet):
@@ -599,7 +632,7 @@ class IdentitySet(object):
return self
def copy(self):
- return type(self)(self._members.itervalues())
+ return type(self)(iter(self._members.values()))
__copy__ = copy
@@ -607,13 +640,13 @@ class IdentitySet(object):
return len(self._members)
def __iter__(self):
- return self._members.itervalues()
+ return iter(self._members.values())
def __hash__(self):
raise TypeError('set objects are unhashable')
def __repr__(self):
- return '%s(%r)' % (type(self).__name__, self._members.values())
+ return '%s(%r)' % (type(self).__name__, list(self._members.values()))
class WeakSequence(object):
@@ -623,7 +656,7 @@ class WeakSequence(object):
)
def __iter__(self):
- return self._storage.itervalues()
+ return iter(self._storage.values())
def __getitem__(self, index):
try:
@@ -754,7 +787,7 @@ def flatten_iterator(x):
"""
for elem in x:
- if not isinstance(elem, basestring) and hasattr(elem, '__iter__'):
+ if not isinstance(elem, str) and hasattr(elem, '__iter__'):
for y in flatten_iterator(elem):
yield y
else:
diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py
index 033a87cc7..fea22873c 100644
--- a/lib/sqlalchemy/util/compat.py
+++ b/lib/sqlalchemy/util/compat.py
@@ -14,39 +14,17 @@ except ImportError:
import dummy_threading as threading
py32 = sys.version_info >= (3, 2)
-py3k_warning = getattr(sys, 'py3kwarning', False) or sys.version_info >= (3, 0)
py3k = sys.version_info >= (3, 0)
+py2k = sys.version_info < (3, 0)
jython = sys.platform.startswith('java')
pypy = hasattr(sys, 'pypy_version_info')
win32 = sys.platform.startswith('win')
cpython = not pypy and not jython # TODO: something better for this ?
-if py3k_warning:
- set_types = set
-elif sys.version_info < (2, 6):
- import sets
- set_types = set, sets.Set
-else:
- # 2.6 deprecates sets.Set, but we still need to be able to detect them
- # in user code and as return values from DB-APIs
- ignore = ('ignore', None, DeprecationWarning, None, 0)
- import warnings
- try:
- warnings.filters.insert(0, ignore)
- except Exception:
- import sets
- else:
- import sets
- warnings.filters.remove(ignore)
- set_types = set, sets.Set
+next = next
-if sys.version_info < (2, 6):
- def next(iter):
- return iter.next()
-else:
- next = next
-if py3k_warning:
+if py3k:
import pickle
else:
try:
@@ -54,44 +32,114 @@ else:
except ImportError:
import pickle
-if sys.version_info < (2, 6):
- # emits a nasty deprecation warning
- # in newer pythons
- from cgi import parse_qsl
-else:
- from urlparse import parse_qsl
+if py3k:
+ import builtins
+
+ from inspect import getfullargspec as inspect_getfullargspec
+ from urllib.parse import quote_plus, unquote_plus, parse_qsl
+ import configparser
+ from io import StringIO
+
+ from io import BytesIO as byte_buffer
+
+
+ string_types = str,
+ binary_type = bytes
+ text_type = str
+ int_types = int,
+ iterbytes = iter
+
+ def u(s):
+ return s
-# Py3K
-#from inspect import getfullargspec as inspect_getfullargspec
-# Py2K
-from inspect import getargspec as inspect_getfullargspec
-# end Py2K
+ def ue(s):
+ return s
-if py3k_warning:
- # they're bringing it back in 3.2. brilliant !
- def callable(fn):
- return hasattr(fn, '__call__')
+ def b(s):
+ return s.encode("latin-1")
+
+ if py32:
+ callable = callable
+ else:
+ def callable(fn):
+ return hasattr(fn, '__call__')
def cmp(a, b):
return (a > b) - (a < b)
from functools import reduce
+
+ print_ = getattr(builtins, "print")
+
+ import_ = getattr(builtins, '__import__')
+
+ import itertools
+ itertools_filterfalse = itertools.filterfalse
+ itertools_filter = filter
+ itertools_imap = map
+
+ import base64
+ def b64encode(x):
+ return base64.b64encode(x).decode('ascii')
+ def b64decode(x):
+ return base64.b64decode(x.encode('ascii'))
+
else:
+ from inspect import getargspec as inspect_getfullargspec
+ from urllib import quote_plus, unquote_plus
+ from urlparse import parse_qsl
+ import ConfigParser as configparser
+ from StringIO import StringIO
+ from cStringIO import StringIO as byte_buffer
+
+ string_types = basestring,
+ binary_type = str
+ text_type = unicode
+ int_types = int, long
+ def iterbytes(buf):
+ return (ord(byte) for byte in buf)
+
+ def u(s):
+ # this differs from what six does, which doesn't support non-ASCII
+ # strings - we only use u() with
+ # literal source strings, and all our source files with non-ascii
+ # in them (all are tests) are utf-8 encoded.
+ return unicode(s, "utf-8")
+
+ def ue(s):
+ return unicode(s, "unicode_escape")
+
+ def b(s):
+ return s
+
+ def import_(*args):
+ if len(args) == 4:
+ args = args[0:3] + ([str(arg) for arg in args[3]],)
+ return __import__(*args)
+
callable = callable
cmp = cmp
reduce = reduce
-try:
- from collections import namedtuple
-except ImportError:
- def namedtuple(typename, fieldnames):
- def __new__(cls, *values):
- tup = tuple.__new__(cls, values)
- for i, fname in enumerate(fieldnames):
- setattr(tup, fname, tup[i])
- return tup
- tuptype = type(typename, (tuple, ), {'__new__': __new__})
- return tuptype
+ import base64
+ b64encode = base64.b64encode
+ b64decode = base64.b64decode
+
+ def print_(*args, **kwargs):
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+ for arg in enumerate(args):
+ if not isinstance(arg, basestring):
+ arg = str(arg)
+ fp.write(arg)
+
+ import itertools
+ itertools_filterfalse = itertools.ifilterfalse
+ itertools_filter = itertools.ifilter
+ itertools_imap = itertools.imap
+
+
try:
from weakref import WeakSet
@@ -121,24 +169,8 @@ if win32 or jython:
else:
time_func = time.time
-
-if sys.version_info >= (2, 6):
- from operator import attrgetter as dottedgetter
-else:
- def dottedgetter(attr):
- def g(obj):
- for name in attr.split("."):
- obj = getattr(obj, name)
- return obj
- return g
-
-# Adapted from six.py
-if py3k:
- def b(s):
- return s.encode("latin-1")
-else:
- def b(s):
- return s
+from collections import namedtuple
+from operator import attrgetter as dottedgetter
if py3k:
@@ -149,19 +181,49 @@ if py3k:
raise value.with_traceback(tb)
raise value
- def raise_from_cause(exception, exc_info):
+ def raise_from_cause(exception, exc_info=None):
+ if exc_info is None:
+ exc_info = sys.exc_info()
exc_type, exc_value, exc_tb = exc_info
reraise(type(exception), exception, tb=exc_tb, cause=exc_value)
else:
exec("def reraise(tp, value, tb=None, cause=None):\n"
" raise tp, value, tb\n")
- def raise_from_cause(exception, exc_info):
+ def raise_from_cause(exception, exc_info=None):
# not as nice as that of Py3K, but at least preserves
# the code line where the issue occurred
+ if exc_info is None:
+ exc_info = sys.exc_info()
exc_type, exc_value, exc_tb = exc_info
reraise(type(exception), exception, tb=exc_tb)
+if py3k:
+ exec_ = getattr(builtins, 'exec')
+else:
+ def exec_(func_text, globals_, lcl=None):
+ if lcl is None:
+ exec('exec func_text in globals_')
+ else:
+ exec('exec func_text in globals_, lcl')
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass.
+
+ Drops the middle class upon creation.
+
+ Source: http://lucumr.pocoo.org/2013/5/21/porting-to-python-3-redux/
+
+ """
+ class metaclass(meta):
+ __call__ = type.__call__
+ __init__ = type.__init__
+ def __new__(cls, name, this_bases, d):
+ if this_bases is None:
+ return type.__new__(cls, name, (), d)
+ return meta(name, bases, d)
+ return metaclass('temporary_class', None, {})
diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py
index 34be4fbf8..e0dc168db 100644
--- a/lib/sqlalchemy/util/deprecations.py
+++ b/lib/sqlalchemy/util/deprecations.py
@@ -10,7 +10,7 @@ functionality."""
from .. import exc
import warnings
import re
-from langhelpers import decorator
+from .langhelpers import decorator
def warn_deprecated(msg, stacklevel=3):
diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py
index d82aefdea..b0ff5b073 100644
--- a/lib/sqlalchemy/util/langhelpers.py
+++ b/lib/sqlalchemy/util/langhelpers.py
@@ -15,16 +15,14 @@ import re
import sys
import types
import warnings
-from .compat import set_types, threading, \
- callable, inspect_getfullargspec
from functools import update_wrapper
from .. import exc
import hashlib
from . import compat
def md5_hex(x):
- # Py3K
- #x = x.encode('utf-8')
+ if compat.py3k:
+ x = x.encode('utf-8')
m = hashlib.md5()
m.update(x)
return m.hexdigest()
@@ -77,8 +75,8 @@ def _unique_symbols(used, *bases):
used = set(used)
for base in bases:
pool = itertools.chain((base,),
- itertools.imap(lambda i: base + str(i),
- xrange(1000)))
+ compat.itertools_imap(lambda i: base + str(i),
+ range(1000)))
for sym in pool:
if sym not in used:
used.add(sym)
@@ -94,8 +92,8 @@ def decorator(target):
def decorate(fn):
if not inspect.isfunction(fn):
raise Exception("not a decoratable function")
- spec = inspect_getfullargspec(fn)
- names = tuple(spec[0]) + spec[1:3] + (fn.func_name,)
+ spec = compat.inspect_getfullargspec(fn)
+ names = tuple(spec[0]) + spec[1:3] + (fn.__name__,)
targ_name, fn_name = _unique_symbols(names, 'target', 'fn')
metadata = dict(target=targ_name, fn=fn_name)
@@ -104,7 +102,7 @@ def decorator(target):
code = 'lambda %(args)s: %(target)s(%(fn)s, %(apply_kw)s)' % (
metadata)
decorated = eval(code, {targ_name: target, fn_name: fn})
- decorated.func_defaults = getattr(fn, 'im_func', fn).func_defaults
+ decorated.__defaults__ = getattr(fn, 'im_func', fn).__defaults__
return update_wrapper(decorated, fn)
return update_wrapper(decorate, target)
@@ -143,7 +141,7 @@ class PluginLoader(object):
def register(self, name, modulepath, objname):
def load():
- mod = __import__(modulepath)
+ mod = compat.import_(modulepath)
for token in modulepath.split(".")[1:]:
mod = getattr(mod, token)
return getattr(mod, objname)
@@ -169,7 +167,7 @@ def get_cls_kwargs(cls, _set=None):
ctr = cls.__dict__.get('__init__', False)
has_init = ctr and isinstance(ctr, types.FunctionType) and \
- isinstance(ctr.func_code, types.CodeType)
+ isinstance(ctr.__code__, types.CodeType)
if has_init:
names, has_kw = inspect_func_args(ctr)
@@ -192,7 +190,7 @@ try:
from inspect import CO_VARKEYWORDS
def inspect_func_args(fn):
- co = fn.func_code
+ co = fn.__code__
nargs = co.co_argcount
names = co.co_varnames
args = list(names[:nargs])
@@ -248,8 +246,8 @@ def format_argspec_plus(fn, grouped=True):
'apply_pos': '(self, a, b, c, **d)'}
"""
- if callable(fn):
- spec = inspect_getfullargspec(fn)
+ if compat.callable(fn):
+ spec = compat.inspect_getfullargspec(fn)
else:
# we accept an existing argspec...
spec = fn
@@ -261,22 +259,21 @@ def format_argspec_plus(fn, grouped=True):
else:
self_arg = None
- # Py3K
- #apply_pos = inspect.formatargspec(spec[0], spec[1],
- # spec[2], None, spec[4])
- #num_defaults = 0
- #if spec[3]:
- # num_defaults += len(spec[3])
- #if spec[4]:
- # num_defaults += len(spec[4])
- #name_args = spec[0] + spec[4]
- # Py2K
- apply_pos = inspect.formatargspec(spec[0], spec[1], spec[2])
- num_defaults = 0
- if spec[3]:
- num_defaults += len(spec[3])
- name_args = spec[0]
- # end Py2K
+ if compat.py3k:
+ apply_pos = inspect.formatargspec(spec[0], spec[1],
+ spec[2], None, spec[4])
+ num_defaults = 0
+ if spec[3]:
+ num_defaults += len(spec[3])
+ if spec[4]:
+ num_defaults += len(spec[4])
+ name_args = spec[0] + spec[4]
+ else:
+ apply_pos = inspect.formatargspec(spec[0], spec[1], spec[2])
+ num_defaults = 0
+ if spec[3]:
+ num_defaults += len(spec[3])
+ name_args = spec[0]
if num_defaults:
defaulted_vals = name_args[0 - num_defaults:]
@@ -339,8 +336,8 @@ def unbound_method_to_callable(func_or_cls):
"""
- if isinstance(func_or_cls, types.MethodType) and not func_or_cls.im_self:
- return func_or_cls.im_func
+ if isinstance(func_or_cls, types.MethodType) and not func_or_cls.__self__:
+ return func_or_cls.__func__
else:
return func_or_cls
@@ -397,7 +394,7 @@ class portable_instancemethod(object):
"""
def __init__(self, meth):
- self.target = meth.im_self
+ self.target = meth.__self__
self.name = meth.__name__
def __call__(self, *arg, **kw):
@@ -417,32 +414,33 @@ def class_hierarchy(cls):
will not be descended.
"""
- # Py2K
- if isinstance(cls, types.ClassType):
- return list()
- # end Py2K
+ if compat.py2k:
+ if isinstance(cls, types.ClassType):
+ return list()
+
hier = set([cls])
process = list(cls.__mro__)
while process:
c = process.pop()
- # Py2K
- if isinstance(c, types.ClassType):
- continue
- for b in (_ for _ in c.__bases__
- if _ not in hier and not isinstance(_, types.ClassType)):
- # end Py2K
- # Py3K
- #for b in (_ for _ in c.__bases__
- # if _ not in hier):
+ if compat.py2k:
+ if isinstance(c, types.ClassType):
+ continue
+ bases = (_ for _ in c.__bases__
+ if _ not in hier and not isinstance(_, types.ClassType))
+ else:
+ bases = (_ for _ in c.__bases__ if _ not in hier)
+
+ for b in bases:
process.append(b)
hier.add(b)
- # Py3K
- #if c.__module__ == 'builtins' or not hasattr(c, '__subclasses__'):
- # continue
- # Py2K
- if c.__module__ == '__builtin__' or not hasattr(c, '__subclasses__'):
- continue
- # end Py2K
+
+ if compat.py3k:
+ if c.__module__ == 'builtins' or not hasattr(c, '__subclasses__'):
+ continue
+ else:
+ if c.__module__ == '__builtin__' or not hasattr(c, '__subclasses__'):
+ continue
+
for s in [_ for _ in c.__subclasses__() if _ not in hier]:
process.append(s)
hier.add(s)
@@ -499,9 +497,9 @@ def monkeypatch_proxied_specials(into_cls, from_cls, skip=None, only=None,
"return %(name)s.%(method)s%(d_args)s" % locals())
env = from_instance is not None and {name: from_instance} or {}
- exec py in env
+ compat.exec_(py, env)
try:
- env[method].func_defaults = fn.func_defaults
+ env[method].__defaults__ = fn.__defaults__
except AttributeError:
pass
setattr(into_cls, method, env[method])
@@ -510,11 +508,7 @@ def monkeypatch_proxied_specials(into_cls, from_cls, skip=None, only=None,
def methods_equivalent(meth1, meth2):
"""Return True if the two methods are the same implementation."""
- # Py3K
- #return getattr(meth1, '__func__', meth1) is getattr(meth2, '__func__', meth2)
- # Py2K
- return getattr(meth1, 'im_func', meth1) is getattr(meth2, 'im_func', meth2)
- # end Py2K
+ return getattr(meth1, '__func__', meth1) is getattr(meth2, '__func__', meth2)
def as_interface(obj, cls=None, methods=None, required=None):
@@ -587,7 +581,7 @@ def as_interface(obj, cls=None, methods=None, required=None):
for method, impl in dictlike_iteritems(obj):
if method not in interface:
raise TypeError("%r: unknown in this interface" % method)
- if not callable(impl):
+ if not compat.callable(impl):
raise TypeError("%r=%r is not callable" % (method, impl))
setattr(AnonymousInterface, method, staticmethod(impl))
found.add(method)
@@ -728,11 +722,11 @@ class importlater(object):
def _resolve(self):
importlater._unresolved.discard(self)
if self._il_addtl:
- self._initial_import = __import__(
+ self._initial_import = compat.import_(
self._il_path, globals(), locals(),
[self._il_addtl])
else:
- self._initial_import = __import__(self._il_path)
+ self._initial_import = compat.import_(self._il_path)
def __getattr__(self, key):
if key == 'module':
@@ -751,7 +745,7 @@ class importlater(object):
# from paste.deploy.converters
def asbool(obj):
- if isinstance(obj, (str, unicode)):
+ if isinstance(obj, compat.string_types):
obj = obj.strip().lower()
if obj in ['true', 'yes', 'on', 'y', 't', '1']:
return True
@@ -811,14 +805,14 @@ def constructor_copy(obj, cls, **kw):
def counter():
"""Return a threadsafe counter function."""
- lock = threading.Lock()
- counter = itertools.count(1L)
+ lock = compat.threading.Lock()
+ counter = itertools.count(1)
# avoid the 2to3 "next" transformation...
def _next():
lock.acquire()
try:
- return counter.next()
+ return next(counter)
finally:
lock.release()
@@ -834,7 +828,7 @@ def duck_type_collection(specimen, default=None):
if hasattr(specimen, '__emulates__'):
# canonicalize set vs sets.Set to a standard: the builtin set
if (specimen.__emulates__ is not None and
- issubclass(specimen.__emulates__, set_types)):
+ issubclass(specimen.__emulates__, set)):
return set
else:
return specimen.__emulates__
@@ -842,7 +836,7 @@ def duck_type_collection(specimen, default=None):
isa = isinstance(specimen, type) and issubclass or isinstance
if isa(specimen, list):
return list
- elif isa(specimen, set_types):
+ elif isa(specimen, set):
return set
elif isa(specimen, dict):
return dict
@@ -874,15 +868,14 @@ def assert_arg_type(arg, argtype, name):
def dictlike_iteritems(dictlike):
"""Return a (key, value) iterator for almost any dict-like object."""
- # Py3K
- #if hasattr(dictlike, 'items'):
- # return dictlike.items()
- # Py2K
- if hasattr(dictlike, 'iteritems'):
- return dictlike.iteritems()
- elif hasattr(dictlike, 'items'):
- return iter(dictlike.items())
- # end Py2K
+ if compat.py3k:
+ if hasattr(dictlike, 'items'):
+ return list(dictlike.items())
+ else:
+ if hasattr(dictlike, 'iteritems'):
+ return dictlike.iteritems()
+ elif hasattr(dictlike, 'items'):
+ return iter(dictlike.items())
getter = getattr(dictlike, '__getitem__', getattr(dictlike, 'get', None))
if getter is None:
@@ -891,7 +884,7 @@ def dictlike_iteritems(dictlike):
if hasattr(dictlike, 'iterkeys'):
def iterator():
- for key in dictlike.iterkeys():
+ for key in dictlike.keys():
yield key, getter(key)
return iterator()
elif hasattr(dictlike, 'keys'):
@@ -935,7 +928,7 @@ class hybridmethod(object):
class _symbol(int):
def __new__(self, name, doc=None, canonical=None):
"""Construct a new named symbol."""
- assert isinstance(name, str)
+ assert isinstance(name, compat.string_types)
if canonical is None:
canonical = hash(name)
v = int.__new__(_symbol, canonical)
@@ -978,7 +971,7 @@ class symbol(object):
"""
symbols = {}
- _lock = threading.Lock()
+ _lock = compat.threading.Lock()
def __new__(cls, name, doc=None, canonical=None):
cls._lock.acquire()
@@ -1032,7 +1025,7 @@ def warn(msg, stacklevel=3):
be controlled.
"""
- if isinstance(msg, basestring):
+ if isinstance(msg, compat.string_types):
warnings.warn(msg, exc.SAWarning, stacklevel=stacklevel)
else:
warnings.warn(msg, stacklevel=stacklevel)
diff --git a/sa2to3.py b/sa2to3.py
deleted file mode 100644
index 8c09540a1..000000000
--- a/sa2to3.py
+++ /dev/null
@@ -1,72 +0,0 @@
-"""SQLAlchemy 2to3 tool.
-
-This tool monkeypatches a preprocessor onto
-lib2to3.refactor.RefactoringTool, so that conditional
-sections can replace non-fixable Python 2 code sections
-for the appropriate Python 3 version before 2to3 is run.
-
-"""
-
-from lib2to3 import main, refactor
-
-import re
-
-py3k_pattern = re.compile(r'\s*# Py3K')
-comment_pattern = re.compile(r'(\s*)#(?! ?Py2K)(.*)')
-py2k_pattern = re.compile(r'\s*# Py2K')
-end_py2k_pattern = re.compile(r'\s*# end Py2K')
-
-def preprocess(data):
- lines = data.split('\n')
- def consume_normal():
- while lines:
- line = lines.pop(0)
- if py3k_pattern.match(line):
- for line in consume_py3k():
- yield line
- elif py2k_pattern.match(line):
- for line in consume_py2k():
- yield line
- else:
- yield line
-
- def consume_py3k():
- yield "# start Py3K"
- while lines:
- line = lines.pop(0)
- m = comment_pattern.match(line)
- if m:
- yield "%s%s" % m.group(1, 2)
- else:
- # pushback
- lines.insert(0, line)
- break
- yield "# end Py3K"
-
- def consume_py2k():
- yield "# start Py2K"
- while lines:
- line = lines.pop(0)
- if not end_py2k_pattern.match(line):
- yield "#%s" % line
- else:
- break
- yield "# end Py2K"
-
- return "\n".join(consume_normal())
-
-old_refactor_string = refactor.RefactoringTool.refactor_string
-
-def refactor_string(self, data, name):
- newdata = preprocess(data)
- tree = old_refactor_string(self, newdata, name)
- if tree:
- if newdata != data:
- tree.was_changed = True
- return tree
-
-if __name__ == '__main__':
- refactor.RefactoringTool.refactor_string = refactor_string
- main.main("lib2to3.fixes")
-
-
diff --git a/setup.cfg b/setup.cfg
index 92bdbc40f..61c868a16 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -32,6 +32,8 @@ pg8000=postgresql+pg8000://scott:tiger@127.0.0.1:5432/test
postgresql_jython=postgresql+zxjdbc://scott:tiger@127.0.0.1:5432/test
mysql_jython=mysql+zxjdbc://scott:tiger@127.0.0.1:5432/test
mysql=mysql://scott:tiger@127.0.0.1:3306/test
+mssql=mssql+pyodbc://scott:tiger@ms_2005
+oursql=mysql+oursql://scott:tiger@127.0.0.1:3306/test
pymysql=mysql+pymysql://scott:tiger@127.0.0.1:3306/test?use_unicode=0&charset=utf8
oracle=oracle://scott:tiger@127.0.0.1:1521
oracle8=oracle://scott:tiger@127.0.0.1:1521/?use_ansi=0
diff --git a/setup.py b/setup.py
index 2950a12d4..97212b55e 100644
--- a/setup.py
+++ b/setup.py
@@ -17,26 +17,16 @@ except ImportError:
has_setuptools = False
from distutils.core import setup, Extension
Feature = None
- try: # Python 3
- from distutils.command.build_py import build_py_2to3 as build_py
- except ImportError: # Python 2
- from distutils.command.build_py import build_py
cmdclass = {}
pypy = hasattr(sys, 'pypy_version_info')
jython = sys.platform.startswith('java')
py3k = False
extra = {}
-if sys.version_info < (2, 4):
- raise Exception("SQLAlchemy requires Python 2.4 or higher.")
+if sys.version_info < (2, 6):
+ raise Exception("SQLAlchemy requires Python 2.6 or higher.")
elif sys.version_info >= (3, 0):
py3k = True
- if has_setuptools:
- extra.update(
- use_2to3=True,
- )
- else:
- cmdclass['build_py'] = build_py
ext_modules = [
Extension('sqlalchemy.cprocessors',
@@ -48,7 +38,7 @@ ext_modules = [
]
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError)
-if sys.platform == 'win32' and sys.version_info > (2, 6):
+if sys.platform == 'win32':
# 2.6's distutils.msvc9compiler can raise an IOError when failing to
# find the compiler
ext_errors += (IOError,)
@@ -90,8 +80,7 @@ def find_packages(location):
packages = []
for pkg in ['sqlalchemy']:
for _dir, subdirectories, files in (
- os.walk(os.path.join(location, pkg))
- ):
+ os.walk(os.path.join(location, pkg))):
if '__init__.py' in files:
tokens = _dir.split(os.sep)[len(location.split(os.sep)):]
packages.append(".".join(tokens))
@@ -121,20 +110,20 @@ def run_setup(with_cext):
kwargs['ext_modules'] = ext_modules
setup(name="SQLAlchemy",
- version=VERSION,
- description="Database Abstraction Library",
- author="Mike Bayer",
- author_email="mike_mp@zzzcomputing.com",
- url="http://www.sqlalchemy.org",
- packages=find_packages('lib'),
- package_dir={'': 'lib'},
- license="MIT License",
- cmdclass=cmdclass,
-
- tests_require=['nose >= 0.11'],
- test_suite="sqla_nose",
- long_description=readme,
- classifiers=[
+ version=VERSION,
+ description="Database Abstraction Library",
+ author="Mike Bayer",
+ author_email="mike_mp@zzzcomputing.com",
+ url="http://www.sqlalchemy.org",
+ packages=find_packages('lib'),
+ package_dir={'': 'lib'},
+ license="MIT License",
+ cmdclass=cmdclass,
+
+ tests_require=['nose >= 0.11'],
+ test_suite="sqla_nose",
+ long_description=readme,
+ classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
@@ -149,28 +138,8 @@ def run_setup(with_cext):
**kwargs
)
-def monkeypatch2to3():
- from sa2to3 import refactor_string
- from lib2to3.refactor import RefactoringTool
- RefactoringTool.old_refactor_string = RefactoringTool.refactor_string
- RefactoringTool.refactor_string = refactor_string
-
-def unmonkeypatch2to3():
- from lib2to3.refactor import RefactoringTool
- if hasattr(RefactoringTool, 'old_refactor_string'):
- RefactoringTool.refactor_string = RefactoringTool.old_refactor_string
-
if pypy or jython or py3k:
- if py3k:
- # monkeypatch our preprocessor onto the 2to3 tool.
- monkeypatch2to3()
- try:
- run_setup(False)
- finally:
- if py3k:
- # unmonkeypatch to not stomp other setup.py's that are compiled
- # and exec'd and which also require 2to3 fixing
- unmonkeypatch2to3()
+ run_setup(False)
status_msgs(
"WARNING: C extensions are not supported on " +
"this Python platform, speedups are not enabled.",
@@ -179,8 +148,7 @@ if pypy or jython or py3k:
else:
try:
run_setup(True)
- except BuildFailed:
- exc = sys.exc_info()[1] # work around py 2/3 different syntax
+ except BuildFailed as exc:
status_msgs(
exc.cause,
"WARNING: The C extension could not be compiled, " +
diff --git a/test/aaa_profiling/test_compiler.py b/test/aaa_profiling/test_compiler.py
index 1b7798d06..8304296da 100644
--- a/test/aaa_profiling/test_compiler.py
+++ b/test/aaa_profiling/test_compiler.py
@@ -29,7 +29,7 @@ class CompileTest(fixtures.TestBase, AssertsExecutionResults):
for c in t.c:
c.type._type_affinity
from sqlalchemy import types
- for t in types._type_map.values():
+ for t in list(types._type_map.values()):
t._type_affinity
cls.dialect = default.DefaultDialect()
diff --git a/test/aaa_profiling/test_memusage.py b/test/aaa_profiling/test_memusage.py
index 57bddc859..20c6f0a65 100644
--- a/test/aaa_profiling/test_memusage.py
+++ b/test/aaa_profiling/test_memusage.py
@@ -47,7 +47,7 @@ def profile_memory(times=50):
gc_collect()
samples[x] = len(get_objects_skipping_sqlite_issue())
- print "sample gc sizes:", samples
+ print("sample gc sizes:", samples)
assert len(_sessions) == 0
diff --git a/test/aaa_profiling/test_orm.py b/test/aaa_profiling/test_orm.py
index b9eeb8361..199b96e5f 100644
--- a/test/aaa_profiling/test_orm.py
+++ b/test/aaa_profiling/test_orm.py
@@ -144,7 +144,7 @@ class LoadManyToOneFromIdentityTest(fixtures.MappedTest):
child.insert().execute([
{'id':i, 'data':'c%d' % i}
- for i in xrange(1, 251)
+ for i in range(1, 251)
])
parent.insert().execute([
{
@@ -152,7 +152,7 @@ class LoadManyToOneFromIdentityTest(fixtures.MappedTest):
'data':'p%dc%d' % (i, (i % 250) + 1),
'child_id':(i % 250) + 1
}
- for i in xrange(1, 1000)
+ for i in range(1, 1000)
])
def test_many_to_one_load_no_identity(self):
@@ -234,11 +234,11 @@ class MergeBackrefsTest(fixtures.MappedTest):
s = Session()
s.add_all([
A(id=i,
- bs=[B(id=(i * 5) + j) for j in xrange(1, 5)],
+ bs=[B(id=(i * 5) + j) for j in range(1, 5)],
c=C(id=i),
- ds=[D(id=(i * 5) + j) for j in xrange(1, 5)]
+ ds=[D(id=(i * 5) + j) for j in range(1, 5)]
)
- for i in xrange(1, 5)
+ for i in range(1, 5)
])
s.commit()
@@ -249,11 +249,11 @@ class MergeBackrefsTest(fixtures.MappedTest):
s = Session()
for a in [
A(id=i,
- bs=[B(id=(i * 5) + j) for j in xrange(1, 5)],
+ bs=[B(id=(i * 5) + j) for j in range(1, 5)],
c=C(id=i),
- ds=[D(id=(i * 5) + j) for j in xrange(1, 5)]
+ ds=[D(id=(i * 5) + j) for j in range(1, 5)]
)
- for i in xrange(1, 5)
+ for i in range(1, 5)
]:
s.merge(a)
diff --git a/test/aaa_profiling/test_resultset.py b/test/aaa_profiling/test_resultset.py
index 0146d1b08..27e60410d 100644
--- a/test/aaa_profiling/test_resultset.py
+++ b/test/aaa_profiling/test_resultset.py
@@ -2,6 +2,7 @@ from sqlalchemy import *
from sqlalchemy.testing import fixtures, AssertsExecutionResults, profiling
from sqlalchemy import testing
from sqlalchemy.testing import eq_
+from sqlalchemy.util import u
NUM_FIELDS = 10
NUM_RECORDS = 1000
@@ -19,10 +20,10 @@ class ResultSetTest(fixtures.TestBase, AssertsExecutionResults):
def setup(self):
metadata.create_all()
- t.insert().execute([dict(('field%d' % fnum, u'value%d' % fnum)
+ t.insert().execute([dict(('field%d' % fnum, u('value%d' % fnum))
for fnum in range(NUM_FIELDS)) for r_num in
range(NUM_RECORDS)])
- t2.insert().execute([dict(('field%d' % fnum, u'value%d' % fnum)
+ t2.insert().execute([dict(('field%d' % fnum, u('value%d' % fnum))
for fnum in range(NUM_FIELDS)) for r_num in
range(NUM_RECORDS)])
@@ -88,7 +89,7 @@ class RowProxyTest(fixtures.TestBase):
keymap = {}
for index, (keyobjs, processor, values) in \
- enumerate(zip(keys, processors, row)):
+ enumerate(list(zip(keys, processors, row))):
for key in keyobjs:
keymap[key] = (processor, key, index)
keymap[index] = (processor, key, index)
diff --git a/test/aaa_profiling/test_zoomark.py b/test/aaa_profiling/test_zoomark.py
index 5fc9ffc2a..145f3c594 100644
--- a/test/aaa_profiling/test_zoomark.py
+++ b/test/aaa_profiling/test_zoomark.py
@@ -1,3 +1,5 @@
+from __future__ import unicode_literals
+
"""Benchmark for SQLAlchemy.
An adaptation of Robert Brewers' ZooMark speed tests. """
@@ -72,68 +74,68 @@ class ZooMarkTest(fixtures.TestBase):
Zoo = metadata.tables['Zoo']
Animal = metadata.tables['Animal']
engine = metadata.bind
- wap = engine.execute(Zoo.insert(), Name=u'Wild Animal Park',
+ wap = engine.execute(Zoo.insert(), Name='Wild Animal Park',
Founded=datetime.date(2000, 1, 1),
Opens=datetime.time(8, 15, 59),
LastEscape=
datetime.datetime(2004, 7, 29, 5, 6, 7),
Admission=4.95).inserted_primary_key[0]
- sdz = engine.execute(Zoo.insert(), Name=u'San Diego Zoo',
+ sdz = engine.execute(Zoo.insert(), Name='San Diego Zoo',
Founded=datetime.date(1935, 9, 13),
Opens=datetime.time(9, 0, 0),
Admission=0).inserted_primary_key[0]
- engine.execute(Zoo.insert(inline=True), Name=u'Montr\xe9al Biod\xf4me',
+ engine.execute(Zoo.insert(inline=True), Name='Montr\xe9al Biod\xf4me',
Founded=datetime.date(1992, 6, 19),
Opens=datetime.time(9, 0, 0), Admission=11.75)
- seaworld = engine.execute(Zoo.insert(), Name=u'Sea_World',
+ seaworld = engine.execute(Zoo.insert(), Name='Sea_World',
Admission=60).inserted_primary_key[0]
# Let's add a crazy futuristic Zoo to test large date values.
- lp = engine.execute(Zoo.insert(), Name=u'Luna Park',
+ lp = engine.execute(Zoo.insert(), Name='Luna Park',
Founded=datetime.date(2072, 7, 17),
Opens=datetime.time(0, 0, 0),
Admission=134.95).inserted_primary_key[0]
# Animals
- leopardid = engine.execute(Animal.insert(), Species=u'Leopard',
+ leopardid = engine.execute(Animal.insert(), Species='Leopard',
Lifespan=73.5).inserted_primary_key[0]
engine.execute(Animal.update(Animal.c.ID == leopardid), ZooID=wap,
LastEscape=datetime.datetime( 2004, 12, 21, 8, 15, 0, 999907,)
)
- lion = engine.execute(Animal.insert(), Species=u'Lion',
+ lion = engine.execute(Animal.insert(), Species='Lion',
ZooID=wap).inserted_primary_key[0]
- engine.execute(Animal.insert(), Species=u'Slug', Legs=1, Lifespan=.75)
- tiger = engine.execute(Animal.insert(), Species=u'Tiger',
+ engine.execute(Animal.insert(), Species='Slug', Legs=1, Lifespan=.75)
+ tiger = engine.execute(Animal.insert(), Species='Tiger',
ZooID=sdz).inserted_primary_key[0]
# Override Legs.default with itself just to make sure it works.
- engine.execute(Animal.insert(inline=True), Species=u'Bear', Legs=4)
- engine.execute(Animal.insert(inline=True), Species=u'Ostrich', Legs=2,
+ engine.execute(Animal.insert(inline=True), Species='Bear', Legs=4)
+ engine.execute(Animal.insert(inline=True), Species='Ostrich', Legs=2,
Lifespan=103.2)
- engine.execute(Animal.insert(inline=True), Species=u'Centipede',
+ engine.execute(Animal.insert(inline=True), Species='Centipede',
Legs=100)
- emp = engine.execute(Animal.insert(), Species=u'Emperor Penguin',
+ emp = engine.execute(Animal.insert(), Species='Emperor Penguin',
Legs=2, ZooID=seaworld).inserted_primary_key[0]
- adelie = engine.execute(Animal.insert(), Species=u'Adelie Penguin',
+ adelie = engine.execute(Animal.insert(), Species='Adelie Penguin',
Legs=2, ZooID=seaworld).inserted_primary_key[0]
- engine.execute(Animal.insert(inline=True), Species=u'Millipede',
+ engine.execute(Animal.insert(inline=True), Species='Millipede',
Legs=1000000, ZooID=sdz)
# Add a mother and child to test relationships
- bai_yun = engine.execute(Animal.insert(), Species=u'Ape',
- Name=u'Bai Yun', Legs=2).inserted_primary_key[0]
- engine.execute(Animal.insert(inline=True), Species=u'Ape',
- Name=u'Hua Mei', Legs=2, MotherID=bai_yun)
+ bai_yun = engine.execute(Animal.insert(), Species='Ape',
+ Name='Bai Yun', Legs=2).inserted_primary_key[0]
+ engine.execute(Animal.insert(inline=True), Species='Ape',
+ Name='Hua Mei', Legs=2, MotherID=bai_yun)
def test_baseline_2_insert(self):
Animal = metadata.tables['Animal']
i = Animal.insert(inline=True)
- for x in xrange(ITERATIONS):
- tick = i.execute(Species=u'Tick', Name=u'Tick %d' % x,
+ for x in range(ITERATIONS):
+ tick = i.execute(Species='Tick', Name='Tick %d' % x,
Legs=8)
def test_baseline_3_properties(self):
@@ -146,28 +148,28 @@ class ZooMarkTest(fixtures.TestBase):
return list(engine.execute(select).first())
- for x in xrange(ITERATIONS):
+ for x in range(ITERATIONS):
# Zoos
WAP = fullobject(Zoo.select(Zoo.c.Name
- == u'Wild Animal Park'))
+ == 'Wild Animal Park'))
SDZ = fullobject(Zoo.select(Zoo.c.Founded
== datetime.date(1935, 9, 13)))
Biodome = fullobject(Zoo.select(Zoo.c.Name
- == u'Montr\xe9al Biod\xf4me'))
+ == 'Montr\xe9al Biod\xf4me'))
seaworld = fullobject(Zoo.select(Zoo.c.Admission
== float(60)))
# Animals
leopard = fullobject(Animal.select(Animal.c.Species
- == u'Leopard'))
+ == 'Leopard'))
ostrich = fullobject(Animal.select(Animal.c.Species
- == u'Ostrich'))
+ == 'Ostrich'))
millipede = fullobject(Animal.select(Animal.c.Legs
== 1000000))
- ticks = fullobject(Animal.select(Animal.c.Species == u'Tick'
+ ticks = fullobject(Animal.select(Animal.c.Species == 'Tick'
))
def test_baseline_4_expressions(self):
@@ -180,7 +182,7 @@ class ZooMarkTest(fixtures.TestBase):
return [list(row) for row in engine.execute(select).fetchall()]
- for x in xrange(ITERATIONS):
+ for x in range(ITERATIONS):
assert len(fulltable(Zoo.select())) == 5
assert len(fulltable(Animal.select())) == ITERATIONS + 12
assert len(fulltable(Animal.select(Animal.c.Legs == 4))) \
@@ -194,9 +196,9 @@ class ZooMarkTest(fixtures.TestBase):
assert len(fulltable(Animal.select(Animal.c.Lifespan
> 70))) == 2
assert len(fulltable(Animal.select(Animal.c.Species.
- startswith(u'L')))) == 2
+ startswith('L')))) == 2
assert len(fulltable(Animal.select(Animal.c.Species.
- endswith(u'pede')))) == 2
+ endswith('pede')))) == 2
assert len(fulltable(Animal.select(Animal.c.LastEscape
!= None))) == 1
assert len(fulltable(Animal.select(None
@@ -204,10 +206,10 @@ class ZooMarkTest(fixtures.TestBase):
# In operator (containedby)
- assert len(fulltable(Animal.select(Animal.c.Species.like(u'%pede%'
+ assert len(fulltable(Animal.select(Animal.c.Species.like('%pede%'
)))) == 2
- assert len(fulltable(Animal.select(Animal.c.Species.in_([u'Lion'
- , u'Tiger', u'Bear'])))) == 3
+ assert len(fulltable(Animal.select(Animal.c.Species.in_(['Lion'
+ , 'Tiger', 'Bear'])))) == 3
# Try In with cell references
class thing(object):
@@ -215,20 +217,20 @@ class ZooMarkTest(fixtures.TestBase):
pet, pet2 = thing(), thing()
- pet.Name, pet2.Name = u'Slug', u'Ostrich'
+ pet.Name, pet2.Name = 'Slug', 'Ostrich'
assert len(fulltable(Animal.select(Animal.c.Species.in_([pet.Name,
pet2.Name])))) == 2
# logic and other functions
- assert len(fulltable(Animal.select(Animal.c.Species.like(u'Slug'
+ assert len(fulltable(Animal.select(Animal.c.Species.like('Slug'
)))) == 1
- assert len(fulltable(Animal.select(Animal.c.Species.like(u'%pede%'
+ assert len(fulltable(Animal.select(Animal.c.Species.like('%pede%'
)))) == 2
- name = u'Lion'
+ name = 'Lion'
assert len(fulltable(Animal.select(func.length(Animal.c.Species)
== len(name)))) == ITERATIONS + 3
- assert len(fulltable(Animal.select(Animal.c.Species.like(u'%i%'
+ assert len(fulltable(Animal.select(Animal.c.Species.like('%i%'
)))) == ITERATIONS + 7
# Test now(), today(), year(), month(), day()
@@ -250,7 +252,7 @@ class ZooMarkTest(fixtures.TestBase):
Zoo = metadata.tables['Zoo']
engine = metadata.bind
- for x in xrange(ITERATIONS):
+ for x in range(ITERATIONS):
# views
@@ -274,7 +276,7 @@ class ZooMarkTest(fixtures.TestBase):
for species, lifespan in engine.execute(select([Animal.c.Species,
Animal.c.Lifespan])).fetchall():
assert lifespan == expected[species]
- expected = [u'Montr\xe9al Biod\xf4me', 'Wild Animal Park']
+ expected = ['Montr\xe9al Biod\xf4me', 'Wild Animal Park']
e = select([Zoo.c.Name], and_(Zoo.c.Founded != None,
Zoo.c.Founded <= func.current_timestamp(),
Zoo.c.Founded >= datetime.date(1990, 1, 1)))
@@ -290,21 +292,21 @@ class ZooMarkTest(fixtures.TestBase):
def test_baseline_6_editing(self):
Zoo = metadata.tables['Zoo']
engine = metadata.bind
- for x in xrange(ITERATIONS):
+ for x in range(ITERATIONS):
# Edit
- SDZ = engine.execute(Zoo.select(Zoo.c.Name == u'San Diego Zoo'
+ SDZ = engine.execute(Zoo.select(Zoo.c.Name == 'San Diego Zoo'
)).first()
engine.execute(Zoo.update(Zoo.c.ID == SDZ['ID'
- ]), Name=u'The San Diego Zoo',
+ ]), Name='The San Diego Zoo',
Founded=datetime.date(1900, 1, 1),
Opens=datetime.time(7, 30, 0),
Admission='35.00')
# Test edits
- SDZ = engine.execute(Zoo.select(Zoo.c.Name == u'The San Diego Zoo'
+ SDZ = engine.execute(Zoo.select(Zoo.c.Name == 'The San Diego Zoo'
)).first()
assert SDZ['Founded'] == datetime.date(1900, 1, 1), \
SDZ['Founded']
@@ -312,14 +314,14 @@ class ZooMarkTest(fixtures.TestBase):
# Change it back
engine.execute(Zoo.update(Zoo.c.ID == SDZ['ID'
- ]), Name=u'San Diego Zoo',
+ ]), Name='San Diego Zoo',
Founded=datetime.date(1935, 9, 13),
Opens=datetime.time(9, 0, 0),
Admission='0')
# Test re-edits
- SDZ = engine.execute(Zoo.select(Zoo.c.Name == u'San Diego Zoo'
+ SDZ = engine.execute(Zoo.select(Zoo.c.Name == 'San Diego Zoo'
)).first()
assert SDZ['Founded'] == datetime.date(1935, 9, 13)
@@ -333,14 +335,14 @@ class ZooMarkTest(fixtures.TestBase):
return [list(row) for row in engine.execute(select).fetchall()]
- for x in xrange(ITERATIONS):
+ for x in range(ITERATIONS):
za = fulltable(select([Zoo.c.ID] + list(Animal.c),
- Zoo.c.Name == u'San Diego Zoo',
+ Zoo.c.Name == 'San Diego Zoo',
from_obj=[join(Zoo, Animal)]))
- SDZ = Zoo.select(Zoo.c.Name == u'San Diego Zoo')
+ SDZ = Zoo.select(Zoo.c.Name == 'San Diego Zoo')
e = fulltable(select([Zoo.c.ID, Animal.c.ID],
- and_(Zoo.c.Name == u'San Diego Zoo',
- Animal.c.Species == u'Leopard'),
+ and_(Zoo.c.Name == 'San Diego Zoo',
+ Animal.c.Species == 'Leopard'),
from_obj=[join(Zoo, Animal)]))
# Now try the same query with INNER, LEFT, and RIGHT JOINs.
diff --git a/test/aaa_profiling/test_zoomark_orm.py b/test/aaa_profiling/test_zoomark_orm.py
index f8c5477f7..ddcad681a 100644
--- a/test/aaa_profiling/test_zoomark_orm.py
+++ b/test/aaa_profiling/test_zoomark_orm.py
@@ -1,3 +1,5 @@
+from __future__ import unicode_literals
+
"""Benchmark for SQLAlchemy.
An adaptation of Robert Brewers' ZooMark speed tests. """
@@ -75,14 +77,14 @@ class ZooMarkTest(fixtures.TestBase):
class Zoo(object):
def __init__(self, **kwargs):
- for k, v in kwargs.iteritems():
+ for k, v in kwargs.items():
setattr(self, k, v)
class Animal(object):
def __init__(self, **kwargs):
- for k, v in kwargs.iteritems():
+ for k, v in kwargs.items():
setattr(self, k, v)
@@ -90,93 +92,93 @@ class ZooMarkTest(fixtures.TestBase):
mapper(Animal, animal)
def test_baseline_1a_populate(self):
- wap = Zoo(Name=u'Wild Animal Park', Founded=datetime.date(2000,
+ wap = Zoo(Name='Wild Animal Park', Founded=datetime.date(2000,
1, 1), Opens=datetime.time(8, 15, 59),
LastEscape=datetime.datetime( 2004, 7, 29, 5, 6, 7, ),
Admission=4.95)
session.add(wap)
- sdz = Zoo(Name=u'San Diego Zoo', Founded=datetime.date(1835, 9,
+ sdz = Zoo(Name='San Diego Zoo', Founded=datetime.date(1835, 9,
13), Opens=datetime.time(9, 0, 0), Admission=0)
session.add(sdz)
- bio = Zoo(Name=u'Montr\xe9al Biod\xf4me',
+ bio = Zoo(Name='Montr\xe9al Biod\xf4me',
Founded=datetime.date(1992, 6, 19),
Opens=datetime.time(9, 0, 0), Admission=11.75)
session.add(bio)
- seaworld = Zoo(Name=u'Sea_World', Admission=60)
+ seaworld = Zoo(Name='Sea_World', Admission=60)
session.add(seaworld)
# Let's add a crazy futuristic Zoo to test large date values.
- lp = Zoo(Name=u'Luna Park', Founded=datetime.date(2072, 7, 17),
+ lp = Zoo(Name='Luna Park', Founded=datetime.date(2072, 7, 17),
Opens=datetime.time(0, 0, 0), Admission=134.95)
session.add(lp)
session.flush()
# Animals
- leopard = Animal(Species=u'Leopard', Lifespan=73.5)
+ leopard = Animal(Species='Leopard', Lifespan=73.5)
session.add(leopard)
leopard.ZooID = wap.ID
leopard.LastEscape = \
datetime.datetime(2004, 12, 21, 8, 15, 0, 999907, )
- session.add(Animal(Species=u'Lion', ZooID=wap.ID))
- session.add(Animal(Species=u'Slug', Legs=1, Lifespan=.75))
- session.add(Animal(Species=u'Tiger', ZooID=sdz.ID))
+ session.add(Animal(Species='Lion', ZooID=wap.ID))
+ session.add(Animal(Species='Slug', Legs=1, Lifespan=.75))
+ session.add(Animal(Species='Tiger', ZooID=sdz.ID))
# Override Legs.default with itself just to make sure it works.
- session.add(Animal(Species=u'Bear', Legs=4))
- session.add(Animal(Species=u'Ostrich', Legs=2, Lifespan=103.2))
- session.add(Animal(Species=u'Centipede', Legs=100))
- session.add(Animal(Species=u'Emperor Penguin', Legs=2,
+ session.add(Animal(Species='Bear', Legs=4))
+ session.add(Animal(Species='Ostrich', Legs=2, Lifespan=103.2))
+ session.add(Animal(Species='Centipede', Legs=100))
+ session.add(Animal(Species='Emperor Penguin', Legs=2,
ZooID=seaworld.ID))
- session.add(Animal(Species=u'Adelie Penguin', Legs=2,
+ session.add(Animal(Species='Adelie Penguin', Legs=2,
ZooID=seaworld.ID))
- session.add(Animal(Species=u'Millipede', Legs=1000000,
+ session.add(Animal(Species='Millipede', Legs=1000000,
ZooID=sdz.ID))
# Add a mother and child to test relationships
- bai_yun = Animal(Species=u'Ape', Nameu=u'Bai Yun', Legs=2)
+ bai_yun = Animal(Species='Ape', Nameu='Bai Yun', Legs=2)
session.add(bai_yun)
- session.add(Animal(Species=u'Ape', Name=u'Hua Mei', Legs=2,
+ session.add(Animal(Species='Ape', Name='Hua Mei', Legs=2,
MotherID=bai_yun.ID))
session.flush()
session.commit()
def test_baseline_2_insert(self):
- for x in xrange(ITERATIONS):
- session.add(Animal(Species=u'Tick', Name=u'Tick %d' % x,
+ for x in range(ITERATIONS):
+ session.add(Animal(Species='Tick', Name='Tick %d' % x,
Legs=8))
session.flush()
def test_baseline_3_properties(self):
- for x in xrange(ITERATIONS):
+ for x in range(ITERATIONS):
# Zoos
WAP = list(session.query(Zoo).filter(Zoo.Name
- == u'Wild Animal Park'))
+ == 'Wild Animal Park'))
SDZ = list(session.query(Zoo).filter(Zoo.Founded
== datetime.date(1835, 9, 13)))
Biodome = list(session.query(Zoo).filter(Zoo.Name
- == u'Montr\xe9al Biod\xf4me'))
+ == 'Montr\xe9al Biod\xf4me'))
seaworld = list(session.query(Zoo).filter(Zoo.Admission
== float(60)))
# Animals
leopard = list(session.query(Animal).filter(Animal.Species
- == u'Leopard'))
+ == 'Leopard'))
ostrich = list(session.query(Animal).filter(Animal.Species
- == u'Ostrich'))
+ == 'Ostrich'))
millipede = list(session.query(Animal).filter(Animal.Legs
== 1000000))
ticks = list(session.query(Animal).filter(Animal.Species
- == u'Tick'))
+ == 'Tick'))
def test_baseline_4_expressions(self):
- for x in xrange(ITERATIONS):
+ for x in range(ITERATIONS):
assert len(list(session.query(Zoo))) == 5
assert len(list(session.query(Animal))) == ITERATIONS + 12
assert len(list(session.query(Animal).filter(Animal.Legs
@@ -190,9 +192,9 @@ class ZooMarkTest(fixtures.TestBase):
assert len(list(session.query(Animal).filter(Animal.Lifespan
> 70))) == 2
assert len(list(session.query(Animal).
- filter(Animal.Species.like(u'L%')))) == 2
+ filter(Animal.Species.like('L%')))) == 2
assert len(list(session.query(Animal).
- filter(Animal.Species.like(u'%pede')))) == 2
+ filter(Animal.Species.like('%pede')))) == 2
assert len(list(session.query(Animal).filter(Animal.LastEscape
!= None))) == 1
assert len(list(session.query(Animal).filter(Animal.LastEscape
@@ -201,29 +203,29 @@ class ZooMarkTest(fixtures.TestBase):
# In operator (containedby)
assert len(list(session.query(Animal).filter(
- Animal.Species.like(u'%pede%')))) == 2
+ Animal.Species.like('%pede%')))) == 2
assert len(list(session.query(Animal).
- filter(Animal.Species.in_((u'Lion'
- , u'Tiger', u'Bear'))))) == 3
+ filter(Animal.Species.in_(('Lion'
+ , 'Tiger', 'Bear'))))) == 3
# Try In with cell references
class thing(object):
pass
pet, pet2 = thing(), thing()
- pet.Name, pet2.Name = u'Slug', u'Ostrich'
+ pet.Name, pet2.Name = 'Slug', 'Ostrich'
assert len(list(session.query(Animal).
filter(Animal.Species.in_((pet.Name,
pet2.Name))))) == 2
# logic and other functions
- name = u'Lion'
+ name = 'Lion'
assert len(list(session.query(Animal).
filter(func.length(Animal.Species)
== len(name)))) == ITERATIONS + 3
assert len(list(session.query(Animal).
- filter(Animal.Species.like(u'%i%'
+ filter(Animal.Species.like('%i%'
)))) == ITERATIONS + 7
# Test now(), today(), year(), month(), day()
@@ -246,7 +248,7 @@ class ZooMarkTest(fixtures.TestBase):
# TODO: convert to ORM
engine = metadata.bind
- for x in xrange(ITERATIONS):
+ for x in range(ITERATIONS):
# views
@@ -270,7 +272,7 @@ class ZooMarkTest(fixtures.TestBase):
for species, lifespan in engine.execute(select([Animal.c.Species,
Animal.c.Lifespan])).fetchall():
assert lifespan == expected[species]
- expected = [u'Montr\xe9al Biod\xf4me', 'Wild Animal Park']
+ expected = ['Montr\xe9al Biod\xf4me', 'Wild Animal Park']
e = select([Zoo.c.Name], and_(Zoo.c.Founded != None,
Zoo.c.Founded <= func.current_timestamp(),
Zoo.c.Founded >= datetime.date(1990, 1, 1)))
@@ -284,13 +286,13 @@ class ZooMarkTest(fixtures.TestBase):
legs.sort()
def test_baseline_6_editing(self):
- for x in xrange(ITERATIONS):
+ for x in range(ITERATIONS):
# Edit
- SDZ = session.query(Zoo).filter(Zoo.Name == u'San Diego Zoo'
+ SDZ = session.query(Zoo).filter(Zoo.Name == 'San Diego Zoo'
).one()
- SDZ.Name = u'The San Diego Zoo'
+ SDZ.Name = 'The San Diego Zoo'
SDZ.Founded = datetime.date(1900, 1, 1)
SDZ.Opens = datetime.time(7, 30, 0)
SDZ.Admission = 35.00
@@ -298,19 +300,19 @@ class ZooMarkTest(fixtures.TestBase):
# Test edits
SDZ = session.query(Zoo).filter(Zoo.Name
- == u'The San Diego Zoo').one()
+ == 'The San Diego Zoo').one()
assert SDZ.Founded == datetime.date(1900, 1, 1), SDZ.Founded
# Change it back
- SDZ.Name = u'San Diego Zoo'
+ SDZ.Name = 'San Diego Zoo'
SDZ.Founded = datetime.date(1835, 9, 13)
SDZ.Opens = datetime.time(9, 0, 0)
SDZ.Admission = 0
# Test re-edits
- SDZ = session.query(Zoo).filter(Zoo.Name == u'San Diego Zoo'
+ SDZ = session.query(Zoo).filter(Zoo.Name == 'San Diego Zoo'
).one()
assert SDZ.Founded == datetime.date(1835, 9, 13), \
SDZ.Founded
diff --git a/test/base/test_dependency.py b/test/base/test_dependency.py
index f4a0a4c8b..b16516f15 100644
--- a/test/base/test_dependency.py
+++ b/test/base/test_dependency.py
@@ -82,7 +82,7 @@ class DependencySortTest(fixtures.TestBase):
try:
list(topological.sort(tuples, allitems))
assert False
- except exc.CircularDependencyError, err:
+ except exc.CircularDependencyError as err:
eq_(err.cycles, set(['node1', 'node3', 'node2', 'node5',
'node4']))
eq_(err.edges, set([('node3', 'node1'), ('node4', 'node1'),
@@ -105,7 +105,7 @@ class DependencySortTest(fixtures.TestBase):
try:
list(topological.sort(tuples, allitems))
assert False
- except exc.CircularDependencyError, err:
+ except exc.CircularDependencyError as err:
eq_(err.cycles, set(['node1', 'node3', 'node2']))
eq_(err.edges, set([('node3', 'node1'), ('node2', 'node3'),
('node3', 'node2'), ('node1', 'node2'),
@@ -271,7 +271,7 @@ class DependencySortTest(fixtures.TestBase):
('node4', 'node17'), ('node2', 'node20'), ('node19', 'node10'),
('node8', 'node4'), ('node11', 'node3'), ('node6', 'node1')
]
- allnodes = ['node%d' % i for i in xrange(1, 21)]
+ allnodes = ['node%d' % i for i in range(1, 21)]
eq_(
topological.find_cycles(tuples, allnodes),
set(['node11', 'node10', 'node13', 'node15', 'node14', 'node17',
diff --git a/test/base/test_events.py b/test/base/test_events.py
index 4efb30aba..7cfb5fa7d 100644
--- a/test/base/test_events.py
+++ b/test/base/test_events.py
@@ -190,7 +190,7 @@ class ClsLevelListenTest(fixtures.TestBase):
def test_lis_subcalss_lis(self):
@event.listens_for(self.TargetOne, "event_one")
def handler1(x, y):
- print 'handler1'
+ print('handler1')
class SubTarget(self.TargetOne):
pass
@@ -207,7 +207,7 @@ class ClsLevelListenTest(fixtures.TestBase):
def test_lis_multisub_lis(self):
@event.listens_for(self.TargetOne, "event_one")
def handler1(x, y):
- print 'handler1'
+ print('handler1')
class SubTarget(self.TargetOne):
pass
diff --git a/test/base/test_except.py b/test/base/test_except.py
index a8c7de201..f2428c22d 100644
--- a/test/base/test_except.py
+++ b/test/base/test_except.py
@@ -5,14 +5,15 @@ from sqlalchemy import exc as sa_exceptions
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import eq_
-# Py3K
-#StandardError = BaseException
-# Py2K
-from exceptions import StandardError, KeyboardInterrupt, SystemExit
+# start Py3K
+Exception = BaseException
+# end Py3K
+# start Py2K
+#from exceptions import StandardError, KeyboardInterrupt, SystemExit
# end Py2K
-class Error(StandardError):
+class Error(Exception):
"""This class will be old-style on <= 2.4 and new-style on >=
2.5."""
@@ -47,7 +48,7 @@ class WrapTest(fixtures.TestBase):
try:
raise sa_exceptions.DBAPIError.instance('this is a message'
, None, OperationalError(), DatabaseError)
- except sa_exceptions.DBAPIError, exc:
+ except sa_exceptions.DBAPIError as exc:
assert str(exc) \
== "(OperationalError) 'this is a message' None"
@@ -58,7 +59,7 @@ class WrapTest(fixtures.TestBase):
{'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7, 'h':
8, 'i': 9, 'j': 10, 'k': 11,
}, OperationalError(), DatabaseError)
- except sa_exceptions.DBAPIError, exc:
+ except sa_exceptions.DBAPIError as exc:
assert str(exc).startswith("(OperationalError) 'this is a "
"message' {")
@@ -67,7 +68,7 @@ class WrapTest(fixtures.TestBase):
raise sa_exceptions.DBAPIError.instance('this is a message',
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,],
OperationalError(), DatabaseError)
- except sa_exceptions.DBAPIError, exc:
+ except sa_exceptions.DBAPIError as exc:
assert str(exc).startswith("(OperationalError) 'this is a "
"message' [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]")
@@ -77,7 +78,7 @@ class WrapTest(fixtures.TestBase):
[{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
{1: 1}, {1:1}, {1: 1}, {1: 1},],
OperationalError(), DatabaseError)
- except sa_exceptions.DBAPIError, exc:
+ except sa_exceptions.DBAPIError as exc:
eq_(str(exc) ,
"(OperationalError) 'this is a message' [{1: 1}, "\
"{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: "\
@@ -87,7 +88,7 @@ class WrapTest(fixtures.TestBase):
{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
{1:1}, {1: 1}, {1: 1}, {1: 1},
], OperationalError(), DatabaseError)
- except sa_exceptions.DBAPIError, exc:
+ except sa_exceptions.DBAPIError as exc:
eq_(str(exc) ,
"(OperationalError) 'this is a message' [{1: 1}, "
"{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, "
@@ -100,7 +101,7 @@ class WrapTest(fixtures.TestBase):
(1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
(1, ),
], OperationalError(), DatabaseError)
- except sa_exceptions.DBAPIError, exc:
+ except sa_exceptions.DBAPIError as exc:
eq_(str(exc),
"(OperationalError) 'this is a message' [(1,), "\
"(1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,)]")
@@ -109,7 +110,7 @@ class WrapTest(fixtures.TestBase):
(1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
(1, ), (1, ),
], OperationalError(), DatabaseError)
- except sa_exceptions.DBAPIError, exc:
+ except sa_exceptions.DBAPIError as exc:
eq_(str(exc),
"(OperationalError) 'this is a message' [(1,), "
"(1,), (1,), (1,), (1,), (1,), (1,), (1,) "
@@ -121,7 +122,7 @@ class WrapTest(fixtures.TestBase):
try:
raise sa_exceptions.DBAPIError.instance('', [],
ProgrammingError(), DatabaseError)
- except sa_exceptions.DBAPIError, e:
+ except sa_exceptions.DBAPIError as e:
self.assert_(True)
self.assert_('Error in str() of DB-API' in e.args[0])
@@ -129,7 +130,7 @@ class WrapTest(fixtures.TestBase):
try:
raise sa_exceptions.DBAPIError.instance('', [], OutOfSpec(),
DatabaseError)
- except sa_exceptions.DBAPIError, e:
+ except sa_exceptions.DBAPIError as e:
self.assert_(e.__class__ is sa_exceptions.DBAPIError)
except OutOfSpec:
self.assert_(False)
@@ -137,7 +138,7 @@ class WrapTest(fixtures.TestBase):
try:
raise sa_exceptions.DBAPIError.instance('', [],
sa_exceptions.ArgumentError(), DatabaseError)
- except sa_exceptions.DBAPIError, e:
+ except sa_exceptions.DBAPIError as e:
self.assert_(e.__class__ is sa_exceptions.DBAPIError)
except sa_exceptions.ArgumentError:
self.assert_(False)
diff --git a/test/base/test_utils.py b/test/base/test_utils.py
index b28d26e71..194ef1222 100644
--- a/test/base/test_utils.py
+++ b/test/base/test_utils.py
@@ -16,7 +16,7 @@ class KeyedTupleTest():
eq_(len(keyed_tuple), 0)
eq_(keyed_tuple.__dict__, {'_labels': []})
- eq_(keyed_tuple.keys(), [])
+ eq_(list(keyed_tuple.keys()), [])
eq_(keyed_tuple._fields, ())
eq_(keyed_tuple._asdict(), {})
@@ -27,7 +27,7 @@ class KeyedTupleTest():
eq_(len(keyed_tuple), 2)
eq_(keyed_tuple.__dict__, {'_labels': []})
- eq_(keyed_tuple.keys(), [])
+ eq_(list(keyed_tuple.keys()), [])
eq_(keyed_tuple._fields, ())
eq_(keyed_tuple._asdict(), {})
@@ -37,7 +37,7 @@ class KeyedTupleTest():
def test_basic_creation(self):
keyed_tuple = util.KeyedTuple([1, 2], ['a', 'b'])
eq_(str(keyed_tuple), '(1, 2)')
- eq_(keyed_tuple.keys(), ['a', 'b'])
+ eq_(list(keyed_tuple.keys()), ['a', 'b'])
eq_(keyed_tuple._fields, ('a', 'b'))
eq_(keyed_tuple._asdict(), {'a': 1, 'b': 2})
@@ -66,7 +66,7 @@ class KeyedTupleTest():
# TODO: consider not allowing None labels
expected = {'a': 1, None: 2, 'b': 3, '_labels': ['a', None, 'b']}
eq_(keyed_tuple.__dict__, expected)
- eq_(keyed_tuple.keys(), ['a', 'b'])
+ eq_(list(keyed_tuple.keys()), ['a', 'b'])
eq_(keyed_tuple._fields, ('a', 'b'))
eq_(keyed_tuple._asdict(), {'a': 1, 'b': 3})
@@ -86,7 +86,7 @@ class KeyedTupleTest():
# TODO: consider not allowing duplicate labels
expected = {'a': 1, 'b': 3, '_labels': ['a', 'b', 'b']}
eq_(keyed_tuple.__dict__, expected)
- eq_(keyed_tuple.keys(), ['a', 'b', 'b'])
+ eq_(list(keyed_tuple.keys()), ['a', 'b', 'b'])
eq_(keyed_tuple._fields, ('a', 'b', 'b'))
eq_(keyed_tuple._asdict(), {'a': 1, 'b': 3})
@@ -125,12 +125,12 @@ class OrderedDictTest(fixtures.TestBase):
o['snack'] = 'attack'
o['c'] = 3
- eq_(o.keys(), ['a', 'b', 'snack', 'c'])
- eq_(o.values(), [1, 2, 'attack', 3])
+ eq_(list(o.keys()), ['a', 'b', 'snack', 'c'])
+ eq_(list(o.values()), [1, 2, 'attack', 3])
o.pop('snack')
- eq_(o.keys(), ['a', 'b', 'c'])
- eq_(o.values(), [1, 2, 3])
+ eq_(list(o.keys()), ['a', 'b', 'c'])
+ eq_(list(o.values()), [1, 2, 3])
try:
o.pop('eep')
@@ -146,40 +146,40 @@ class OrderedDictTest(fixtures.TestBase):
except TypeError:
pass
- eq_(o.keys(), ['a', 'b', 'c'])
- eq_(o.values(), [1, 2, 3])
+ eq_(list(o.keys()), ['a', 'b', 'c'])
+ eq_(list(o.values()), [1, 2, 3])
o2 = util.OrderedDict(d=4)
o2['e'] = 5
- eq_(o2.keys(), ['d', 'e'])
- eq_(o2.values(), [4, 5])
+ eq_(list(o2.keys()), ['d', 'e'])
+ eq_(list(o2.values()), [4, 5])
o.update(o2)
- eq_(o.keys(), ['a', 'b', 'c', 'd', 'e'])
- eq_(o.values(), [1, 2, 3, 4, 5])
+ eq_(list(o.keys()), ['a', 'b', 'c', 'd', 'e'])
+ eq_(list(o.values()), [1, 2, 3, 4, 5])
o.setdefault('c', 'zzz')
o.setdefault('f', 6)
- eq_(o.keys(), ['a', 'b', 'c', 'd', 'e', 'f'])
- eq_(o.values(), [1, 2, 3, 4, 5, 6])
+ eq_(list(o.keys()), ['a', 'b', 'c', 'd', 'e', 'f'])
+ eq_(list(o.values()), [1, 2, 3, 4, 5, 6])
def test_odict_constructor(self):
o = util.OrderedDict([('name', 'jbe'), ('fullname', 'jonathan'
), ('password', '')])
- eq_(o.keys(), ['name', 'fullname', 'password'])
+ eq_(list(o.keys()), ['name', 'fullname', 'password'])
def test_odict_copy(self):
o = util.OrderedDict()
o["zzz"] = 1
o["aaa"] = 2
- eq_(o.keys(), ['zzz', 'aaa'])
+ eq_(list(o.keys()), ['zzz', 'aaa'])
o2 = o.copy()
- eq_(o2.keys(), o.keys())
+ eq_(list(o2.keys()), list(o.keys()))
o3 = copy.copy(o)
- eq_(o3.keys(), o.keys())
+ eq_(list(o3.keys()), list(o.keys()))
class OrderedSetTest(fixtures.TestBase):
@@ -198,7 +198,7 @@ class FrozenDictTest(fixtures.TestBase):
def test_serialize(self):
d = util.immutabledict({1: 2, 3: 4})
for loads, dumps in picklers():
- print loads(dumps(d))
+ print(loads(dumps(d)))
class MemoizedAttrTest(fixtures.TestBase):
@@ -252,7 +252,7 @@ class ColumnCollectionTest(fixtures.TestBase):
try:
cc['col1'] in cc
assert False
- except exc.ArgumentError, e:
+ except exc.ArgumentError as e:
eq_(str(e), "__contains__ requires a string argument")
def test_compare(self):
@@ -414,14 +414,14 @@ class IdentitySetTest(fixtures.TestBase):
for type_ in (object, ImmutableSubclass):
data = [type_(), type_()]
ids = util.IdentitySet()
- for i in range(2) + range(2):
+ for i in list(range(2)) + list(range(2)):
ids.add(data[i])
self.assert_eq(ids, data)
for type_ in (EqOverride, HashOverride, HashEqOverride):
data = [type_(1), type_(1), type_(2)]
ids = util.IdentitySet()
- for i in range(3) + range(3):
+ for i in list(range(3)) + list(range(3)):
ids.add(data[i])
self.assert_eq(ids, data)
@@ -843,13 +843,12 @@ class IdentitySetTest(fixtures.TestBase):
return super_, sub_, twin1, twin2, unique1, unique2
def _assert_unorderable_types(self, callable_):
- # Py3K
- #assert_raises_message(
- # TypeError, 'unorderable types', callable_)
- # Py2K
- assert_raises_message(
- TypeError, 'cannot compare sets using cmp()', callable_)
- # end Py2K
+ if util.py3k:
+ assert_raises_message(
+ TypeError, 'unorderable types', callable_)
+ else:
+ assert_raises_message(
+ TypeError, 'cannot compare sets using cmp()', callable_)
def test_basic_sanity(self):
IdentitySet = util.IdentitySet
@@ -969,23 +968,23 @@ class DictlikeIteritemsTest(fixtures.TestBase):
d = subdict(a=1, b=2, c=3)
self._ok(d)
- # Py2K
- def test_UserDict(self):
- import UserDict
- d = UserDict.UserDict(a=1, b=2, c=3)
- self._ok(d)
- # end Py2K
+# start Py2K
+# def test_UserDict(self):
+# import UserDict
+# d = UserDict.UserDict(a=1, b=2, c=3)
+# self._ok(d)
+# end Py2K
def test_object(self):
self._notok(object())
- # Py2K
- def test_duck_1(self):
- class duck1(object):
- def iteritems(duck):
- return iter(self.baseline)
- self._ok(duck1())
- # end Py2K
+# start Py2K
+# def test_duck_1(self):
+# class duck1(object):
+# def iteritems(duck):
+# return iter(self.baseline)
+# self._ok(duck1())
+# end Py2K
def test_duck_2(self):
class duck2(object):
@@ -993,16 +992,16 @@ class DictlikeIteritemsTest(fixtures.TestBase):
return list(self.baseline)
self._ok(duck2())
- # Py2K
- def test_duck_3(self):
- class duck3(object):
- def iterkeys(duck):
- return iter(['a', 'b', 'c'])
-
- def __getitem__(duck, key):
- return dict(a=1, b=2, c=3).get(key)
- self._ok(duck3())
- # end Py2K
+# start Py2K
+# def test_duck_3(self):
+# class duck3(object):
+# def iterkeys(duck):
+# return iter(['a', 'b', 'c'])
+#
+# def __getitem__(duck, key):
+# return dict(a=1, b=2, c=3).get(key)
+# self._ok(duck3())
+# end Py2K
def test_duck_4(self):
class duck4(object):
@@ -1029,9 +1028,9 @@ class DictlikeIteritemsTest(fixtures.TestBase):
class DuckTypeCollectionTest(fixtures.TestBase):
def test_sets(self):
- # Py2K
- import sets
- # end Py2K
+# start Py2K
+# import sets
+# end Py2K
class SetLike(object):
def add(self):
@@ -1041,9 +1040,9 @@ class DuckTypeCollectionTest(fixtures.TestBase):
__emulates__ = set
for type_ in (set,
- # Py2K
- sets.Set,
- # end Py2K
+# start Py2K
+# sets.Set,
+# end Py2K
SetLike,
ForcedSet):
eq_(util.duck_type_collection(type_), set)
@@ -1051,9 +1050,9 @@ class DuckTypeCollectionTest(fixtures.TestBase):
eq_(util.duck_type_collection(instance), set)
for type_ in (frozenset,
- # Py2K
- sets.ImmutableSet
- # end Py2K
+# start Py2K
+# sets.ImmutableSet
+# end Py2K
):
is_(util.duck_type_collection(type_), None)
instance = type_()
@@ -1195,7 +1194,7 @@ class SymbolTest(fixtures.TestBase):
sym3 = util.pickle.loads(s)
for protocol in 0, 1, 2:
- print protocol
+ print(protocol)
serial = util.pickle.dumps(sym1)
rt = util.pickle.loads(serial)
assert rt is sym1
@@ -1570,21 +1569,21 @@ class TestClassHierarchy(fixtures.TestBase):
eq_(set(util.class_hierarchy(A)), set((A, B, C, object)))
eq_(set(util.class_hierarchy(B)), set((A, B, C, object)))
- # Py2K
- def test_oldstyle_mixin(self):
- class A(object):
- pass
-
- class Mixin:
- pass
-
- class B(A, Mixin):
- pass
-
- eq_(set(util.class_hierarchy(B)), set((A, B, object)))
- eq_(set(util.class_hierarchy(Mixin)), set())
- eq_(set(util.class_hierarchy(A)), set((A, B, object)))
- # end Py2K
+# start Py2K
+# def test_oldstyle_mixin(self):
+# class A(object):
+# pass
+#
+# class Mixin:
+# pass
+#
+# class B(A, Mixin):
+# pass
+#
+# eq_(set(util.class_hierarchy(B)), set((A, B, object)))
+# eq_(set(util.class_hierarchy(Mixin)), set())
+# eq_(set(util.class_hierarchy(A)), set((A, B, object)))
+# end Py2K
class TestClassProperty(fixtures.TestBase):
diff --git a/test/dialect/test_firebird.py b/test/dialect/test_firebird.py
index 5a80a3776..6019dc8f9 100644
--- a/test/dialect/test_firebird.py
+++ b/test/dialect/test_firebird.py
@@ -28,7 +28,7 @@ class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
)
con.execute('CREATE DOMAIN img_domain AS BLOB SUB_TYPE '
'BINARY')
- except ProgrammingError, e:
+ except ProgrammingError as e:
if not 'attempt to store duplicate value' in str(e):
raise e
con.execute('''CREATE GENERATOR gen_testtable_id''')
diff --git a/test/dialect/test_mssql.py b/test/dialect/test_mssql.py
index 7b8f53390..4a99ef5f7 100644
--- a/test/dialect/test_mssql.py
+++ b/test/dialect/test_mssql.py
@@ -16,8 +16,8 @@ from sqlalchemy import testing
from sqlalchemy.testing import emits_warning_on, assert_raises_message
import decimal
from sqlalchemy.engine.reflection import Inspector
-from sqlalchemy.util.compat import b
-from sqlalchemy import sql
+from sqlalchemy.util import b, u, ue
+from sqlalchemy import sql, util
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -414,7 +414,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(
s,
"SELECT TOP 10 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
- checkparams={u'x_1': 5}
+ checkparams={'x_1': 5}
)
def test_limit_zero_using_top(self):
@@ -425,7 +425,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(
s,
"SELECT TOP 0 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
- checkparams={u'x_1': 5}
+ checkparams={'x_1': 5}
)
def test_offset_using_window(self):
@@ -435,14 +435,14 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
# test that the select is not altered with subsequent compile
# calls
- for i in xrange(2):
+ for i in range(2):
self.assert_compile(
s,
"SELECT anon_1.x, anon_1.y FROM (SELECT t.x AS x, t.y "
"AS y, ROW_NUMBER() OVER (ORDER BY t.y) AS "
"mssql_rn FROM t WHERE t.x = :x_1) AS "
"anon_1 WHERE mssql_rn > :mssql_rn_1",
- checkparams={u'mssql_rn_1': 20, u'x_1': 5}
+ checkparams={'mssql_rn_1': 20, 'x_1': 5}
)
def test_limit_offset_using_window(self):
@@ -458,7 +458,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"FROM t "
"WHERE t.x = :x_1) AS anon_1 "
"WHERE mssql_rn > :mssql_rn_1 AND mssql_rn <= :mssql_rn_2",
- checkparams={u'mssql_rn_1': 20, u'mssql_rn_2': 30, u'x_1': 5}
+ checkparams={'mssql_rn_1': 20, 'mssql_rn_2': 30, 'x_1': 5}
)
def test_limit_offset_with_correlated_order_by(self):
@@ -479,7 +479,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"FROM t1 "
"WHERE t1.x = :x_1) AS anon_1 "
"WHERE mssql_rn > :mssql_rn_1 AND mssql_rn <= :mssql_rn_2",
- checkparams={u'mssql_rn_1': 20, u'mssql_rn_2': 30, u'x_1': 5}
+ checkparams={'mssql_rn_1': 20, 'mssql_rn_2': 30, 'x_1': 5}
)
def test_limit_zero_offset_using_window(self):
@@ -493,7 +493,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
s,
"SELECT TOP 0 t.x, t.y FROM t "
"WHERE t.x = :x_1 ORDER BY t.y",
- checkparams={u'x_1': 5}
+ checkparams={'x_1': 5}
)
def test_sequence_start_0(self):
@@ -862,11 +862,11 @@ class QueryUnicodeTest(fixtures.TestBase):
# encode in UTF-8 (sting object) because this is the default
# dialect encoding
- con.execute(u"insert into unitest_table values ('bien u\
- umang\xc3\xa9')".encode('UTF-8'))
+ con.execute(ue("insert into unitest_table values ('bien u\
+ umang\xc3\xa9')").encode('UTF-8'))
try:
r = t1.select().execute().first()
- assert isinstance(r[1], unicode), \
+ assert isinstance(r[1], util.text_type), \
'%s is %s instead of unicode, working on %s' % (r[1],
type(r[1]), meta.bind)
finally:
@@ -1718,7 +1718,7 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTabl
)]
for value in test_items:
float_table.insert().execute(floatcol=value)
- except Exception, e:
+ except Exception as e:
raise e
@@ -1964,8 +1964,8 @@ class MonkeyPatchedBinaryTest(fixtures.TestBase):
def test_unicode(self):
module = __import__('pymssql')
- result = module.Binary(u'foo')
- eq_(result, u'foo')
+ result = module.Binary('foo')
+ eq_(result, 'foo')
def test_bytes(self):
module = __import__('pymssql')
@@ -2084,7 +2084,7 @@ class InfoCoerceUnicodeTest(fixtures.TestBase):
dialect = mssql.dialect()
value = CoerceUnicode().bind_processor(dialect)('a string')
- assert isinstance(value, unicode)
+ assert isinstance(value, util.text_type)
class ReflectHugeViewTest(fixtures.TestBase):
__only_on__ = 'mssql'
@@ -2096,13 +2096,13 @@ class ReflectHugeViewTest(fixtures.TestBase):
t = Table('base_table', self.metadata,
*[
Column("long_named_column_number_%d" % i, Integer)
- for i in xrange(self.col_num)
+ for i in range(self.col_num)
]
)
self.view_str = view_str = \
"CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % (
",".join("long_named_column_number_%d" % i
- for i in xrange(self.col_num))
+ for i in range(self.col_num))
)
assert len(view_str) > 4000
diff --git a/test/dialect/test_mysql.py b/test/dialect/test_mysql.py
index f89b0b229..728098d3a 100644
--- a/test/dialect/test_mysql.py
+++ b/test/dialect/test_mysql.py
@@ -5,6 +5,7 @@ from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
from sqlalchemy import *
from sqlalchemy import sql, exc, schema, types as sqltypes
+from sqlalchemy.util import u
from sqlalchemy.dialects.mysql import base as mysql
from sqlalchemy.engine.url import make_url
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, AssertsExecutionResults
@@ -372,9 +373,9 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
try:
self.assert_(list(row) == expected)
except:
- print "Storing %s" % store
- print "Expected %s" % expected
- print "Found %s" % list(row)
+ print("Storing %s" % store)
+ print("Expected %s" % expected)
+ print("Found %s" % list(row))
raise
table.delete().execute().close()
@@ -684,17 +685,17 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
metadata = MetaData(unicode_engine)
t1 = Table('table', metadata,
Column('id', Integer, primary_key=True),
- Column('value', Enum(u'réveillé', u'drôle', u'S’il')),
- Column('value2', mysql.ENUM(u'réveillé', u'drôle', u'S’il'))
+ Column('value', Enum(u('réveillé'), u('drôle'), u('S’il'))),
+ Column('value2', mysql.ENUM(u('réveillé'), u('drôle'), u('S’il')))
)
metadata.create_all()
try:
- t1.insert().execute(value=u'drôle', value2=u'drôle')
- t1.insert().execute(value=u'réveillé', value2=u'réveillé')
- t1.insert().execute(value=u'S’il', value2=u'S’il')
+ t1.insert().execute(value=u('drôle'), value2=u('drôle'))
+ t1.insert().execute(value=u('réveillé'), value2=u('réveillé'))
+ t1.insert().execute(value=u('S’il'), value2=u('S’il'))
eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
- [(1, u'drôle', u'drôle'), (2, u'réveillé', u'réveillé'),
- (3, u'S’il', u'S’il')]
+ [(1, u('drôle'), u('drôle')), (2, u('réveillé'), u('réveillé')),
+ (3, u('S’il'), u('S’il'))]
)
# test reflection of the enum labels
@@ -706,10 +707,10 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
# latin-1 stuff forcing its way in ?
assert t2.c.value.type.enums[0:2] == \
- (u'réveillé', u'drôle') # u'S’il') # eh ?
+ (u('réveillé'), u('drôle')) # u'S’il') # eh ?
assert t2.c.value2.type.enums[0:2] == \
- (u'réveillé', u'drôle') # u'S’il') # eh ?
+ (u('réveillé'), u('drôle')) # u'S’il') # eh ?
finally:
metadata.drop_all()
@@ -1370,7 +1371,7 @@ class SQLModeDetectionTest(fixtures.TestBase):
def _options(self, modes):
def connect(con, record):
cursor = con.cursor()
- print "DOING THiS:", "set sql_mode='%s'" % (",".join(modes))
+ print("DOING THiS:", "set sql_mode='%s'" % (",".join(modes)))
cursor.execute("set sql_mode='%s'" % (",".join(modes)))
e = engines.testing_engine(options={
'pool_events':[
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index def4654f0..d26ca6c03 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -1,5 +1,5 @@
# coding: utf-8
-from __future__ import with_statement
+
from sqlalchemy.testing import eq_
from sqlalchemy import *
@@ -7,6 +7,8 @@ from sqlalchemy import types as sqltypes, exc, schema
from sqlalchemy.sql import table, column
from sqlalchemy.testing import fixtures, AssertsExecutionResults, AssertsCompiledSQL
from sqlalchemy import testing
+from sqlalchemy.util import u, b
+from sqlalchemy import util
from sqlalchemy.testing import assert_raises, assert_raises_message
from sqlalchemy.testing.engines import testing_engine
from sqlalchemy.dialects.oracle import cx_oracle, base as oracle
@@ -817,7 +819,7 @@ class TwoPhaseTest(fixtures.TablesTest):
)
def test_twophase_prepare_false(self):
conn = self._connection()
- for i in xrange(2):
+ for i in range(2):
trans = conn.begin_twophase()
conn.execute("select 1 from dual")
trans.prepare()
@@ -827,7 +829,7 @@ class TwoPhaseTest(fixtures.TablesTest):
def test_twophase_prepare_true(self):
conn = self._connection()
- for i in xrange(2):
+ for i in range(2):
trans = conn.begin_twophase()
conn.execute("insert into datatable (id, data) "
"values (%s, 'somedata')" % i)
@@ -880,7 +882,7 @@ class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL):
b = bindparam("foo", "hello world!")
assert b.type.dialect_impl(dialect).get_dbapi_type(dbapi) == 'STRING'
- b = bindparam("foo", u"hello world!")
+ b = bindparam("foo", "hello world!")
assert b.type.dialect_impl(dialect).get_dbapi_type(dbapi) == 'STRING'
def test_long(self):
@@ -1277,10 +1279,10 @@ class TypesTest(fixtures.TestBase):
Column('data', oracle.RAW(35))
)
metadata.create_all()
- testing.db.execute(raw_table.insert(), id=1, data="ABCDEF")
+ testing.db.execute(raw_table.insert(), id=1, data=b("ABCDEF"))
eq_(
testing.db.execute(raw_table.select()).first(),
- (1, "ABCDEF")
+ (1, b("ABCDEF"))
)
@testing.provide_metadata
@@ -1301,11 +1303,11 @@ class TypesTest(fixtures.TestBase):
t2.c.data.type.dialect_impl(testing.db.dialect),
cx_oracle._OracleNVarChar)
- data = u'm’a réveillé.'
+ data = u('m’a réveillé.')
t2.insert().execute(data=data)
res = t2.select().execute().first()['data']
eq_(res, data)
- assert isinstance(res, unicode)
+ assert isinstance(res, util.text_type)
def test_char_length(self):
@@ -1367,10 +1369,10 @@ class TypesTest(fixtures.TestBase):
try:
engine.execute(t.insert(), id=1,
data='this is text',
- bindata='this is binary')
+ bindata=b('this is binary'))
row = engine.execute(t.select()).first()
eq_(row['data'].read(), 'this is text')
- eq_(row['bindata'].read(), 'this is binary')
+ eq_(row['bindata'].read(), b('this is binary'))
finally:
t.drop(engine)
@@ -1455,7 +1457,8 @@ class BufferedColumnTest(fixtures.TestBase, AssertsCompiledSQL):
stream = os.path.join(
os.path.dirname(__file__), "..",
'binary_data_one.dat')
- stream = file(stream).read(12000)
+ with open(stream, "rb") as file_:
+ stream = file_.read(12000)
for i in range(1, 11):
binary_table.insert().execute(id=i, data=stream)
@@ -1651,28 +1654,28 @@ class UnicodeSchemaTest(fixtures.TestBase):
metadata.create_all()
table.insert().execute(
- {'_underscorecolumn': u'’é'},
+ {'_underscorecolumn': u('’é')},
)
result = testing.db.execute(
- table.select().where(table.c._underscorecolumn==u'’é')
+ table.select().where(table.c._underscorecolumn==u('’é'))
).scalar()
- eq_(result, u'’é')
+ eq_(result, u('’é'))
@testing.provide_metadata
def test_quoted_column_unicode(self):
metadata = self.metadata
table=Table("atable", metadata,
- Column(u"méil", Unicode(255), primary_key=True),
+ Column(u("méil"), Unicode(255), primary_key=True),
)
metadata.create_all()
table.insert().execute(
- {u'méil': u'’é'},
+ {u('méil'): u('’é')},
)
result = testing.db.execute(
- table.select().where(table.c[u'méil']==u'’é')
+ table.select().where(table.c[u('méil')] == u('’é'))
).scalar()
- eq_(result, u'’é')
+ eq_(result, u('’é'))
class DBLinkReflectionTest(fixtures.TestBase):
@@ -1712,5 +1715,5 @@ class DBLinkReflectionTest(fixtures.TestBase):
t = Table('test_table_syn', m, autoload=True,
autoload_with=testing.db, oracle_resolve_synonyms=True)
- eq_(t.c.keys(), ['id', 'data'])
+ eq_(list(t.c.keys()), ['id', 'data'])
eq_(list(t.primary_key), [t.c.id])
diff --git a/test/dialect/test_postgresql.py b/test/dialect/test_postgresql.py
index 4fd5bc9c1..00e5c07ab 100644
--- a/test/dialect/test_postgresql.py
+++ b/test/dialect/test_postgresql.py
@@ -1,6 +1,6 @@
# coding: utf-8
-from __future__ import with_statement
+
from sqlalchemy.testing.assertions import eq_, assert_raises, \
assert_raises_message, is_, AssertsExecutionResults, \
@@ -555,28 +555,29 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
@testing.fails_on('postgresql+pg8000',
'zxjdbc fails on ENUM: column "XXX" is of type '
'XXX but expression is of type text')
+ @testing.provide_metadata
def test_unicode_labels(self):
- metadata = MetaData(testing.db)
+ metadata = self.metadata
t1 = Table('table', metadata,
Column('id', Integer, primary_key=True),
Column('value',
- Enum(u'réveillé', u'drôle', u'S’il',
+ Enum(util.u('réveillé'), util.u('drôle'), util.u('S’il'),
name='onetwothreetype'))
)
-
metadata.create_all()
- try:
- t1.insert().execute(value=u'drôle')
- t1.insert().execute(value=u'réveillé')
- t1.insert().execute(value=u'S’il')
- eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
- [(1, u'drôle'), (2, u'réveillé'), (3, u'S’il')]
- )
- m2 = MetaData(testing.db)
- t2 = Table('table', m2, autoload=True)
- assert t2.c.value.type.enums == (u'réveillé', u'drôle', u'S’il')
- finally:
- metadata.drop_all()
+ t1.insert().execute(value=util.u('drôle'))
+ t1.insert().execute(value=util.u('réveillé'))
+ t1.insert().execute(value=util.u('S’il'))
+ eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
+ [(1, util.u('drôle')), (2, util.u('réveillé')),
+ (3, util.u('S’il'))]
+ )
+ m2 = MetaData(testing.db)
+ t2 = Table('table', m2, autoload=True)
+ eq_(
+ t2.c.value.type.enums,
+ (util.u('réveillé'), util.u('drôle'), util.u('S’il'))
+ )
def test_non_native_type(self):
metadata = MetaData()
@@ -1239,7 +1240,7 @@ class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
:
try:
con.execute(ddl)
- except exc.DBAPIError, e:
+ except exc.DBAPIError as e:
if not 'already exists' in str(e):
raise e
con.execute('CREATE TABLE testtable (question integer, answer '
@@ -1477,7 +1478,7 @@ class ReflectionTest(fixtures.TestBase):
meta1.create_all()
meta2 = MetaData(testing.db)
subject = Table('subject', meta2, autoload=True)
- eq_(subject.primary_key.columns.keys(), [u'p2', u'p1'])
+ eq_(subject.primary_key.columns.keys(), ['p2', 'p1'])
@testing.provide_metadata
def test_pg_weirdchar_reflection(self):
@@ -1750,7 +1751,7 @@ class ReflectionTest(fixtures.TestBase):
conn.execute("ALTER TABLE t RENAME COLUMN x to y")
ind = testing.db.dialect.get_indexes(conn, "t", None)
- eq_(ind, [{'unique': False, 'column_names': [u'y'], 'name': u'idx1'}])
+ eq_(ind, [{'unique': False, 'column_names': ['y'], 'name': 'idx1'}])
conn.close()
class CustomTypeReflectionTest(fixtures.TestBase):
@@ -2175,18 +2176,18 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
def test_insert_array(self):
arrtable = self.tables.arrtable
- arrtable.insert().execute(intarr=[1, 2, 3], strarr=[u'abc',
- u'def'])
+ arrtable.insert().execute(intarr=[1, 2, 3], strarr=[util.u('abc'),
+ util.u('def')])
results = arrtable.select().execute().fetchall()
eq_(len(results), 1)
eq_(results[0]['intarr'], [1, 2, 3])
- eq_(results[0]['strarr'], ['abc', 'def'])
+ eq_(results[0]['strarr'], [util.u('abc'), util.u('def')])
def test_array_where(self):
arrtable = self.tables.arrtable
- arrtable.insert().execute(intarr=[1, 2, 3], strarr=[u'abc',
- u'def'])
- arrtable.insert().execute(intarr=[4, 5, 6], strarr=u'ABC')
+ arrtable.insert().execute(intarr=[1, 2, 3], strarr=[util.u('abc'),
+ util.u('def')])
+ arrtable.insert().execute(intarr=[4, 5, 6], strarr=util.u('ABC'))
results = arrtable.select().where(arrtable.c.intarr == [1, 2,
3]).execute().fetchall()
eq_(len(results), 1)
@@ -2195,7 +2196,7 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
def test_array_concat(self):
arrtable = self.tables.arrtable
arrtable.insert().execute(intarr=[1, 2, 3],
- strarr=[u'abc', u'def'])
+ strarr=[util.u('abc'), util.u('def')])
results = select([arrtable.c.intarr + [4, 5,
6]]).execute().fetchall()
eq_(len(results), 1)
@@ -2204,15 +2205,15 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
def test_array_subtype_resultprocessor(self):
arrtable = self.tables.arrtable
arrtable.insert().execute(intarr=[4, 5, 6],
- strarr=[[u'm\xe4\xe4'], [u'm\xf6\xf6'
- ]])
- arrtable.insert().execute(intarr=[1, 2, 3], strarr=[u'm\xe4\xe4'
- , u'm\xf6\xf6'])
+ strarr=[[util.ue('m\xe4\xe4')], [
+ util.ue('m\xf6\xf6')]])
+ arrtable.insert().execute(intarr=[1, 2, 3], strarr=[
+ util.ue('m\xe4\xe4'), util.ue('m\xf6\xf6')])
results = \
arrtable.select(order_by=[arrtable.c.intarr]).execute().fetchall()
eq_(len(results), 2)
- eq_(results[0]['strarr'], [u'm\xe4\xe4', u'm\xf6\xf6'])
- eq_(results[1]['strarr'], [[u'm\xe4\xe4'], [u'm\xf6\xf6']])
+ eq_(results[0]['strarr'], [util.ue('m\xe4\xe4'), util.ue('m\xf6\xf6')])
+ eq_(results[1]['strarr'], [[util.ue('m\xe4\xe4')], [util.ue('m\xf6\xf6')]])
def test_array_literal(self):
eq_(
@@ -2264,7 +2265,7 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
testing.db.execute(
arrtable.insert(),
intarr=[4, 5, 6],
- strarr=[u'abc', u'def']
+ strarr=[util.u('abc'), util.u('def')]
)
eq_(
testing.db.scalar(select([arrtable.c.intarr[2:3]])),
@@ -2900,8 +2901,8 @@ class HStoreTest(fixtures.TestBase):
dialect, None)
assert_raises_message(
ValueError,
- r'''After '\[\.\.\.\], "key1"=>"value1", ', could not parse '''
- '''residual at position 36: 'crapcrapcrap, "key3"\[\.\.\.\]''',
+ r'''After u?'\[\.\.\.\], "key1"=>"value1", ', could not parse '''
+ '''residual at position 36: u?'crapcrapcrap, "key3"\[\.\.\.\]''',
proc,
'"key2"=>"value2", "key1"=>"value1", '
'crapcrapcrap, "key3"=>"value3"'
@@ -3206,3 +3207,28 @@ class HStoreRoundTripTest(fixtures.TablesTest):
def test_fixed_round_trip_native(self):
engine = testing.db
self._test_fixed_round_trip(engine)
+
+ def _test_unicode_round_trip(self, engine):
+ s = select([
+ hstore(
+ array([u'réveillé', u'drôle', u'S’il']),
+ array([u'réveillé', u'drôle', u'S’il'])
+ )
+ ])
+ eq_(
+ engine.scalar(s),
+ {
+ u'réveillé': u'réveillé',
+ u'drôle': u'drôle',
+ u'S’il': u'S’il'
+ }
+ )
+
+ def test_unicode_round_trip_python(self):
+ engine = self._non_native_engine()
+ self._test_unicode_round_trip(engine)
+
+ @testing.only_on("postgresql+psycopg2")
+ def test_unicode_round_trip_native(self):
+ engine = testing.db
+ self._test_unicode_round_trip(engine)
diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py
index 97962a54a..4ede13ff6 100644
--- a/test/dialect/test_sqlite.py
+++ b/test/dialect/test_sqlite.py
@@ -1,10 +1,17 @@
+#!coding: utf-8
+
"""SQLite-specific tests."""
from sqlalchemy.testing import eq_, assert_raises, \
assert_raises_message
import datetime
-from sqlalchemy import *
-from sqlalchemy import exc, sql, schema, pool, types as sqltypes
+from sqlalchemy import Table, String, select, Text, CHAR, bindparam, Column,\
+ Unicode, Date, MetaData, UnicodeText, Time, Integer, TIMESTAMP, \
+ Boolean, func, NUMERIC, DateTime, extract, ForeignKey, text, Numeric,\
+ DefaultClause, and_, DECIMAL, TypeDecorator, create_engine, Float, \
+ INTEGER, UniqueConstraint, DATETIME, DATE, TIME, BOOLEAN
+from sqlalchemy.util import u, ue
+from sqlalchemy import exc, sql, schema, pool, types as sqltypes, util
from sqlalchemy.dialects.sqlite import base as sqlite, \
pysqlite as pysqlite_dialect
from sqlalchemy.engine.url import make_url
@@ -84,7 +91,7 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
eq_(row, (1, datetime.date(2010, 5, 10),
datetime.datetime( 2010, 5, 10, 12, 15, 25, )))
r = engine.execute(func.current_date()).scalar()
- assert isinstance(r, basestring)
+ assert isinstance(r, util.string_types)
finally:
t.drop(engine)
engine.dispose()
@@ -104,8 +111,8 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
UnicodeText(),
):
bindproc = t.dialect_impl(dialect).bind_processor(dialect)
- assert not bindproc or isinstance(bindproc(u'some string'),
- unicode)
+ assert not bindproc or \
+ isinstance(bindproc(util.u('some string')), util.text_type)
@testing.provide_metadata
def test_type_reflection(self):
@@ -485,6 +492,20 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
'constrained_columns': ['tid']
}])
+ @testing.provide_metadata
+ def test_description_encoding(self):
+ # amazingly, pysqlite seems to still deliver cursor.description
+ # as encoded bytes in py2k
+
+ t = Table('x', self.metadata,
+ Column(u('méil'), Integer, primary_key=True),
+ Column(ue('\u6e2c\u8a66'), Integer),
+ )
+ self.metadata.create_all(testing.db)
+
+ result = testing.db.execute(t.select())
+ assert u('méil') in result.keys()
+ assert ue('\u6e2c\u8a66') in result.keys()
def test_attached_as_schema(self):
cx = testing.db.connect()
@@ -566,7 +587,7 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
eq_(inspector.get_indexes('foo'), [])
eq_(inspector.get_indexes('foo',
include_auto_indexes=True), [{'unique': 1, 'name'
- : u'sqlite_autoindex_foo_1', 'column_names': [u'bar']}])
+ : 'sqlite_autoindex_foo_1', 'column_names': ['bar']}])
finally:
meta.drop_all()
diff --git a/test/dialect/test_sybase.py b/test/dialect/test_sybase.py
index 025d49aae..1318a282b 100644
--- a/test/dialect/test_sybase.py
+++ b/test/dialect/test_sybase.py
@@ -19,7 +19,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
'year': 'year',
}
- for field, subst in mapping.items():
+ for field, subst in list(mapping.items()):
self.assert_compile(
select([extract(field, t.c.col1)]),
'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % subst)
diff --git a/test/engine/test_bind.py b/test/engine/test_bind.py
index f76350fcc..973cf4d84 100644
--- a/test/engine/test_bind.py
+++ b/test/engine/test_bind.py
@@ -1,6 +1,6 @@
"""tests the "bind" attribute/argument across schema and SQL,
including the deprecated versions of these arguments"""
-from __future__ import with_statement
+
from sqlalchemy.testing import eq_, assert_raises
from sqlalchemy import engine, exc
from sqlalchemy import MetaData, ThreadLocalMetaData
@@ -61,7 +61,7 @@ class BindTest(fixtures.TestBase):
try:
meth()
assert False
- except exc.UnboundExecutionError, e:
+ except exc.UnboundExecutionError as e:
eq_(str(e),
"The MetaData is not bound to an Engine or "
"Connection. Execution can not proceed without a "
@@ -82,7 +82,7 @@ class BindTest(fixtures.TestBase):
try:
meth()
assert False
- except exc.UnboundExecutionError, e:
+ except exc.UnboundExecutionError as e:
eq_(
str(e),
"The Table 'test_table' "
diff --git a/test/engine/test_ddlemit.py b/test/engine/test_ddlemit.py
index 3dbd5756a..deaf09cf7 100644
--- a/test/engine/test_ddlemit.py
+++ b/test/engine/test_ddlemit.py
@@ -47,7 +47,7 @@ class EmitDDLTest(fixtures.TestBase):
return (m, ) + tuple(
Table('t%d' % i, m, Column('x', Integer))
- for i in xrange(1, 6)
+ for i in range(1, 6)
)
def _table_seq_fixture(self):
diff --git a/test/engine/test_ddlevents.py b/test/engine/test_ddlevents.py
index 71379ec7e..6cc652baf 100644
--- a/test/engine/test_ddlevents.py
+++ b/test/engine/test_ddlevents.py
@@ -1,4 +1,4 @@
-from __future__ import with_statement
+
from sqlalchemy.testing import assert_raises, assert_raises_message
from sqlalchemy.schema import DDL, CheckConstraint, AddConstraint, \
DropConstraint
diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py
index 203d7bd71..1c577730b 100644
--- a/test/engine/test_execute.py
+++ b/test/engine/test_execute.py
@@ -1,4 +1,4 @@
-from __future__ import with_statement
+
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, \
config, is_
@@ -12,13 +12,13 @@ from sqlalchemy.testing.schema import Table, Column
import sqlalchemy as tsa
from sqlalchemy import testing
from sqlalchemy.testing import engines
+from sqlalchemy import util
from sqlalchemy.testing.engines import testing_engine
import logging.handlers
from sqlalchemy.dialects.oracle.zxjdbc import ReturningParam
from sqlalchemy.engine import result as _result, default
from sqlalchemy.engine.base import Connection, Engine
from sqlalchemy.testing import fixtures
-import StringIO
users, metadata, users_autoinc = None, None, None
class ExecuteTest(fixtures.TestBase):
@@ -256,7 +256,7 @@ class ExecuteTest(fixtures.TestBase):
try:
cursor = raw.cursor()
cursor.execute("SELECTINCORRECT")
- except testing.db.dialect.dbapi.DatabaseError, orig:
+ except testing.db.dialect.dbapi.DatabaseError as orig:
# py3k has "orig" in local scope...
the_orig = orig
finally:
@@ -622,7 +622,7 @@ class LogParamsTest(fixtures.TestBase):
def test_log_large_dict(self):
self.eng.execute(
"INSERT INTO foo (data) values (:data)",
- [{"data":str(i)} for i in xrange(100)]
+ [{"data":str(i)} for i in range(100)]
)
eq_(
self.buf.buffer[1].message,
@@ -635,7 +635,7 @@ class LogParamsTest(fixtures.TestBase):
def test_log_large_list(self):
self.eng.execute(
"INSERT INTO foo (data) values (?)",
- [(str(i), ) for i in xrange(100)]
+ [(str(i), ) for i in range(100)]
)
eq_(
self.buf.buffer[1].message,
@@ -654,7 +654,7 @@ class LogParamsTest(fixtures.TestBase):
"100 total bound parameter sets ... {'data': '98'}, {'data': '99'}\]",
lambda: self.eng.execute(
"INSERT INTO nonexistent (data) values (:data)",
- [{"data":str(i)} for i in xrange(100)]
+ [{"data":str(i)} for i in range(100)]
)
)
@@ -668,7 +668,7 @@ class LogParamsTest(fixtures.TestBase):
"\('98',\), \('99',\)\]",
lambda: self.eng.execute(
"INSERT INTO nonexistent (data) values (?)",
- [(str(i), ) for i in xrange(100)]
+ [(str(i), ) for i in range(100)]
)
)
@@ -834,9 +834,9 @@ class EchoTest(fixtures.TestBase):
class MockStrategyTest(fixtures.TestBase):
def _engine_fixture(self):
- buf = StringIO.StringIO()
+ buf = util.StringIO()
def dump(sql, *multiparams, **params):
- buf.write(unicode(sql.compile(dialect=engine.dialect)))
+ buf.write(util.text_type(sql.compile(dialect=engine.dialect)))
engine = create_engine('postgresql://', strategy='mock', executor=dump)
return engine, buf
@@ -939,7 +939,6 @@ class ResultProxyTest(fixtures.TestBase):
def test_row_c_sequence_check(self):
import csv
import collections
- from StringIO import StringIO
metadata = MetaData()
metadata.bind = 'sqlite://'
@@ -952,7 +951,7 @@ class ResultProxyTest(fixtures.TestBase):
users.insert().execute(name='Test')
row = users.select().execute().fetchone()
- s = StringIO()
+ s = util.StringIO()
writer = csv.writer(s)
# csv performs PySequenceCheck call
writer.writerow(row)
@@ -1026,7 +1025,7 @@ class AlternateResultProxyTest(fixtures.TestBase):
)
m.create_all(engine)
engine.execute(t.insert(), [
- {'x':i, 'y':"t_%d" % i} for i in xrange(1, 12)
+ {'x':i, 'y':"t_%d" % i} for i in range(1, 12)
])
def _test_proxy(self, cls):
@@ -1039,13 +1038,13 @@ class AlternateResultProxyTest(fixtures.TestBase):
assert isinstance(r, cls)
for i in range(5):
rows.append(r.fetchone())
- eq_(rows, [(i, "t_%d" % i) for i in xrange(1, 6)])
+ eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)])
rows = r.fetchmany(3)
- eq_(rows, [(i, "t_%d" % i) for i in xrange(6, 9)])
+ eq_(rows, [(i, "t_%d" % i) for i in range(6, 9)])
rows = r.fetchall()
- eq_(rows, [(i, "t_%d" % i) for i in xrange(9, 12)])
+ eq_(rows, [(i, "t_%d" % i) for i in range(9, 12)])
r = self.engine.execute(select([self.table]))
rows = r.fetchmany(None)
@@ -1059,7 +1058,7 @@ class AlternateResultProxyTest(fixtures.TestBase):
r = self.engine.execute(select([self.table]).limit(5))
rows = r.fetchmany(6)
- eq_(rows, [(i, "t_%d" % i) for i in xrange(1, 6)])
+ eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)])
def test_plain(self):
self._test_proxy(_result.ResultProxy)
@@ -1184,7 +1183,7 @@ class EngineEventsTest(fixtures.TestBase):
try:
conn.execute("SELECT FOO FROM I_DONT_EXIST")
assert False
- except tsa.exc.DBAPIError, e:
+ except tsa.exc.DBAPIError as e:
assert canary[0][2] is e.orig
assert canary[0][0] == "SELECT FOO FROM I_DONT_EXIST"
diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py
index a00a942cb..73bdc76c4 100644
--- a/test/engine/test_parseconnect.py
+++ b/test/engine/test_parseconnect.py
@@ -1,6 +1,5 @@
-from sqlalchemy.testing import assert_raises, assert_raises_message, eq_
-import ConfigParser
-import StringIO
+from sqlalchemy.testing import assert_raises, eq_
+from sqlalchemy.util.compat import configparser, StringIO
import sqlalchemy.engine.url as url
from sqlalchemy import create_engine, engine_from_config, exc, pool
from sqlalchemy.engine.util import _coerce_config
@@ -103,8 +102,8 @@ pool_size=2
pool_threadlocal=1
pool_timeout=10
"""
- ini = ConfigParser.ConfigParser()
- ini.readfp(StringIO.StringIO(raw))
+ ini = configparser.ConfigParser()
+ ini.readfp(StringIO(raw))
expected = {
'url': 'postgresql://scott:tiger@somehost/test?fooz=somevalue',
@@ -234,7 +233,7 @@ pool_timeout=10
: True}, convert_unicode=True)
try:
e.connect()
- except tsa.exc.DBAPIError, de:
+ except tsa.exc.DBAPIError as de:
assert not de.connection_invalidated
def test_ensure_dialect_does_is_disconnect_no_conn(self):
@@ -266,7 +265,7 @@ pool_timeout=10
try:
create_engine('sqlite://', module=ThrowOnConnect()).connect()
assert False
- except tsa.exc.DBAPIError, de:
+ except tsa.exc.DBAPIError as de:
assert de.connection_invalidated
def test_urlattr(self):
diff --git a/test/engine/test_pool.py b/test/engine/test_pool.py
index 9551086f5..260f461d0 100644
--- a/test/engine/test_pool.py
+++ b/test/engine/test_pool.py
@@ -527,23 +527,23 @@ class DeprecatedPoolListenerTest(PoolTestBase):
self.assert_((item in innerself.checked_out) == in_cout)
self.assert_((item in innerself.checked_in) == in_cin)
def inst_connect(self, con, record):
- print "connect(%s, %s)" % (con, record)
+ print("connect(%s, %s)" % (con, record))
assert con is not None
assert record is not None
self.connected.append(con)
def inst_first_connect(self, con, record):
- print "first_connect(%s, %s)" % (con, record)
+ print("first_connect(%s, %s)" % (con, record))
assert con is not None
assert record is not None
self.first_connected.append(con)
def inst_checkout(self, con, record, proxy):
- print "checkout(%s, %s, %s)" % (con, record, proxy)
+ print("checkout(%s, %s, %s)" % (con, record, proxy))
assert con is not None
assert record is not None
assert proxy is not None
self.checked_out.append(con)
def inst_checkin(self, con, record):
- print "checkin(%s, %s)" % (con, record)
+ print("checkin(%s, %s)" % (con, record))
# con can be None if invalidated
assert record is not None
self.checked_in.append(con)
@@ -740,8 +740,8 @@ class QueuePoolTest(PoolTestBase):
def status(pool):
tup = pool.size(), pool.checkedin(), pool.overflow(), \
pool.checkedout()
- print 'Pool size: %d Connections in pool: %d Current '\
- 'Overflow: %d Current Checked out connections: %d' % tup
+ print('Pool size: %d Connections in pool: %d Current '\
+ 'Overflow: %d Current Checked out connections: %d' % tup)
return tup
c1 = p.connect()
@@ -814,7 +814,7 @@ class QueuePoolTest(PoolTestBase):
max_overflow=1, use_threadlocal=False, timeout=3)
timeouts = []
def checkout():
- for x in xrange(1):
+ for x in range(1):
now = time.time()
try:
c1 = p.connect()
@@ -825,7 +825,7 @@ class QueuePoolTest(PoolTestBase):
c1.close()
threads = []
- for i in xrange(10):
+ for i in range(10):
th = threading.Thread(target=checkout)
th.start()
threads.append(th)
@@ -862,7 +862,7 @@ class QueuePoolTest(PoolTestBase):
except tsa.exc.TimeoutError:
pass
threads = []
- for i in xrange(thread_count):
+ for i in range(thread_count):
th = threading.Thread(target=whammy)
th.start()
threads.append(th)
@@ -1009,8 +1009,8 @@ class QueuePoolTest(PoolTestBase):
strong_refs.add(c.connection)
return c
- for j in xrange(5):
- conns = [_conn() for i in xrange(4)]
+ for j in range(5):
+ conns = [_conn() for i in range(4)]
for c in conns:
c.close()
@@ -1154,7 +1154,7 @@ class SingletonThreadPoolTest(PoolTestBase):
return p.connect()
def checkout():
- for x in xrange(10):
+ for x in range(10):
c = _conn()
assert c
c.cursor()
@@ -1162,7 +1162,7 @@ class SingletonThreadPoolTest(PoolTestBase):
time.sleep(.1)
threads = []
- for i in xrange(10):
+ for i in range(10):
th = threading.Thread(target=checkout)
th.start()
threads.append(th)
diff --git a/test/engine/test_processors.py b/test/engine/test_processors.py
index bc9af7305..b1c482f09 100644
--- a/test/engine/test_processors.py
+++ b/test/engine/test_processors.py
@@ -53,7 +53,7 @@ class PyDateProcessorTest(_DateProcessorTest):
cls.module = type("util", (object,),
dict(
(k, staticmethod(v))
- for k, v in processors.py_fallback().items()
+ for k, v in list(processors.py_fallback().items())
)
)
@@ -156,7 +156,7 @@ class PyDistillArgsTest(_DistillArgsTest):
cls.module = type("util", (object,),
dict(
(k, staticmethod(v))
- for k, v in util.py_fallback().items()
+ for k, v in list(util.py_fallback().items())
)
)
diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py
index 567647f20..ee3ff1459 100644
--- a/test/engine/test_reconnect.py
+++ b/test/engine/test_reconnect.py
@@ -7,7 +7,7 @@ import sqlalchemy as tsa
from sqlalchemy import testing
from sqlalchemy.testing import engines
from sqlalchemy.testing.util import gc_collect
-from sqlalchemy import exc
+from sqlalchemy import exc, util
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.engines import testing_engine
@@ -173,7 +173,7 @@ class MockReconnectTest(fixtures.TestBase):
try:
trans.commit()
assert False
- except tsa.exc.InvalidRequestError, e:
+ except tsa.exc.InvalidRequestError as e:
assert str(e) \
== "Can't reconnect until invalid transaction is "\
"rolled back"
@@ -370,7 +370,7 @@ class RealReconnectTest(fixtures.TestBase):
try:
conn.execute(select([1]))
assert False
- except tsa.exc.DBAPIError, e:
+ except tsa.exc.DBAPIError as e:
if not e.connection_invalidated:
raise
@@ -386,7 +386,7 @@ class RealReconnectTest(fixtures.TestBase):
try:
conn.execute(select([1]))
assert False
- except tsa.exc.DBAPIError, e:
+ except tsa.exc.DBAPIError as e:
if not e.connection_invalidated:
raise
assert conn.invalidated
@@ -407,7 +407,7 @@ class RealReconnectTest(fixtures.TestBase):
try:
c1.execute(select([1]))
assert False
- except tsa.exc.DBAPIError, e:
+ except tsa.exc.DBAPIError as e:
assert e.connection_invalidated
p2 = engine.pool
@@ -415,7 +415,7 @@ class RealReconnectTest(fixtures.TestBase):
try:
c2.execute(select([1]))
assert False
- except tsa.exc.DBAPIError, e:
+ except tsa.exc.DBAPIError as e:
assert e.connection_invalidated
# pool isn't replaced
@@ -464,6 +464,9 @@ class RealReconnectTest(fixtures.TestBase):
conn.invalidate()
conn.invalidate()
+ @testing.skip_if(
+ [lambda: util.py3k, "oracle+cx_oracle"],
+ "Crashes on py3k+cx_oracle")
def test_explode_in_initializer(self):
engine = engines.testing_engine()
def broken_initialize(connection):
@@ -503,7 +506,7 @@ class RealReconnectTest(fixtures.TestBase):
try:
conn.execute(select([1]))
assert False
- except tsa.exc.DBAPIError, e:
+ except tsa.exc.DBAPIError as e:
if not e.connection_invalidated:
raise
assert not conn.closed
@@ -523,7 +526,7 @@ class RealReconnectTest(fixtures.TestBase):
try:
conn.execute(select([1]))
assert False
- except tsa.exc.DBAPIError, e:
+ except tsa.exc.DBAPIError as e:
if not e.connection_invalidated:
raise
@@ -542,7 +545,7 @@ class RealReconnectTest(fixtures.TestBase):
try:
conn.execute(select([1]))
assert False
- except tsa.exc.DBAPIError, e:
+ except tsa.exc.DBAPIError as e:
if not e.connection_invalidated:
raise
assert not conn.closed
@@ -558,7 +561,7 @@ class RealReconnectTest(fixtures.TestBase):
try:
trans.commit()
assert False
- except tsa.exc.InvalidRequestError, e:
+ except tsa.exc.InvalidRequestError as e:
assert str(e) \
== "Can't reconnect until invalid transaction is "\
"rolled back"
@@ -627,13 +630,13 @@ class InvalidateDuringResultTest(fixtures.TestBase):
def test_invalidate_on_results(self):
conn = engine.connect()
result = conn.execute('select * from sometable')
- for x in xrange(20):
+ for x in range(20):
result.fetchone()
engine.test_shutdown()
try:
- print 'ghost result: %r' % result.fetchone()
+ print('ghost result: %r' % result.fetchone())
assert False
- except tsa.exc.DBAPIError, e:
+ except tsa.exc.DBAPIError as e:
if not e.connection_invalidated:
raise
assert conn.invalidated
diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py
index a2f6b3796..a562ef73b 100644
--- a/test/engine/test_reflection.py
+++ b/test/engine/test_reflection.py
@@ -7,6 +7,8 @@ from sqlalchemy.testing import ComparesTables, \
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
from sqlalchemy import testing
+from sqlalchemy.util import ue
+
metadata, users = None, None
@@ -808,7 +810,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
try:
m4.reflect(only=['rt_a', 'rt_f'])
self.assert_(False)
- except sa.exc.InvalidRequestError, e:
+ except sa.exc.InvalidRequestError as e:
self.assert_(e.args[0].endswith('(rt_f)'))
m5 = MetaData(testing.db)
@@ -830,7 +832,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
)
if existing:
- print "Other tables present in database, skipping some checks."
+ print("Other tables present in database, skipping some checks.")
else:
baseline.drop_all()
m9 = MetaData(testing.db)
@@ -1039,19 +1041,19 @@ class UnicodeReflectionTest(fixtures.TestBase):
cls.metadata = metadata = MetaData()
no_multibyte_period = set([
- (u'plain', u'col_plain', u'ix_plain')
+ ('plain', 'col_plain', 'ix_plain')
])
no_has_table = [
- (u'no_has_table_1', u'col_Unit\u00e9ble', u'ix_Unit\u00e9ble'),
- (u'no_has_table_2', u'col_\u6e2c\u8a66', u'ix_\u6e2c\u8a66'),
+ ('no_has_table_1', ue('col_Unit\u00e9ble'), ue('ix_Unit\u00e9ble')),
+ ('no_has_table_2', ue('col_\u6e2c\u8a66'), ue('ix_\u6e2c\u8a66')),
]
no_case_sensitivity = [
- (u'\u6e2c\u8a66', u'col_\u6e2c\u8a66', u'ix_\u6e2c\u8a66'),
- (u'unit\u00e9ble', u'col_unit\u00e9ble', u'ix_unit\u00e9ble'),
+ (ue('\u6e2c\u8a66'), ue('col_\u6e2c\u8a66'), ue('ix_\u6e2c\u8a66')),
+ (ue('unit\u00e9ble'), ue('col_unit\u00e9ble'), ue('ix_unit\u00e9ble')),
]
full = [
- (u'Unit\u00e9ble', u'col_Unit\u00e9ble', u'ix_Unit\u00e9ble'),
- (u'\u6e2c\u8a66', u'col_\u6e2c\u8a66', u'ix_\u6e2c\u8a66'),
+ (ue('Unit\u00e9ble'), ue('col_Unit\u00e9ble'), ue('ix_Unit\u00e9ble')),
+ (ue('\u6e2c\u8a66'), ue('col_\u6e2c\u8a66'), ue('ix_\u6e2c\u8a66')),
]
# as you can see, our options for this kind of thing
diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py
index 5558ff778..2d7f39253 100644
--- a/test/engine/test_transaction.py
+++ b/test/engine/test_transaction.py
@@ -74,8 +74,8 @@ class TransactionTest(fixtures.TestBase):
connection.execute(users.insert(), user_id=1, user_name='user3')
transaction.commit()
assert False
- except Exception , e:
- print "Exception: ", e
+ except Exception as e:
+ print("Exception: ", e)
transaction.rollback()
result = connection.execute("select * from query_users")
@@ -121,10 +121,10 @@ class TransactionTest(fixtures.TestBase):
trans2.rollback()
raise
transaction.rollback()
- except Exception, e:
+ except Exception as e:
transaction.rollback()
raise
- except Exception, e:
+ except Exception as e:
try:
assert str(e) == 'uh oh' # and not "This transaction is
# inactive"
@@ -167,7 +167,7 @@ class TransactionTest(fixtures.TestBase):
connection.execute(users.insert(), user_id=2, user_name='user2')
try:
connection.execute(users.insert(), user_id=2, user_name='user2.5')
- except Exception, e:
+ except Exception as e:
trans.__exit__(*sys.exc_info())
assert not trans.is_active
@@ -1019,7 +1019,7 @@ class ForUpdateTest(fixtures.TestBase):
con = testing.db.connect()
sel = counters.select(for_update=update_style,
whereclause=counters.c.counter_id == 1)
- for i in xrange(count):
+ for i in range(count):
trans = con.begin()
try:
existing = con.execute(sel).first()
@@ -1033,7 +1033,7 @@ class ForUpdateTest(fixtures.TestBase):
raise AssertionError('Got %s post-update, expected '
'%s' % (readback['counter_value'], incr))
trans.commit()
- except Exception, e:
+ except Exception as e:
trans.rollback()
errors.append(e)
break
@@ -1057,7 +1057,7 @@ class ForUpdateTest(fixtures.TestBase):
db.execute(counters.insert(), counter_id=1, counter_value=0)
iterations, thread_count = 10, 5
threads, errors = [], []
- for i in xrange(thread_count):
+ for i in range(thread_count):
thrd = threading.Thread(target=self.increment,
args=(iterations, ),
kwargs={'errors': errors,
@@ -1088,7 +1088,7 @@ class ForUpdateTest(fixtures.TestBase):
rows = con.execute(sel).fetchall()
time.sleep(0.25)
trans.commit()
- except Exception, e:
+ except Exception as e:
trans.rollback()
errors.append(e)
con.close()
@@ -1105,7 +1105,7 @@ class ForUpdateTest(fixtures.TestBase):
db.execute(counters.insert(), counter_id=cid + 1,
counter_value=0)
errors, threads = [], []
- for i in xrange(thread_count):
+ for i in range(thread_count):
thrd = threading.Thread(target=self.overlap,
args=(groups.pop(0), errors,
update_style))
diff --git a/test/ext/declarative/test_basic.py b/test/ext/declarative/test_basic.py
index 0fe54a154..ad2970b70 100644
--- a/test/ext/declarative/test_basic.py
+++ b/test/ext/declarative/test_basic.py
@@ -13,7 +13,7 @@ from sqlalchemy.orm import relationship, create_session, class_mapper, \
deferred, column_property, composite,\
Session
from sqlalchemy.testing import eq_
-from sqlalchemy.util import classproperty
+from sqlalchemy.util import classproperty, with_metaclass
from sqlalchemy.ext.declarative import declared_attr, AbstractConcreteBase, \
ConcreteBase, synonym_for
from sqlalchemy.testing import fixtures
@@ -127,14 +127,13 @@ class DeclarativeTest(DeclarativeTestBase):
class BrokenMeta(type):
def __getattribute__(self, attr):
if attr == 'xyzzy':
- raise AttributeError, 'xyzzy'
+ raise AttributeError('xyzzy')
else:
return object.__getattribute__(self,attr)
# even though this class has an xyzzy attribute, getattr(cls,"xyzzy")
# fails
- class BrokenParent(object):
- __metaclass__ = BrokenMeta
+ class BrokenParent(with_metaclass(BrokenMeta)):
xyzzy = "magic"
# _as_declarative() inspects obj.__class__.__bases__
@@ -275,7 +274,7 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'foo'
id = Column(Integer, primary_key=True)
- _user_id = Column(Integer)
+ _user_id = Column(Integer)
rel = relationship('User',
uselist=False,
foreign_keys=[User.id],
@@ -1458,12 +1457,12 @@ def _produce_test(inline, stringbased):
@classmethod
def insert_data(cls):
- params = [dict(zip(('id', 'name'), column_values))
+ params = [dict(list(zip(('id', 'name'), column_values)))
for column_values in [(7, 'jack'), (8, 'ed'), (9,
'fred'), (10, 'chuck')]]
User.__table__.insert().execute(params)
- Address.__table__.insert().execute([dict(zip(('id',
- 'user_id', 'email'), column_values))
+ Address.__table__.insert().execute([dict(list(zip(('id',
+ 'user_id', 'email'), column_values)))
for column_values in [(1, 7, 'jack@bean.com'), (2,
8, 'ed@wood.com'), (3, 8, 'ed@bettyboop.com'), (4,
8, 'ed@lala.com'), (5, 9, 'fred@fred.com')]])
@@ -1492,6 +1491,6 @@ def _produce_test(inline, stringbased):
for inline in True, False:
for stringbased in True, False:
testclass = _produce_test(inline, stringbased)
- exec '%s = testclass' % testclass.__name__
+ exec('%s = testclass' % testclass.__name__)
del testclass
diff --git a/test/ext/declarative/test_clsregistry.py b/test/ext/declarative/test_clsregistry.py
index 1d09f158e..e78a1abbe 100644
--- a/test/ext/declarative/test_clsregistry.py
+++ b/test/ext/declarative/test_clsregistry.py
@@ -124,7 +124,7 @@ class ClsRegistryTest(fixtures.TestBase):
base = weakref.WeakValueDictionary()
- for i in xrange(3):
+ for i in range(3):
f1 = MockClass(base, "foo.bar.Foo")
f2 = MockClass(base, "foo.alt.Foo")
clsregistry.add_class("Foo", f1)
diff --git a/test/ext/declarative/test_inheritance.py b/test/ext/declarative/test_inheritance.py
index 1a3a4aba5..01bf3f3f6 100644
--- a/test/ext/declarative/test_inheritance.py
+++ b/test/ext/declarative/test_inheritance.py
@@ -573,7 +573,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
primary_language = Column(String(50))
assert Person.__table__.c.golf_swing is not None
- assert not Person.__table__.c.has_key('primary_language')
+ assert 'primary_language' not in Person.__table__.c
assert Engineer.__table__.c.primary_language is not None
assert Engineer.primary_language is not None
assert Manager.golf_swing is not None
diff --git a/test/ext/declarative/test_mixin.py b/test/ext/declarative/test_mixin.py
index fb674f27a..d8ec484e1 100644
--- a/test/ext/declarative/test_mixin.py
+++ b/test/ext/declarative/test_mixin.py
@@ -685,7 +685,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
__mapper_args__ = dict(polymorphic_identity='specific')
assert Specific.__table__ is Generic.__table__
- eq_(Generic.__table__.c.keys(), ['id', 'type', 'value'])
+ eq_(list(Generic.__table__.c.keys()), ['id', 'type', 'value'])
assert class_mapper(Specific).polymorphic_on \
is Generic.__table__.c.type
eq_(class_mapper(Specific).polymorphic_identity, 'specific')
@@ -714,9 +714,9 @@ class DeclarativeMixinTest(DeclarativeTestBase):
eq_(Generic.__table__.name, 'generic')
eq_(Specific.__table__.name, 'specific')
- eq_(Generic.__table__.c.keys(), ['timestamp', 'id',
+ eq_(list(Generic.__table__.c.keys()), ['timestamp', 'id',
'python_type'])
- eq_(Specific.__table__.c.keys(), ['id'])
+ eq_(list(Specific.__table__.c.keys()), ['id'])
eq_(Generic.__table__.kwargs, {'mysql_engine': 'InnoDB'})
eq_(Specific.__table__.kwargs, {'mysql_engine': 'InnoDB'})
@@ -749,12 +749,12 @@ class DeclarativeMixinTest(DeclarativeTestBase):
primary_key=True)
eq_(BaseType.__table__.name, 'basetype')
- eq_(BaseType.__table__.c.keys(), ['timestamp', 'type', 'id',
+ eq_(list(BaseType.__table__.c.keys()), ['timestamp', 'type', 'id',
'value'])
eq_(BaseType.__table__.kwargs, {'mysql_engine': 'InnoDB'})
assert Single.__table__ is BaseType.__table__
eq_(Joined.__table__.name, 'joined')
- eq_(Joined.__table__.c.keys(), ['id'])
+ eq_(list(Joined.__table__.c.keys()), ['id'])
eq_(Joined.__table__.kwargs, {'mysql_engine': 'InnoDB'})
def test_col_copy_vs_declared_attr_joined_propagation(self):
@@ -839,7 +839,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
__mapper_args__ = dict(polymorphic_identity='specific')
eq_(BaseType.__table__.name, 'basetype')
- eq_(BaseType.__table__.c.keys(), ['type', 'id', 'value'])
+ eq_(list(BaseType.__table__.c.keys()), ['type', 'id', 'value'])
assert Specific.__table__ is BaseType.__table__
assert class_mapper(Specific).polymorphic_on \
is BaseType.__table__.c.type
@@ -870,9 +870,9 @@ class DeclarativeMixinTest(DeclarativeTestBase):
primary_key=True)
eq_(BaseType.__table__.name, 'basetype')
- eq_(BaseType.__table__.c.keys(), ['type', 'id', 'value'])
+ eq_(list(BaseType.__table__.c.keys()), ['type', 'id', 'value'])
eq_(Specific.__table__.name, 'specific')
- eq_(Specific.__table__.c.keys(), ['id'])
+ eq_(list(Specific.__table__.c.keys()), ['id'])
def test_single_back_propagate(self):
@@ -891,7 +891,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
__mapper_args__ = dict(polymorphic_identity='specific')
- eq_(BaseType.__table__.c.keys(), ['type', 'id', 'timestamp'])
+ eq_(list(BaseType.__table__.c.keys()), ['type', 'id', 'timestamp'])
def test_table_in_model_and_same_column_in_mixin(self):
@@ -987,7 +987,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True)
__tablename__ = 'model'
- eq_(Model.__table__.c.keys(), ['col1', 'col3', 'col2', 'col4',
+ eq_(list(Model.__table__.c.keys()), ['col1', 'col3', 'col2', 'col4',
'id'])
def test_honor_class_mro_one(self):
@@ -1078,8 +1078,8 @@ class DeclarativeMixinTest(DeclarativeTestBase):
filter_class = FilterB
id = Column(Integer(), primary_key=True)
- TypeA(filters=[u'foo'])
- TypeB(filters=[u'foo'])
+ TypeA(filters=['foo'])
+ TypeB(filters=['foo'])
class DeclarativeMixinPropertyTest(DeclarativeTestBase):
diff --git a/test/ext/test_associationproxy.py b/test/ext/test_associationproxy.py
index c15b98bf2..a5fcc45cc 100644
--- a/test/ext/test_associationproxy.py
+++ b/test/ext/test_associationproxy.py
@@ -288,7 +288,7 @@ class CustomDictTest(DictTest):
ch = Child('a', 'regular')
p1._children.append(ch)
- self.assert_(ch in p1._children.values())
+ self.assert_(ch in list(p1._children.values()))
self.assert_(len(p1._children) == 1)
self.assert_(p1.children)
@@ -300,7 +300,7 @@ class CustomDictTest(DictTest):
p1.children['b'] = 'proxied'
- self.assert_('proxied' in p1.children.values())
+ self.assert_('proxied' in list(p1.children.values()))
self.assert_('b' in p1.children)
self.assert_('proxied' not in p1._children)
self.assert_(len(p1.children) == 2)
@@ -526,9 +526,9 @@ class SetTest(_CollectionOperations):
try:
self.assert_(p.children == control)
except:
- print 'Test %s.%s(%s):' % (set(base), op, other)
- print 'want', repr(control)
- print 'got', repr(p.children)
+ print('Test %s.%s(%s):' % (set(base), op, other))
+ print('want', repr(control))
+ print('got', repr(p.children))
raise
p = self.roundtrip(p)
@@ -536,9 +536,9 @@ class SetTest(_CollectionOperations):
try:
self.assert_(p.children == control)
except:
- print 'Test %s.%s(%s):' % (base, op, other)
- print 'want', repr(control)
- print 'got', repr(p.children)
+ print('Test %s.%s(%s):' % (base, op, other))
+ print('want', repr(control))
+ print('got', repr(p.children))
raise
# in-place mutations
@@ -553,15 +553,15 @@ class SetTest(_CollectionOperations):
p.children = base[:]
control = set(base[:])
- exec "p.children %s other" % op
- exec "control %s other" % op
+ exec("p.children %s other" % op)
+ exec("control %s other" % op)
try:
self.assert_(p.children == control)
except:
- print 'Test %s %s %s:' % (set(base), op, other)
- print 'want', repr(control)
- print 'got', repr(p.children)
+ print('Test %s %s %s:' % (set(base), op, other))
+ print('want', repr(control))
+ print('got', repr(p.children))
raise
p = self.roundtrip(p)
@@ -569,9 +569,9 @@ class SetTest(_CollectionOperations):
try:
self.assert_(p.children == control)
except:
- print 'Test %s %s %s:' % (base, op, other)
- print 'want', repr(control)
- print 'got', repr(p.children)
+ print('Test %s %s %s:' % (base, op, other))
+ print('want', repr(control))
+ print('got', repr(p.children))
raise
diff --git a/test/ext/test_serializer.py b/test/ext/test_serializer.py
index 74e033b4f..84fff1304 100644
--- a/test/ext/test_serializer.py
+++ b/test/ext/test_serializer.py
@@ -47,12 +47,12 @@ class SerializeTest(fixtures.MappedTest):
@classmethod
def insert_data(cls):
- params = [dict(zip(('id', 'name'), column_values))
+ params = [dict(list(zip(('id', 'name'), column_values)))
for column_values in [(7, 'jack'), (8, 'ed'), (9,
'fred'), (10, 'chuck')]]
users.insert().execute(params)
- addresses.insert().execute([dict(zip(('id', 'user_id', 'email'
- ), column_values))
+ addresses.insert().execute([dict(list(zip(('id', 'user_id', 'email'
+ ), column_values)))
for column_values in [(1, 7,
'jack@bean.com'), (2, 8,
'ed@wood.com'), (3, 8,
@@ -85,8 +85,8 @@ class SerializeTest(fixtures.MappedTest):
users.metadata, None)
eq_(str(expr), str(re_expr))
assert re_expr.bind is testing.db
- eq_(re_expr.execute().fetchall(), [(7, u'jack'), (8, u'ed'),
- (8, u'ed'), (8, u'ed'), (9, u'fred')])
+ eq_(re_expr.execute().fetchall(), [(7, 'jack'), (8, 'ed'),
+ (8, 'ed'), (8, 'ed'), (9, 'fred')])
def test_query_one(self):
q = Session.query(User).\
@@ -114,7 +114,7 @@ class SerializeTest(fixtures.MappedTest):
Address(email='ed@lala.com'),
Address(email='ed@bettyboop.com')])
- @testing.skip_if(lambda: util.pypy, "problems with pypy pickle reported")
+ @testing.requires.non_broken_pickle
def test_query_two(self):
q = \
Session.query(User).join(User.addresses).\
@@ -122,9 +122,9 @@ class SerializeTest(fixtures.MappedTest):
q2 = serializer.loads(serializer.dumps(q, -1), users.metadata,
Session)
eq_(q2.all(), [User(name='fred')])
- eq_(list(q2.values(User.id, User.name)), [(9, u'fred')])
+ eq_(list(q2.values(User.id, User.name)), [(9, 'fred')])
- @testing.skip_if(lambda: util.pypy, "problems with pypy pickle reported")
+ @testing.requires.non_broken_pickle
def test_query_three(self):
ua = aliased(User)
q = \
@@ -136,9 +136,9 @@ class SerializeTest(fixtures.MappedTest):
# try to pull out the aliased entity here...
ua_2 = q2._entities[0].entity_zero.entity
- eq_(list(q2.values(ua_2.id, ua_2.name)), [(9, u'fred')])
+ eq_(list(q2.values(ua_2.id, ua_2.name)), [(9, 'fred')])
- @testing.skip_if(lambda: util.pypy, "problems with pypy pickle reported")
+ @testing.requires.non_broken_pickle
def test_orm_join(self):
from sqlalchemy.orm.util import join
@@ -165,8 +165,7 @@ class SerializeTest(fixtures.MappedTest):
eq_(list(q2.all()), [(u7, u8), (u7, u9), (u7, u10), (u8, u9),
(u8, u10)])
- @testing.skip_if(lambda: util.pypy, "pickle sometimes has "
- "problems here, sometimes not")
+ @testing.requires.non_broken_pickle
def test_any(self):
r = User.addresses.any(Address.email == 'x')
ser = serializer.dumps(r, -1)
diff --git a/test/orm/inheritance/test_assorted_poly.py b/test/orm/inheritance/test_assorted_poly.py
index e8e6ba82a..d05a22f39 100644
--- a/test/orm/inheritance/test_assorted_poly.py
+++ b/test/orm/inheritance/test_assorted_poly.py
@@ -18,7 +18,7 @@ from sqlalchemy.testing.schema import Table, Column
class AttrSettable(object):
def __init__(self, **kwargs):
- [setattr(self, k, v) for k, v in kwargs.iteritems()]
+ [setattr(self, k, v) for k, v in kwargs.items()]
def __repr__(self):
return self.__class__.__name__ + "(%s)" % (hex(id(self)))
@@ -386,7 +386,7 @@ class RelationshipTest4(fixtures.MappedTest):
# class definitions
class Person(object):
def __init__(self, **kwargs):
- for key, value in kwargs.iteritems():
+ for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return "Ordinary person %s" % self.name
@@ -400,7 +400,7 @@ class RelationshipTest4(fixtures.MappedTest):
(self.name, self.longer_status)
class Car(object):
def __init__(self, **kwargs):
- for key, value in kwargs.iteritems():
+ for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return "Car number %d" % self.car_id
@@ -512,7 +512,7 @@ class RelationshipTest5(fixtures.MappedTest):
class Person(object):
def __init__(self, **kwargs):
- for key, value in kwargs.iteritems():
+ for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return "Ordinary person %s" % self.name
@@ -526,7 +526,7 @@ class RelationshipTest5(fixtures.MappedTest):
(self.name, self.longer_status)
class Car(object):
def __init__(self, **kwargs):
- for key, value in kwargs.iteritems():
+ for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return "Car number %d" % self.car_id
@@ -646,7 +646,7 @@ class RelationshipTest7(fixtures.MappedTest):
class PersistentObject(object):
def __init__(self, **kwargs):
- for key, value in kwargs.iteritems():
+ for key, value in kwargs.items():
setattr(self, key, value)
class Status(PersistentObject):
@@ -838,7 +838,7 @@ class GenerativeTest(fixtures.TestBase, AssertsExecutionResults):
# class definitions
class PersistentObject(object):
def __init__(self, **kwargs):
- for key, value in kwargs.iteritems():
+ for key, value in kwargs.items():
setattr(self, key, value)
class Status(PersistentObject):
def __repr__(self):
@@ -968,7 +968,7 @@ class MultiLevelTest(fixtures.MappedTest):
def test_threelevels(self):
class Employee( object):
def set( me, **kargs):
- for k,v in kargs.iteritems(): setattr( me, k, v)
+ for k,v in kargs.items(): setattr( me, k, v)
return me
def __str__(me):
return str(me.__class__.__name__)+':'+str(me.name)
diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py
index bbfa54383..afd63f2b4 100644
--- a/test/orm/inheritance/test_basic.py
+++ b/test/orm/inheritance/test_basic.py
@@ -1355,7 +1355,7 @@ class SyncCompileTest(fixtures.MappedTest):
def _do_test(self, j1, j2):
class A(object):
def __init__(self, **kwargs):
- for key, value in kwargs.items():
+ for key, value in list(kwargs.items()):
setattr(self, key, value)
class B(A):
@@ -1830,7 +1830,7 @@ class OptimizedLoadTest(fixtures.MappedTest):
"SELECT sub.counter AS sub_counter, base.counter AS base_counter, "
"sub.counter2 AS sub_counter2 FROM base JOIN sub ON "
"base.id = sub.id WHERE base.id = :param_1",
- lambda ctx:{u'param_1': s1.id}
+ lambda ctx:{'param_1': s1.id}
),
)
@@ -1910,7 +1910,7 @@ class OptimizedLoadTest(fixtures.MappedTest):
"SELECT subsub.counter2 AS subsub_counter2, "
"sub.counter2 AS sub_counter2 FROM subsub, sub "
"WHERE :param_1 = sub.id AND sub.id = subsub.id",
- lambda ctx:{u'param_1': s1.id}
+ lambda ctx:{'param_1': s1.id}
),
)
@@ -2409,9 +2409,9 @@ class NameConflictTest(fixtures.MappedTest):
polymorphic_identity='foo')
sess = create_session()
f = Foo()
- f.content_type = u'bar'
+ f.content_type = 'bar'
sess.add(f)
sess.flush()
f_id = f.id
sess.expunge_all()
- assert sess.query(Content).get(f_id).content_type == u'bar'
+ assert sess.query(Content).get(f_id).content_type == 'bar'
diff --git a/test/orm/inheritance/test_concrete.py b/test/orm/inheritance/test_concrete.py
index 40c3ab31b..573913f74 100644
--- a/test/orm/inheritance/test_concrete.py
+++ b/test/orm/inheritance/test_concrete.py
@@ -711,21 +711,21 @@ class ColKeysTest(fixtures.MappedTest):
refugees_table = Table('refugee', metadata, Column('refugee_fid'
, Integer, primary_key=True,
test_needs_autoincrement=True),
- Column('refugee_name', Unicode(30),
+ Column('refugee_name', String(30),
key='name'))
offices_table = Table('office', metadata, Column('office_fid',
Integer, primary_key=True,
test_needs_autoincrement=True),
- Column('office_name', Unicode(30),
+ Column('office_name', String(30),
key='name'))
@classmethod
def insert_data(cls):
refugees_table.insert().execute(dict(refugee_fid=1,
- name=u'refugee1'), dict(refugee_fid=2, name=u'refugee2'
+ name='refugee1'), dict(refugee_fid=2, name='refugee2'
))
offices_table.insert().execute(dict(office_fid=1,
- name=u'office1'), dict(office_fid=2, name=u'office2'))
+ name='office1'), dict(office_fid=2, name='office2'))
def test_keys(self):
pjoin = polymorphic_union({'refugee': refugees_table, 'office'
diff --git a/test/orm/inheritance/test_magazine.py b/test/orm/inheritance/test_magazine.py
index ae5aa8c8d..b459a01ae 100644
--- a/test/orm/inheritance/test_magazine.py
+++ b/test/orm/inheritance/test_magazine.py
@@ -8,7 +8,7 @@ from sqlalchemy.testing.schema import Table, Column
class BaseObject(object):
def __init__(self, *args, **kwargs):
- for key, value in kwargs.iteritems():
+ for key, value in kwargs.items():
setattr(self, key, value)
class Publication(BaseObject):
pass
@@ -200,15 +200,15 @@ def _generate_round_trip_test(use_unions=False, use_joins=False):
session.flush()
- print [x for x in session]
+ print([x for x in session])
session.expunge_all()
session.flush()
session.expunge_all()
p = session.query(Publication).filter(Publication.name=="Test").one()
- print p.issues[0].locations[0].magazine.pages
- print [page, page2, page3]
+ print(p.issues[0].locations[0].magazine.pages)
+ print([page, page2, page3])
assert repr(p.issues[0].locations[0].magazine.pages) == repr([page, page2, page3]), repr(p.issues[0].locations[0].magazine.pages)
test_roundtrip = function_named(
diff --git a/test/orm/inheritance/test_manytomany.py b/test/orm/inheritance/test_manytomany.py
index e4df5d133..31c4ba40a 100644
--- a/test/orm/inheritance/test_manytomany.py
+++ b/test/orm/inheritance/test_manytomany.py
@@ -42,7 +42,7 @@ class InheritTest(fixtures.MappedTest):
def testbasic(self):
class Principal(object):
def __init__(self, **kwargs):
- for key, value in kwargs.iteritems():
+ for key, value in kwargs.items():
setattr(self, key, value)
class User(Principal):
@@ -94,8 +94,8 @@ class InheritTest2(fixtures.MappedTest):
mapper(Foo, foo)
mapper(Bar, bar, inherits=Foo)
- print foo.join(bar).primary_key
- print class_mapper(Bar).primary_key
+ print(foo.join(bar).primary_key)
+ print(class_mapper(Bar).primary_key)
b = Bar('somedata')
sess = create_session()
sess.add(b)
@@ -133,8 +133,8 @@ class InheritTest2(fixtures.MappedTest):
sess.expunge_all()
l = sess.query(Bar).all()
- print l[0]
- print l[0].foos
+ print(l[0])
+ print(l[0].foos)
self.assert_unordered_result(l, Bar,
# {'id':1, 'data':'barfoo', 'bid':1, 'foos':(Foo, [{'id':2,'data':'subfoo1'}, {'id':3,'data':'subfoo2'}])},
{'id':b.id, 'data':'barfoo', 'foos':(Foo, [{'id':f1.id,'data':'subfoo1'}, {'id':f2.id,'data':'subfoo2'}])},
@@ -197,7 +197,7 @@ class InheritTest3(fixtures.MappedTest):
compare = [repr(b)] + sorted([repr(o) for o in b.foos])
sess.expunge_all()
l = sess.query(Bar).all()
- print repr(l[0]) + repr(l[0].foos)
+ print(repr(l[0]) + repr(l[0].foos))
found = [repr(l[0])] + sorted([repr(o) for o in l[0].foos])
eq_(found, compare)
@@ -239,11 +239,11 @@ class InheritTest3(fixtures.MappedTest):
sess.expunge_all()
l = sess.query(Blub).all()
- print l
+ print(l)
self.assert_(repr(l[0]) == compare)
sess.expunge_all()
x = sess.query(Blub).filter_by(id=blubid).one()
- print x
+ print(x)
self.assert_(repr(x) == compare)
diff --git a/test/orm/inheritance/test_polymorphic_rel.py b/test/orm/inheritance/test_polymorphic_rel.py
index 1b9acb787..8c1f22114 100644
--- a/test/orm/inheritance/test_polymorphic_rel.py
+++ b/test/orm/inheritance/test_polymorphic_rel.py
@@ -7,7 +7,7 @@ from sqlalchemy import exc as sa_exc
from sqlalchemy import testing
from sqlalchemy.testing import assert_raises, eq_
-from _poly_fixtures import Company, Person, Engineer, Manager, Boss, \
+from ._poly_fixtures import Company, Person, Engineer, Manager, Boss, \
Machine, Paperwork, _Polymorphic,\
_PolymorphicPolymorphic, _PolymorphicUnions, _PolymorphicJoins,\
_PolymorphicAliasedJoins
@@ -118,24 +118,24 @@ class _PolymorphicTestBase(object):
eq_(q.count(), 1)
eq_(q.all(), [
(
- Company(company_id=1, name=u'MegaCorp, Inc.'),
+ Company(company_id=1, name='MegaCorp, Inc.'),
Engineer(
- status=u'regular engineer',
- engineer_name=u'dilbert',
- name=u'dilbert',
+ status='regular engineer',
+ engineer_name='dilbert',
+ name='dilbert',
company_id=1,
- primary_language=u'java',
+ primary_language='java',
person_id=1,
- type=u'engineer'),
- Company(company_id=1, name=u'MegaCorp, Inc.'),
+ type='engineer'),
+ Company(company_id=1, name='MegaCorp, Inc.'),
Engineer(
- status=u'regular engineer',
- engineer_name=u'wally',
- name=u'wally',
+ status='regular engineer',
+ engineer_name='wally',
+ name='wally',
company_id=1,
- primary_language=u'c++',
+ primary_language='c++',
person_id=2,
- type=u'engineer')
+ type='engineer')
)
])
@@ -337,8 +337,8 @@ class _PolymorphicTestBase(object):
sess = create_session()
expected = [
- (u'dogbert',),
- (u'pointy haired boss',)]
+ ('dogbert',),
+ ('pointy haired boss',)]
eq_(sess.query(Manager.name)
.order_by(Manager.name).all(),
expected)
@@ -346,9 +346,9 @@ class _PolymorphicTestBase(object):
def test_join_from_columns_or_subclass_two(self):
sess = create_session()
expected = [
- (u'dogbert',),
- (u'dogbert',),
- (u'pointy haired boss',)]
+ ('dogbert',),
+ ('dogbert',),
+ ('pointy haired boss',)]
eq_(sess.query(Manager.name)
.join(Paperwork, Manager.paperwork)
.order_by(Manager.name).all(),
@@ -357,14 +357,14 @@ class _PolymorphicTestBase(object):
def test_join_from_columns_or_subclass_three(self):
sess = create_session()
expected = [
- (u'dilbert',),
- (u'dilbert',),
- (u'dogbert',),
- (u'dogbert',),
- (u'pointy haired boss',),
- (u'vlad',),
- (u'wally',),
- (u'wally',)]
+ ('dilbert',),
+ ('dilbert',),
+ ('dogbert',),
+ ('dogbert',),
+ ('pointy haired boss',),
+ ('vlad',),
+ ('wally',),
+ ('wally',)]
eq_(sess.query(Person.name)
.join(Paperwork, Person.paperwork)
.order_by(Person.name).all(),
@@ -375,14 +375,14 @@ class _PolymorphicTestBase(object):
# Load Person.name, joining from Person -> paperwork, get all
# the people.
expected = [
- (u'dilbert',),
- (u'dilbert',),
- (u'dogbert',),
- (u'dogbert',),
- (u'pointy haired boss',),
- (u'vlad',),
- (u'wally',),
- (u'wally',)]
+ ('dilbert',),
+ ('dilbert',),
+ ('dogbert',),
+ ('dogbert',),
+ ('pointy haired boss',),
+ ('vlad',),
+ ('wally',),
+ ('wally',)]
eq_(sess.query(Person.name)
.join(paperwork,
Person.person_id == paperwork.c.person_id)
@@ -393,9 +393,9 @@ class _PolymorphicTestBase(object):
sess = create_session()
# same, on manager. get only managers.
expected = [
- (u'dogbert',),
- (u'dogbert',),
- (u'pointy haired boss',)]
+ ('dogbert',),
+ ('dogbert',),
+ ('pointy haired boss',)]
eq_(sess.query(Manager.name)
.join(paperwork,
Manager.person_id == paperwork.c.person_id)
@@ -418,14 +418,14 @@ class _PolymorphicTestBase(object):
# here, it joins to the full result set. This is 0.6's
# behavior and is more or less wrong.
expected = [
- (u'dilbert',),
- (u'dilbert',),
- (u'dogbert',),
- (u'dogbert',),
- (u'pointy haired boss',),
- (u'vlad',),
- (u'wally',),
- (u'wally',)]
+ ('dilbert',),
+ ('dilbert',),
+ ('dogbert',),
+ ('dogbert',),
+ ('pointy haired boss',),
+ ('vlad',),
+ ('wally',),
+ ('wally',)]
eq_(sess.query(Person.name)
.join(paperwork,
Manager.person_id == paperwork.c.person_id)
@@ -435,9 +435,9 @@ class _PolymorphicTestBase(object):
# when a join is present and managers.person_id is available,
# you get the managers.
expected = [
- (u'dogbert',),
- (u'dogbert',),
- (u'pointy haired boss',)]
+ ('dogbert',),
+ ('dogbert',),
+ ('pointy haired boss',)]
eq_(sess.query(Person.name)
.join(paperwork,
Manager.person_id == paperwork.c.person_id)
@@ -454,9 +454,9 @@ class _PolymorphicTestBase(object):
def test_join_from_columns_or_subclass_eight(self):
sess = create_session()
expected = [
- (u'dogbert',),
- (u'dogbert',),
- (u'pointy haired boss',)]
+ ('dogbert',),
+ ('dogbert',),
+ ('pointy haired boss',)]
eq_(sess.query(Manager.name)
.join(paperwork,
Manager.person_id == paperwork.c.person_id)
@@ -474,9 +474,9 @@ class _PolymorphicTestBase(object):
def test_join_from_columns_or_subclass_ten(self):
sess = create_session()
expected = [
- (u'pointy haired boss', u'review #1'),
- (u'dogbert', u'review #2'),
- (u'dogbert', u'review #3')]
+ ('pointy haired boss', 'review #1'),
+ ('dogbert', 'review #2'),
+ ('dogbert', 'review #3')]
eq_(sess.query(Manager.name, Paperwork.description)
.join(Paperwork,
Manager.person_id == Paperwork.person_id)
@@ -486,9 +486,9 @@ class _PolymorphicTestBase(object):
def test_join_from_columns_or_subclass_eleven(self):
sess = create_session()
expected = [
- (u'pointy haired boss',),
- (u'dogbert',),
- (u'dogbert',)]
+ ('pointy haired boss',),
+ ('dogbert',),
+ ('dogbert',)]
malias = aliased(Manager)
eq_(sess.query(malias.name)
.join(paperwork,
@@ -977,32 +977,32 @@ class _PolymorphicTestBase(object):
expected = [
(Engineer(
- status=u'regular engineer',
- engineer_name=u'dilbert',
- name=u'dilbert',
+ status='regular engineer',
+ engineer_name='dilbert',
+ name='dilbert',
company_id=1,
- primary_language=u'java',
+ primary_language='java',
person_id=1,
- type=u'engineer'),
- u'MegaCorp, Inc.'),
+ type='engineer'),
+ 'MegaCorp, Inc.'),
(Engineer(
- status=u'regular engineer',
- engineer_name=u'wally',
- name=u'wally',
+ status='regular engineer',
+ engineer_name='wally',
+ name='wally',
company_id=1,
- primary_language=u'c++',
+ primary_language='c++',
person_id=2,
- type=u'engineer'),
- u'MegaCorp, Inc.'),
+ type='engineer'),
+ 'MegaCorp, Inc.'),
(Engineer(
- status=u'elbonian engineer',
- engineer_name=u'vlad',
- name=u'vlad',
+ status='elbonian engineer',
+ engineer_name='vlad',
+ name='vlad',
company_id=2,
- primary_language=u'cobol',
+ primary_language='cobol',
person_id=5,
- type=u'engineer'),
- u'Elbonia, Inc.')]
+ type='engineer'),
+ 'Elbonia, Inc.')]
eq_(sess.query(Engineer, Company.name)
.join(Company.employees)
.filter(Person.type == 'engineer').all(),
@@ -1011,9 +1011,9 @@ class _PolymorphicTestBase(object):
def test_mixed_entities_two(self):
sess = create_session()
expected = [
- (u'java', u'MegaCorp, Inc.'),
- (u'cobol', u'Elbonia, Inc.'),
- (u'c++', u'MegaCorp, Inc.')]
+ ('java', 'MegaCorp, Inc.'),
+ ('cobol', 'Elbonia, Inc.'),
+ ('c++', 'MegaCorp, Inc.')]
eq_(sess.query(Engineer.primary_language, Company.name)
.join(Company.employees)
.filter(Person.type == 'engineer')
@@ -1025,19 +1025,19 @@ class _PolymorphicTestBase(object):
palias = aliased(Person)
expected = [(
Engineer(
- status=u'elbonian engineer',
- engineer_name=u'vlad',
- name=u'vlad',
- primary_language=u'cobol'),
- u'Elbonia, Inc.',
+ status='elbonian engineer',
+ engineer_name='vlad',
+ name='vlad',
+ primary_language='cobol'),
+ 'Elbonia, Inc.',
Engineer(
- status=u'regular engineer',
- engineer_name=u'dilbert',
- name=u'dilbert',
+ status='regular engineer',
+ engineer_name='dilbert',
+ name='dilbert',
company_id=1,
- primary_language=u'java',
+ primary_language='java',
person_id=1,
- type=u'engineer'))]
+ type='engineer'))]
eq_(sess.query(Person, Company.name, palias)
.join(Company.employees)
.filter(Company.name == 'Elbonia, Inc.')
@@ -1049,19 +1049,19 @@ class _PolymorphicTestBase(object):
palias = aliased(Person)
expected = [(
Engineer(
- status=u'regular engineer',
- engineer_name=u'dilbert',
- name=u'dilbert',
+ status='regular engineer',
+ engineer_name='dilbert',
+ name='dilbert',
company_id=1,
- primary_language=u'java',
+ primary_language='java',
person_id=1,
- type=u'engineer'),
- u'Elbonia, Inc.',
+ type='engineer'),
+ 'Elbonia, Inc.',
Engineer(
- status=u'elbonian engineer',
- engineer_name=u'vlad',
- name=u'vlad',
- primary_language=u'cobol'),)]
+ status='elbonian engineer',
+ engineer_name='vlad',
+ name='vlad',
+ primary_language='cobol'),)]
eq_(sess.query(palias, Company.name, Person)
.join(Company.employees)
.filter(Company.name == 'Elbonia, Inc.')
@@ -1071,7 +1071,7 @@ class _PolymorphicTestBase(object):
def test_mixed_entities_five(self):
sess = create_session()
palias = aliased(Person)
- expected = [(u'vlad', u'Elbonia, Inc.', u'dilbert')]
+ expected = [('vlad', 'Elbonia, Inc.', 'dilbert')]
eq_(sess.query(Person.name, Company.name, palias.name)
.join(Company.employees)
.filter(Company.name == 'Elbonia, Inc.')
@@ -1082,9 +1082,9 @@ class _PolymorphicTestBase(object):
sess = create_session()
palias = aliased(Person)
expected = [
- (u'manager', u'dogbert', u'engineer', u'dilbert'),
- (u'manager', u'dogbert', u'engineer', u'wally'),
- (u'manager', u'dogbert', u'boss', u'pointy haired boss')]
+ ('manager', 'dogbert', 'engineer', 'dilbert'),
+ ('manager', 'dogbert', 'engineer', 'wally'),
+ ('manager', 'dogbert', 'boss', 'pointy haired boss')]
eq_(sess.query(Person.type, Person.name, palias.type, palias.name)
.filter(Person.company_id == palias.company_id)
.filter(Person.name == 'dogbert')
@@ -1095,14 +1095,14 @@ class _PolymorphicTestBase(object):
def test_mixed_entities_seven(self):
sess = create_session()
expected = [
- (u'dilbert', u'tps report #1'),
- (u'dilbert', u'tps report #2'),
- (u'dogbert', u'review #2'),
- (u'dogbert', u'review #3'),
- (u'pointy haired boss', u'review #1'),
- (u'vlad', u'elbonian missive #3'),
- (u'wally', u'tps report #3'),
- (u'wally', u'tps report #4')]
+ ('dilbert', 'tps report #1'),
+ ('dilbert', 'tps report #2'),
+ ('dogbert', 'review #2'),
+ ('dogbert', 'review #3'),
+ ('pointy haired boss', 'review #1'),
+ ('vlad', 'elbonian missive #3'),
+ ('wally', 'tps report #3'),
+ ('wally', 'tps report #4')]
eq_(sess.query(Person.name, Paperwork.description)
.filter(Person.person_id == Paperwork.person_id)
.order_by(Person.name, Paperwork.description).all(),
@@ -1116,7 +1116,7 @@ class _PolymorphicTestBase(object):
def test_mixed_entities_nine(self):
sess = create_session()
- expected = [(u'Elbonia, Inc.', 1), (u'MegaCorp, Inc.', 4)]
+ expected = [('Elbonia, Inc.', 1), ('MegaCorp, Inc.', 4)]
eq_(sess.query(Company.name, func.count(Person.person_id))
.filter(Company.company_id == Person.company_id)
.group_by(Company.name)
@@ -1125,7 +1125,7 @@ class _PolymorphicTestBase(object):
def test_mixed_entities_ten(self):
sess = create_session()
- expected = [(u'Elbonia, Inc.', 1), (u'MegaCorp, Inc.', 4)]
+ expected = [('Elbonia, Inc.', 1), ('MegaCorp, Inc.', 4)]
eq_(sess.query(Company.name, func.count(Person.person_id))
.join(Company.employees)
.group_by(Company.name)
@@ -1153,14 +1153,14 @@ class _PolymorphicTestBase(object):
def test_mixed_entities_eleven(self):
sess = create_session()
- expected = [(u'java',), (u'c++',), (u'cobol',)]
+ expected = [('java',), ('c++',), ('cobol',)]
eq_(sess.query(Engineer.primary_language)
.filter(Person.type == 'engineer').all(),
expected)
def test_mixed_entities_twelve(self):
sess = create_session()
- expected = [(u'vlad', u'Elbonia, Inc.')]
+ expected = [('vlad', 'Elbonia, Inc.')]
eq_(sess.query(Person.name, Company.name)
.join(Company.employees)
.filter(Company.name == 'Elbonia, Inc.').all(),
@@ -1168,15 +1168,15 @@ class _PolymorphicTestBase(object):
def test_mixed_entities_thirteen(self):
sess = create_session()
- expected = [(u'pointy haired boss', u'fore')]
+ expected = [('pointy haired boss', 'fore')]
eq_(sess.query(Boss.name, Boss.golf_swing).all(), expected)
def test_mixed_entities_fourteen(self):
sess = create_session()
expected = [
- (u'dilbert', u'java'),
- (u'wally', u'c++'),
- (u'vlad', u'cobol')]
+ ('dilbert', 'java'),
+ ('wally', 'c++'),
+ ('vlad', 'cobol')]
eq_(sess.query(Engineer.name, Engineer.primary_language).all(),
expected)
@@ -1184,12 +1184,12 @@ class _PolymorphicTestBase(object):
sess = create_session()
expected = [(
- u'Elbonia, Inc.',
+ 'Elbonia, Inc.',
Engineer(
- status=u'elbonian engineer',
- engineer_name=u'vlad',
- name=u'vlad',
- primary_language=u'cobol'))]
+ status='elbonian engineer',
+ engineer_name='vlad',
+ name='vlad',
+ primary_language='cobol'))]
eq_(sess.query(Company.name, Person)
.join(Company.employees)
.filter(Company.name == 'Elbonia, Inc.').all(),
@@ -1199,11 +1199,11 @@ class _PolymorphicTestBase(object):
sess = create_session()
expected = [(
Engineer(
- status=u'elbonian engineer',
- engineer_name=u'vlad',
- name=u'vlad',
- primary_language=u'cobol'),
- u'Elbonia, Inc.')]
+ status='elbonian engineer',
+ engineer_name='vlad',
+ name='vlad',
+ primary_language='cobol'),
+ 'Elbonia, Inc.')]
eq_(sess.query(Person, Company.name)
.join(Company.employees)
.filter(Company.name == 'Elbonia, Inc.').all(),
diff --git a/test/orm/inheritance/test_productspec.py b/test/orm/inheritance/test_productspec.py
index c1e99e338..3de5e3695 100644
--- a/test/orm/inheritance/test_productspec.py
+++ b/test/orm/inheritance/test_productspec.py
@@ -122,8 +122,8 @@ class InheritTest(fixtures.MappedTest):
a1 = session.query(Product).filter_by(name='a1').one()
new = repr(a1)
- print orig
- print new
+ print(orig)
+ print(new)
assert orig == new == '<Assembly a1> specification=[<SpecLine 1.0 <Product p1>>, <SpecLine 1.0 <Detail d1>>] documents=None'
def testtwo(self):
@@ -153,8 +153,8 @@ class InheritTest(fixtures.MappedTest):
session.flush()
session.expunge_all()
new = repr(session.query(SpecLine).all())
- print orig
- print new
+ print(orig)
+ print(new)
assert orig == new == '[<SpecLine 1.0 <Product p1>>, <SpecLine 1.0 <Detail d1>>]'
def testthree(self):
@@ -206,8 +206,8 @@ class InheritTest(fixtures.MappedTest):
a1 = session.query(Product).filter_by(name='a1').one()
new = repr(a1)
- print orig
- print new
+ print(orig)
+ print(new)
assert orig == new == '<Assembly a1> specification=[<SpecLine 1.0 <Detail d1>>] documents=[<Document doc1>, <RasterDocument doc2>]'
def testfour(self):
@@ -245,8 +245,8 @@ class InheritTest(fixtures.MappedTest):
a1 = session.query(Product).filter_by(name='a1').one()
new = repr(a1)
- print orig
- print new
+ print(orig)
+ print(new)
assert orig == new == '<Assembly a1> specification=None documents=[<RasterDocument doc2>]'
del a1.documents[0]
@@ -312,7 +312,7 @@ class InheritTest(fixtures.MappedTest):
a1 = session.query(Product).filter_by(name='a1').one()
new = repr(a1)
- print orig
- print new
+ print(orig)
+ print(new)
assert orig == new == '<Assembly a1> specification=[<SpecLine 1.0 <Detail d1>>] documents=[<Document doc1>, <RasterDocument doc2>]'
diff --git a/test/orm/inheritance/test_relationship.py b/test/orm/inheritance/test_relationship.py
index 36dbb7d27..809884f52 100644
--- a/test/orm/inheritance/test_relationship.py
+++ b/test/orm/inheritance/test_relationship.py
@@ -287,7 +287,7 @@ class SelfReferentialJ2JSelfTest(fixtures.MappedTest):
def _five_obj_fixture(self):
sess = Session()
e1, e2, e3, e4, e5 = [
- Engineer(name='e%d' % (i + 1)) for i in xrange(5)
+ Engineer(name='e%d' % (i + 1)) for i in range(5)
]
e3.reports_to = e1
e4.reports_to = e2
diff --git a/test/orm/inheritance/test_with_poly.py b/test/orm/inheritance/test_with_poly.py
index 93cccee7a..87251a4c2 100644
--- a/test/orm/inheritance/test_with_poly.py
+++ b/test/orm/inheritance/test_with_poly.py
@@ -11,7 +11,7 @@ from sqlalchemy import testing
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.testing import assert_raises, eq_
-from _poly_fixtures import Company, Person, Engineer, Manager, Boss, \
+from ._poly_fixtures import Company, Person, Engineer, Manager, Boss, \
Machine, Paperwork, _PolymorphicFixtureBase, _Polymorphic,\
_PolymorphicPolymorphic, _PolymorphicUnions, _PolymorphicJoins,\
_PolymorphicAliasedJoins
@@ -37,8 +37,8 @@ class _WithPolymorphicBase(_PolymorphicFixtureBase):
pa.Manager.manager_name=='dogbert')).\
order_by(pa.Engineer.type).all(),
[
- (u'dilbert', u'java', None),
- (u'dogbert', None, u'dogbert'),
+ ('dilbert', 'java', None),
+ ('dogbert', None, 'dogbert'),
]
)
@@ -63,10 +63,10 @@ class _WithPolymorphicBase(_PolymorphicFixtureBase):
)
).order_by(pa.name, pa_alias.name)],
[
- (u'dilbert', Engineer, u'dilbert', Engineer),
- (u'dogbert', Manager, u'pointy haired boss', Boss),
- (u'vlad', Engineer, u'vlad', Engineer),
- (u'wally', Engineer, u'wally', Engineer)
+ ('dilbert', Engineer, 'dilbert', Engineer),
+ ('dogbert', Manager, 'pointy haired boss', Boss),
+ ('vlad', Engineer, 'vlad', Engineer),
+ ('wally', Engineer, 'wally', Engineer)
]
)
@@ -91,10 +91,10 @@ class _WithPolymorphicBase(_PolymorphicFixtureBase):
)
).order_by(pa.name, pa_alias.name)],
[
- (u'dilbert', u'java', u'dilbert', u'java'),
- (u'dogbert', None, u'pointy haired boss', None),
- (u'vlad', u'cobol', u'vlad', u'cobol'),
- (u'wally', u'c++', u'wally', u'c++')
+ ('dilbert', 'java', 'dilbert', 'java'),
+ ('dogbert', None, 'pointy haired boss', None),
+ ('vlad', 'cobol', 'vlad', 'cobol'),
+ ('wally', 'c++', 'wally', 'c++')
]
)
diff --git a/test/orm/test_assorted_eager.py b/test/orm/test_assorted_eager.py
index a1c96bdfa..83fccbf7a 100644
--- a/test/orm/test_assorted_eager.py
+++ b/test/orm/test_assorted_eager.py
@@ -129,7 +129,7 @@ class EagerTest(fixtures.MappedTest):
# 3 "
# not orm style correct query
- print "Obtaining correct results without orm"
+ print("Obtaining correct results without orm")
result = sa.select(
[tests.c.id,categories.c.name],
sa.and_(tests.c.owner_id == 1,
@@ -140,7 +140,7 @@ class EagerTest(fixtures.MappedTest):
tests.c.id == options.c.test_id,
tests.c.owner_id == options.c.owner_id))]
).execute().fetchall()
- eq_(result, [(1, u'Some Category'), (3, u'Some Category')])
+ eq_(result, [(1, 'Some Category'), (3, 'Some Category')])
def test_withoutjoinedload(self):
Thing, tests, options = (self.classes.Thing,
@@ -158,7 +158,7 @@ class EagerTest(fixtures.MappedTest):
options.c.someoption==False))))
result = ["%d %s" % ( t.id,t.category.name ) for t in l]
- eq_(result, [u'1 Some Category', u'3 Some Category'])
+ eq_(result, ['1 Some Category', '3 Some Category'])
def test_withjoinedload(self):
"""
@@ -185,7 +185,7 @@ class EagerTest(fixtures.MappedTest):
options.c.someoption==False))))
result = ["%d %s" % ( t.id,t.category.name ) for t in l]
- eq_(result, [u'1 Some Category', u'3 Some Category'])
+ eq_(result, ['1 Some Category', '3 Some Category'])
def test_dslish(self):
"""test the same as withjoinedload except using generative"""
@@ -203,7 +203,7 @@ class EagerTest(fixtures.MappedTest):
).outerjoin('owner_option')
result = ["%d %s" % ( t.id,t.category.name ) for t in l]
- eq_(result, [u'1 Some Category', u'3 Some Category'])
+ eq_(result, ['1 Some Category', '3 Some Category'])
@testing.crashes('sybase', 'FIXME: unknown, verify not fails_on')
def test_without_outerjoin_literal(self):
@@ -219,7 +219,7 @@ class EagerTest(fixtures.MappedTest):
join('owner_option'))
result = ["%d %s" % ( t.id,t.category.name ) for t in l]
- eq_(result, [u'3 Some Category'])
+ eq_(result, ['3 Some Category'])
def test_withoutouterjoin(self):
Thing, tests, options = (self.classes.Thing,
@@ -234,7 +234,7 @@ class EagerTest(fixtures.MappedTest):
).join('owner_option')
result = ["%d %s" % ( t.id,t.category.name ) for t in l]
- eq_(result, [u'3 Some Category'])
+ eq_(result, ['3 Some Category'])
class EagerTest2(fixtures.MappedTest):
@@ -723,13 +723,13 @@ class EagerTest8(fixtures.MappedTest):
Table('prj', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('created', sa.DateTime ),
- Column('title', sa.Unicode(100)))
+ Column('title', sa.String(100)))
Table('task', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('status_id', Integer,
ForeignKey('task_status.id'), nullable=False),
- Column('title', sa.Unicode(100)),
+ Column('title', sa.String(100)),
Column('task_type_id', Integer ,
ForeignKey('task_type.id'), nullable=False),
Column('prj_id', Integer , ForeignKey('prj.id'), nullable=False))
@@ -748,8 +748,8 @@ class EagerTest8(fixtures.MappedTest):
Table('msg_type', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', sa.Unicode(20)),
- Column('display_name', sa.Unicode(20)))
+ Column('name', sa.String(20)),
+ Column('display_name', sa.String(20)))
@classmethod
def fixtures(cls):
@@ -764,7 +764,7 @@ class EagerTest8(fixtures.MappedTest):
(1,),),
task=(('title', 'task_type_id', 'status_id', 'prj_id'),
- (u'task 1', 1, 1, 1)))
+ ('task 1', 1, 1, 1)))
@classmethod
def setup_classes(cls):
@@ -804,7 +804,7 @@ class EagerTest8(fixtures.MappedTest):
session = create_session()
eq_(session.query(Joined).limit(10).offset(0).one(),
- Joined(id=1, title=u'task 1', props_cnt=0))
+ Joined(id=1, title='task 1', props_cnt=0))
class EagerTest9(fixtures.MappedTest):
diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py
index d60c55edd..2b5d14695 100644
--- a/test/orm/test_attributes.py
+++ b/test/orm/test_attributes.py
@@ -838,7 +838,7 @@ class AttributesTest(fixtures.ORMTest):
attributes.register_attribute(Foo, 'collection',
uselist=True, typecallable=dict, useobject=True)
assert False
- except sa_exc.ArgumentError, e:
+ except sa_exc.ArgumentError as e:
assert str(e) \
== 'Type InstrumentedDict must elect an appender '\
'method to be a collection class'
@@ -865,7 +865,7 @@ class AttributesTest(fixtures.ORMTest):
attributes.register_attribute(Foo, 'collection',
uselist=True, typecallable=MyColl, useobject=True)
assert False
- except sa_exc.ArgumentError, e:
+ except sa_exc.ArgumentError as e:
assert str(e) \
== 'Type MyColl must elect an appender method to be a '\
'collection class'
@@ -889,7 +889,7 @@ class AttributesTest(fixtures.ORMTest):
try:
Foo().collection
assert True
- except sa_exc.ArgumentError, e:
+ except sa_exc.ArgumentError as e:
assert False
class GetNoValueTest(fixtures.ORMTest):
@@ -1410,7 +1410,7 @@ class HistoryTest(fixtures.TestBase):
class Foo(fixtures.BasicEntity):
pass
class Bar(fixtures.BasicEntity):
- def __nonzero__(self):
+ def __bool__(self):
assert False
instrumentation.register_class(Foo)
diff --git a/test/orm/test_collection.py b/test/orm/test_collection.py
index f6cf51037..c9f9f6951 100644
--- a/test/orm/test_collection.py
+++ b/test/orm/test_collection.py
@@ -1106,7 +1106,7 @@ class CollectionsTest(fixtures.ORMTest):
@collection.converter
def _convert(self, dictlike):
- for key, value in dictlike.iteritems():
+ for key, value in dictlike.items():
yield value + 5
class Foo(object):
@@ -1145,12 +1145,12 @@ class CollectionsTest(fixtures.ORMTest):
def __delitem__(self, key):
del self.data[key]
def values(self):
- return self.data.values()
+ return list(self.data.values())
def __contains__(self, key):
return key in self.data
@collection.iterator
def itervalues(self):
- return self.data.itervalues()
+ return iter(self.data.values())
__hash__ = object.__hash__
def __eq__(self, other):
return self.data == other
@@ -1158,7 +1158,7 @@ class CollectionsTest(fixtures.ORMTest):
return 'DictLike(%s)' % repr(self.data)
self._test_adapter(DictLike, self.dictable_entity,
- to_set=lambda c: set(c.itervalues()))
+ to_set=lambda c: set(c.values()))
self._test_dict(DictLike)
self._test_dict_bulk(DictLike)
self.assert_(getattr(DictLike, '_sa_instrumented') == id(DictLike))
@@ -1185,12 +1185,12 @@ class CollectionsTest(fixtures.ORMTest):
def __delitem__(self, key):
del self.data[key]
def values(self):
- return self.data.values()
+ return list(self.data.values())
def __contains__(self, key):
return key in self.data
@collection.iterator
def itervalues(self):
- return self.data.itervalues()
+ return iter(self.data.values())
__hash__ = object.__hash__
def __eq__(self, other):
return self.data == other
@@ -1198,7 +1198,7 @@ class CollectionsTest(fixtures.ORMTest):
return 'DictIsh(%s)' % repr(self.data)
self._test_adapter(DictIsh, self.dictable_entity,
- to_set=lambda c: set(c.itervalues()))
+ to_set=lambda c: set(c.values()))
self._test_dict(DictIsh)
self._test_dict_bulk(DictIsh)
self.assert_(getattr(DictIsh, '_sa_instrumented') == id(DictIsh))
@@ -1859,7 +1859,7 @@ class CustomCollectionsTest(fixtures.MappedTest):
f = sess.query(Foo).get(f.col1)
assert len(list(f.bars)) == 2
- existing = set([id(b) for b in f.bars.values()])
+ existing = set([id(b) for b in list(f.bars.values())])
col = collections.collection_adapter(f.bars)
col.append_with_event(Bar('b'))
@@ -1869,7 +1869,7 @@ class CustomCollectionsTest(fixtures.MappedTest):
f = sess.query(Foo).get(f.col1)
assert len(list(f.bars)) == 2
- replaced = set([id(b) for b in f.bars.values()])
+ replaced = set([id(b) for b in list(f.bars.values())])
self.assert_(existing != replaced)
def test_list(self):
diff --git a/test/orm/test_composites.py b/test/orm/test_composites.py
index f9af0c702..b6e5d81a1 100644
--- a/test/orm/test_composites.py
+++ b/test/orm/test_composites.py
@@ -510,11 +510,11 @@ class MappedSelectTest(fixtures.MappedTest):
session.commit()
eq_(
testing.db.execute(descriptions.select()).fetchall(),
- [(1, u'Color', u'Number')]
+ [(1, 'Color', 'Number')]
)
eq_(
testing.db.execute(values.select()).fetchall(),
- [(1, 1, u'Red', u'5'), (2, 1, u'Blue', u'1')]
+ [(1, 1, 'Red', '5'), (2, 1, 'Blue', '1')]
)
class ManyToOneTest(fixtures.MappedTest):
diff --git a/test/orm/test_deprecations.py b/test/orm/test_deprecations.py
index 95fa9dd29..a8be34971 100644
--- a/test/orm/test_deprecations.py
+++ b/test/orm/test_deprecations.py
@@ -122,7 +122,7 @@ class QueryAlternativesTest(fixtures.MappedTest):
session = sessionmaker(query_cls=MyQuery)()
ad1 = session.query(Address).get(1)
- assert ad1 in cache.values()
+ assert ad1 in list(cache.values())
def test_load(self):
"""x = session.query(Address).load(1)
diff --git a/test/orm/test_dynamic.py b/test/orm/test_dynamic.py
index c8a675e3b..21dcfd436 100644
--- a/test/orm/test_dynamic.py
+++ b/test/orm/test_dynamic.py
@@ -119,9 +119,9 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
eq_(
list(u.addresses.order_by(desc(Address.email_address))),
[
- Address(email_address=u'ed@wood.com'),
- Address(email_address=u'ed@lala.com'),
- Address(email_address=u'ed@bettyboop.com')
+ Address(email_address='ed@wood.com'),
+ Address(email_address='ed@lala.com'),
+ Address(email_address='ed@bettyboop.com')
]
)
@@ -137,9 +137,9 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
eq_(
list(u.addresses),
[
- Address(email_address=u'ed@wood.com'),
- Address(email_address=u'ed@lala.com'),
- Address(email_address=u'ed@bettyboop.com')
+ Address(email_address='ed@wood.com'),
+ Address(email_address='ed@lala.com'),
+ Address(email_address='ed@bettyboop.com')
]
)
@@ -147,9 +147,9 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
eq_(
list(u.addresses.order_by(None).order_by(Address.email_address)),
[
- Address(email_address=u'ed@bettyboop.com'),
- Address(email_address=u'ed@lala.com'),
- Address(email_address=u'ed@wood.com')
+ Address(email_address='ed@bettyboop.com'),
+ Address(email_address='ed@lala.com'),
+ Address(email_address='ed@wood.com')
]
)
@@ -157,9 +157,9 @@ class DynamicTest(_DynamicFixture, _fixtures.FixtureTest, AssertsCompiledSQL):
eq_(
set(u.addresses.order_by(None)),
set([
- Address(email_address=u'ed@bettyboop.com'),
- Address(email_address=u'ed@lala.com'),
- Address(email_address=u'ed@wood.com')
+ Address(email_address='ed@bettyboop.com'),
+ Address(email_address='ed@lala.com'),
+ Address(email_address='ed@wood.com')
])
)
@@ -529,12 +529,12 @@ class UOWTest(_DynamicFixture, _fixtures.FixtureTest,
"SELECT addresses.id AS addresses_id, addresses.email_address "
"AS addresses_email_address FROM addresses "
"WHERE addresses.id = :param_1",
- lambda ctx: [{u'param_1': a2_id}]
+ lambda ctx: [{'param_1': a2_id}]
),
CompiledSQL(
"UPDATE addresses SET user_id=:user_id WHERE addresses.id = "
":addresses_id",
- lambda ctx: [{u'addresses_id': a2_id, 'user_id': None}]
+ lambda ctx: [{'addresses_id': a2_id, 'user_id': None}]
)
)
diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py
index b240d29f6..54a106c5a 100644
--- a/test/orm/test_eager_relations.py
+++ b/test/orm/test_eager_relations.py
@@ -747,11 +747,11 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
sess = create_session()
eq_(sess.query(User).first(),
- User(name=u'jack',orders=[
- Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1),
- Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3),
- Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5)],
- email_address=u'jack@bean.com',id=7)
+ User(name='jack',orders=[
+ Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1),
+ Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3),
+ Order(address_id=None,description='order 5',isopen=0,user_id=7,id=5)],
+ email_address='jack@bean.com',id=7)
)
def test_useget_cancels_eager(self):
@@ -2103,7 +2103,7 @@ class SelfReferentialM2MEagerTest(fixtures.MappedTest):
def define_tables(cls, metadata):
Table('widget', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', sa.Unicode(40), nullable=False, unique=True),
+ Column('name', sa.String(40), nullable=False, unique=True),
)
Table('widget_rel', metadata,
@@ -2127,15 +2127,15 @@ class SelfReferentialM2MEagerTest(fixtures.MappedTest):
})
sess = create_session()
- w1 = Widget(name=u'w1')
- w2 = Widget(name=u'w2')
+ w1 = Widget(name='w1')
+ w2 = Widget(name='w2')
w1.children.append(w2)
sess.add(w1)
sess.flush()
sess.expunge_all()
eq_([Widget(name='w1', children=[Widget(name='w2')])],
- sess.query(Widget).filter(Widget.name==u'w1').all())
+ sess.query(Widget).filter(Widget.name=='w1').all())
class MixedEntitiesTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
run_setup_mappers = 'once'
@@ -2219,24 +2219,24 @@ class MixedEntitiesTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
eq_(
[
(
- User(addresses=[Address(email_address=u'fred@fred.com')], name=u'fred'),
- Order(description=u'order 2', isopen=0, items=[Item(description=u'item 1'), Item(description=u'item 2'), Item(description=u'item 3')]),
- User(addresses=[Address(email_address=u'jack@bean.com')], name=u'jack'),
- Order(description=u'order 3', isopen=1, items=[Item(description=u'item 3'), Item(description=u'item 4'), Item(description=u'item 5')])
+ User(addresses=[Address(email_address='fred@fred.com')], name='fred'),
+ Order(description='order 2', isopen=0, items=[Item(description='item 1'), Item(description='item 2'), Item(description='item 3')]),
+ User(addresses=[Address(email_address='jack@bean.com')], name='jack'),
+ Order(description='order 3', isopen=1, items=[Item(description='item 3'), Item(description='item 4'), Item(description='item 5')])
),
(
- User(addresses=[Address(email_address=u'fred@fred.com')], name=u'fred'),
- Order(description=u'order 2', isopen=0, items=[Item(description=u'item 1'), Item(description=u'item 2'), Item(description=u'item 3')]),
- User(addresses=[Address(email_address=u'jack@bean.com')], name=u'jack'),
- Order(address_id=None, description=u'order 5', isopen=0, items=[Item(description=u'item 5')])
+ User(addresses=[Address(email_address='fred@fred.com')], name='fred'),
+ Order(description='order 2', isopen=0, items=[Item(description='item 1'), Item(description='item 2'), Item(description='item 3')]),
+ User(addresses=[Address(email_address='jack@bean.com')], name='jack'),
+ Order(address_id=None, description='order 5', isopen=0, items=[Item(description='item 5')])
),
(
- User(addresses=[Address(email_address=u'fred@fred.com')], name=u'fred'),
- Order(description=u'order 4', isopen=1, items=[Item(description=u'item 1'), Item(description=u'item 5')]),
- User(addresses=[Address(email_address=u'jack@bean.com')], name=u'jack'),
- Order(address_id=None, description=u'order 5', isopen=0, items=[Item(description=u'item 5')])
+ User(addresses=[Address(email_address='fred@fred.com')], name='fred'),
+ Order(description='order 4', isopen=1, items=[Item(description='item 1'), Item(description='item 5')]),
+ User(addresses=[Address(email_address='jack@bean.com')], name='jack'),
+ Order(address_id=None, description='order 5', isopen=0, items=[Item(description='item 5')])
),
],
sess.query(User, Order, u1, o1).\
@@ -2647,9 +2647,9 @@ class CyclicalInheritingEagerTestTwo(fixtures.DeclarativeMappedTest,
Movie = self.classes.Movie
session = Session(testing.db)
- rscott = Director(name=u"Ridley Scott")
- alien = Movie(title=u"Alien")
- brunner = Movie(title=u"Blade Runner")
+ rscott = Director(name="Ridley Scott")
+ alien = Movie(title="Alien")
+ brunner = Movie(title="Blade Runner")
rscott.movies.append(brunner)
rscott.movies.append(alien)
session.add_all([rscott, alien, brunner])
diff --git a/test/orm/test_evaluator.py b/test/orm/test_evaluator.py
index 4678100eb..2570f7650 100644
--- a/test/orm/test_evaluator.py
+++ b/test/orm/test_evaluator.py
@@ -62,6 +62,23 @@ class EvaluateTest(fixtures.MappedTest):
(User(name=None), True),
])
+ def test_true_false(self):
+ User = self.classes.User
+
+ eval_eq(User.name == False, testcases=[
+ (User(name='foo'), False),
+ (User(name=True), False),
+ (User(name=False), True),
+ ]
+ )
+
+ eval_eq(User.name == True, testcases=[
+ (User(name='foo'), False),
+ (User(name=True), True),
+ (User(name=False), False),
+ ]
+ )
+
def test_boolean_ops(self):
User = self.classes.User
diff --git a/test/orm/test_expire.py b/test/orm/test_expire.py
index 0b1350e0a..6eb124cd2 100644
--- a/test/orm/test_expire.py
+++ b/test/orm/test_expire.py
@@ -850,7 +850,7 @@ class ExpireTest(_fixtures.FixtureTest):
assert len(u.addresses) == 3
sess.expire(u)
assert 'addresses' not in u.__dict__
- print "-------------------------------------------"
+ print("-------------------------------------------")
sess.query(User).filter_by(id=8).all()
assert 'addresses' in u.__dict__
assert len(u.addresses) == 3
diff --git a/test/orm/test_froms.py b/test/orm/test_froms.py
index c701a7076..1a972d965 100644
--- a/test/orm/test_froms.py
+++ b/test/orm/test_froms.py
@@ -161,7 +161,7 @@ class RawSelectTest(QueryTest, AssertsCompiledSQL):
sess = create_session()
- self.assert_compile(sess.query(users).select_from(
+ self.assert_compile(sess.query(users).select_entity_from(
users.select()).with_labels().statement,
"SELECT users.id AS users_id, users.name AS users_name FROM users, "
"(SELECT users.id AS id, users.name AS name FROM users) AS anon_1",
@@ -180,7 +180,7 @@ class RawSelectTest(QueryTest, AssertsCompiledSQL):
filter(addresses.c.user_id == users.c.id).correlate(users).\
statement.alias()
- self.assert_compile(sess.query(users, s.c.email).select_from(
+ self.assert_compile(sess.query(users, s.c.email).select_entity_from(
users.join(s, s.c.id == users.c.id)
).with_labels().statement,
"SELECT users.id AS users_id, users.name AS users_name, "
@@ -315,12 +315,12 @@ class FromSelfTest(QueryTest, AssertsCompiledSQL):
from_self(User.name, ualias.name).
order_by(User.name, ualias.name).all(),
[
- (u'chuck', u'ed'),
- (u'chuck', u'fred'),
- (u'chuck', u'jack'),
- (u'ed', u'jack'),
- (u'fred', u'ed'),
- (u'fred', u'jack')
+ ('chuck', 'ed'),
+ ('chuck', 'fred'),
+ ('chuck', 'jack'),
+ ('ed', 'jack'),
+ ('fred', 'ed'),
+ ('fred', 'jack')
]
)
@@ -330,7 +330,7 @@ class FromSelfTest(QueryTest, AssertsCompiledSQL):
from_self(User.name, ualias.name).
filter(ualias.name=='ed')\
.order_by(User.name, ualias.name).all(),
- [(u'chuck', u'ed'), (u'fred', u'ed')]
+ [('chuck', 'ed'), ('fred', 'ed')]
)
eq_(
@@ -340,11 +340,11 @@ class FromSelfTest(QueryTest, AssertsCompiledSQL):
join(ualias.addresses).
order_by(ualias.name, Address.email_address).all(),
[
- (u'ed', u'fred@fred.com'),
- (u'jack', u'ed@bettyboop.com'),
- (u'jack', u'ed@lala.com'),
- (u'jack', u'ed@wood.com'),
- (u'jack', u'fred@fred.com')]
+ ('ed', 'fred@fred.com'),
+ ('jack', 'ed@bettyboop.com'),
+ ('jack', 'ed@lala.com'),
+ ('jack', 'ed@wood.com'),
+ ('jack', 'fred@fred.com')]
)
@@ -425,12 +425,12 @@ class ColumnAccessTest(QueryTest, AssertsCompiledSQL):
"WHERE anon_1.anon_2_users_name = :name_1"
)
- def test_select_from(self):
+ def test_select_entity_from(self):
User = self.classes.User
sess = create_session()
q = sess.query(User)
- q = sess.query(User).select_from(q.statement)
+ q = sess.query(User).select_entity_from(q.statement)
self.assert_compile(
q.filter(User.name=='ed'),
"SELECT anon_1.id AS anon_1_id, anon_1.name AS anon_1_name "
@@ -438,6 +438,31 @@ class ColumnAccessTest(QueryTest, AssertsCompiledSQL):
"users) AS anon_1 WHERE anon_1.name = :name_1"
)
+ def test_select_entity_from_no_entities(self):
+ User = self.classes.User
+ sess = create_session()
+
+ q = sess.query(User)
+ assert_raises_message(
+ sa.exc.ArgumentError,
+ r"A selectable \(FromClause\) instance is "
+ "expected when the base alias is being set",
+ sess.query(User).select_entity_from, User
+ )
+
+ def test_select_from_no_aliasing(self):
+ User = self.classes.User
+ sess = create_session()
+
+ q = sess.query(User)
+ q = sess.query(User).select_from(q.statement)
+ self.assert_compile(
+ q.filter(User.name=='ed'),
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users, (SELECT users.id AS id, users.name AS name FROM "
+ "users) AS anon_1 WHERE users.name = :name_1"
+ )
+
def test_anonymous_expression(self):
from sqlalchemy.sql import column
@@ -629,8 +654,8 @@ class AddEntityEquivalenceTest(fixtures.MappedTest, AssertsCompiledSQL):
eq_(
q.all(),
[(
- A(bid=2, id=1, name=u'a1', type=u'a'),
- C(age=3, id=2, name=u'c1', type=u'c')
+ A(bid=2, id=1, name='a1', type='a'),
+ C(age=3, id=2, name='c1', type='c')
)]
)
@@ -642,8 +667,8 @@ class AddEntityEquivalenceTest(fixtures.MappedTest, AssertsCompiledSQL):
eq_(
q.all(),
[(
- C(age=3, id=2, name=u'c1', type=u'c'),
- A(bid=2, id=1, name=u'a1', type=u'a')
+ C(age=3, id=2, name='c1', type='c'),
+ A(bid=2, id=1, name='a1', type='a')
)]
)
@@ -681,15 +706,15 @@ class InstancesTest(QueryTest, AssertsCompiledSQL):
assert self.static.user_address_result == l
self.assert_sql_count(testing.db, go, 1)
- # better way. use select_from()
+ # better way. use select_entity_from()
def go():
- l = sess.query(User).select_from(query).\
+ l = sess.query(User).select_entity_from(query).\
options(contains_eager('addresses')).all()
assert self.static.user_address_result == l
self.assert_sql_count(testing.db, go, 1)
# same thing, but alias addresses, so that the adapter
- # generated by select_from() is wrapped within
+ # generated by select_entity_from() is wrapped within
# the adapter created by contains_eager()
adalias = addresses.alias()
query = users.select(users.c.id==7).\
@@ -699,7 +724,7 @@ class InstancesTest(QueryTest, AssertsCompiledSQL):
select(use_labels=True,
order_by=['ulist.id', adalias.c.id])
def go():
- l = sess.query(User).select_from(query).\
+ l = sess.query(User).select_entity_from(query).\
options(contains_eager('addresses', alias=adalias)).all()
assert self.static.user_address_result == l
self.assert_sql_count(testing.db, go, 1)
@@ -733,7 +758,7 @@ class InstancesTest(QueryTest, AssertsCompiledSQL):
adalias = addresses.alias()
q = sess.query(User).\
- select_from(users.outerjoin(adalias)).\
+ select_entity_from(users.outerjoin(adalias)).\
options(contains_eager(User.addresses, alias=adalias)).\
order_by(User.id, adalias.c.id)
def go():
@@ -929,12 +954,12 @@ class InstancesTest(QueryTest, AssertsCompiledSQL):
# have a Dingaling here due to using the inner
# join for the eager load
[
- User(name=u'ed', addresses=[
- Address(email_address=u'ed@wood.com',
+ User(name='ed', addresses=[
+ Address(email_address='ed@wood.com',
dingaling=Dingaling(data='ding 1/2')),
]),
- User(name=u'fred', addresses=[
- Address(email_address=u'fred@fred.com',
+ User(name='fred', addresses=[
+ Address(email_address='fred@fred.com',
dingaling=Dingaling(data='ding 2/5'))
])
]
@@ -965,12 +990,12 @@ class InstancesTest(QueryTest, AssertsCompiledSQL):
# have a Dingaling here due to using the inner
# join for the eager load
[
- User(name=u'ed', addresses=[
- Address(email_address=u'ed@wood.com',
+ User(name='ed', addresses=[
+ Address(email_address='ed@wood.com',
dingaling=Dingaling(data='ding 1/2')),
]),
- User(name=u'fred', addresses=[
- Address(email_address=u'fred@fred.com',
+ User(name='fred', addresses=[
+ Address(email_address='fred@fred.com',
dingaling=Dingaling(data='ding 2/5'))
])
]
@@ -998,11 +1023,11 @@ class InstancesTest(QueryTest, AssertsCompiledSQL):
contains_eager(User.orders)).order_by(User.id,
Order.id).offset(1).limit(2).all()
eq_(l, [User(id=7,
- addresses=[Address(email_address=u'jack@bean.com',
- user_id=7, id=1)], name=u'jack',
+ addresses=[Address(email_address='jack@bean.com',
+ user_id=7, id=1)], name='jack',
orders=[Order(address_id=1, user_id=7,
- description=u'order 3', isopen=1, id=3),
- Order(address_id=None, user_id=7, description=u'order 5'
+ description='order 3', isopen=1, id=3),
+ Order(address_id=None, user_id=7, description='order 5'
, isopen=0, id=5)])])
self.assert_sql_count(testing.db, go, 1)
@@ -1020,11 +1045,11 @@ class InstancesTest(QueryTest, AssertsCompiledSQL):
order_by(User.id, oalias.id).\
offset(1).limit(2).all()
eq_(l, [User(id=7,
- addresses=[Address(email_address=u'jack@bean.com',
- user_id=7, id=1)], name=u'jack',
+ addresses=[Address(email_address='jack@bean.com',
+ user_id=7, id=1)], name='jack',
orders=[Order(address_id=1, user_id=7,
- description=u'order 3', isopen=1, id=3),
- Order(address_id=None, user_id=7, description=u'order 5'
+ description='order 3', isopen=1, id=3),
+ Order(address_id=None, user_id=7, description='order 5'
, isopen=0, id=5)])])
self.assert_sql_count(testing.db, go, 1)
@@ -1044,16 +1069,16 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
sel = users.select(User.id.in_([7, 8])).alias()
q = sess.query(User)
- q2 = q.select_from(sel).values(User.name)
- eq_(list(q2), [(u'jack',), (u'ed',)])
+ q2 = q.select_entity_from(sel).values(User.name)
+ eq_(list(q2), [('jack',), ('ed',)])
q = sess.query(User)
q2 = q.order_by(User.id).\
values(User.name, User.name + " " + cast(User.id, String(50)))
eq_(
list(q2),
- [(u'jack', u'jack 7'), (u'ed', u'ed 8'),
- (u'fred', u'fred 9'), (u'chuck', u'chuck 10')]
+ [('jack', 'jack 7'), ('ed', 'ed 8'),
+ ('fred', 'fred 9'), ('chuck', 'chuck 10')]
)
q2 = q.join('addresses').\
@@ -1061,39 +1086,39 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
order_by(User.id, Address.id).\
values(User.name, Address.email_address)
eq_(list(q2),
- [(u'ed', u'ed@wood.com'), (u'ed', u'ed@bettyboop.com'),
- (u'ed', u'ed@lala.com'), (u'fred', u'fred@fred.com')])
+ [('ed', 'ed@wood.com'), ('ed', 'ed@bettyboop.com'),
+ ('ed', 'ed@lala.com'), ('fred', 'fred@fred.com')])
q2 = q.join('addresses').\
filter(User.name.like('%e%')).\
order_by(desc(Address.email_address)).\
slice(1, 3).values(User.name, Address.email_address)
- eq_(list(q2), [(u'ed', u'ed@wood.com'), (u'ed', u'ed@lala.com')])
+ eq_(list(q2), [('ed', 'ed@wood.com'), ('ed', 'ed@lala.com')])
adalias = aliased(Address)
q2 = q.join(adalias, 'addresses').\
filter(User.name.like('%e%')).order_by(adalias.email_address).\
values(User.name, adalias.email_address)
- eq_(list(q2), [(u'ed', u'ed@bettyboop.com'), (u'ed', u'ed@lala.com'),
- (u'ed', u'ed@wood.com'), (u'fred', u'fred@fred.com')])
+ eq_(list(q2), [('ed', 'ed@bettyboop.com'), ('ed', 'ed@lala.com'),
+ ('ed', 'ed@wood.com'), ('fred', 'fred@fred.com')])
q2 = q.values(func.count(User.name))
- assert q2.next() == (4,)
+ assert next(q2) == (4,)
- q2 = q.select_from(sel).filter(User.id==8).values(User.name, sel.c.name, User.name)
- eq_(list(q2), [(u'ed', u'ed', u'ed')])
+ q2 = q.select_entity_from(sel).filter(User.id==8).values(User.name, sel.c.name, User.name)
+ eq_(list(q2), [('ed', 'ed', 'ed')])
# using User.xxx is alised against "sel", so this query returns nothing
- q2 = q.select_from(sel).\
+ q2 = q.select_entity_from(sel).\
filter(User.id==8).\
filter(User.id>sel.c.id).values(User.name, sel.c.name, User.name)
eq_(list(q2), [])
# whereas this uses users.c.xxx, is not aliased and creates a new join
- q2 = q.select_from(sel).\
+ q2 = q.select_entity_from(sel).\
filter(users.c.id==8).\
filter(users.c.id>sel.c.id).values(users.c.name, sel.c.name, User.name)
- eq_(list(q2), [(u'ed', u'jack', u'jack')])
+ eq_(list(q2), [('ed', 'jack', 'jack')])
def test_alias_naming(self):
User = self.classes.User
@@ -1119,14 +1144,14 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
sel = users.select(User.id.in_([7, 8])).alias()
q = sess.query(User)
u2 = aliased(User)
- q2 = q.select_from(sel).\
+ q2 = q.select_entity_from(sel).\
filter(u2.id>1).\
order_by(User.id, sel.c.id, u2.id).\
values(User.name, sel.c.name, u2.name)
- eq_(list(q2), [(u'jack', u'jack', u'jack'), (u'jack', u'jack', u'ed'),
- (u'jack', u'jack', u'fred'), (u'jack', u'jack', u'chuck'),
- (u'ed', u'ed', u'jack'), (u'ed', u'ed', u'ed'),
- (u'ed', u'ed', u'fred'), (u'ed', u'ed', u'chuck')])
+ eq_(list(q2), [('jack', 'jack', 'jack'), ('jack', 'jack', 'ed'),
+ ('jack', 'jack', 'fred'), ('jack', 'jack', 'chuck'),
+ ('ed', 'ed', 'jack'), ('ed', 'ed', 'ed'),
+ ('ed', 'ed', 'fred'), ('ed', 'ed', 'chuck')])
@testing.fails_on('mssql', 'FIXME: unknown')
@testing.fails_on('oracle',
@@ -1177,8 +1202,8 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
# we don't want Address to be outside of the subquery here
eq_(
list(sess.query(User, subq)[0:3]),
- [(User(id=7,name=u'jack'), 1), (User(id=8,name=u'ed'), 3),
- (User(id=9,name=u'fred'), 1)]
+ [(User(id=7,name='jack'), 1), (User(id=8,name='ed'), 3),
+ (User(id=9,name='fred'), 1)]
)
# same thing without the correlate, as it should
@@ -1190,8 +1215,8 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
# we don't want Address to be outside of the subquery here
eq_(
list(sess.query(User, subq)[0:3]),
- [(User(id=7,name=u'jack'), 1), (User(id=8,name=u'ed'), 3),
- (User(id=9,name=u'fred'), 1)]
+ [(User(id=7,name='jack'), 1), (User(id=8,name='ed'), 3),
+ (User(id=9,name='fred'), 1)]
)
@@ -1202,23 +1227,23 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
sess = create_session()
- eq_(sess.query(User.name).all(), [(u'jack',), (u'ed',), (u'fred',), (u'chuck',)])
+ eq_(sess.query(User.name).all(), [('jack',), ('ed',), ('fred',), ('chuck',)])
sel = users.select(User.id.in_([7, 8])).alias()
q = sess.query(User.name)
- q2 = q.select_from(sel).all()
- eq_(list(q2), [(u'jack',), (u'ed',)])
+ q2 = q.select_entity_from(sel).all()
+ eq_(list(q2), [('jack',), ('ed',)])
eq_(sess.query(User.name, Address.email_address).filter(User.id==Address.user_id).all(), [
- (u'jack', u'jack@bean.com'), (u'ed', u'ed@wood.com'),
- (u'ed', u'ed@bettyboop.com'), (u'ed', u'ed@lala.com'),
- (u'fred', u'fred@fred.com')
+ ('jack', 'jack@bean.com'), ('ed', 'ed@wood.com'),
+ ('ed', 'ed@bettyboop.com'), ('ed', 'ed@lala.com'),
+ ('fred', 'fred@fred.com')
])
eq_(sess.query(User.name, func.count(Address.email_address)).\
outerjoin(User.addresses).group_by(User.id, User.name).\
order_by(User.id).all(),
- [(u'jack', 1), (u'ed', 3), (u'fred', 1), (u'chuck', 0)]
+ [('jack', 1), ('ed', 3), ('fred', 1), ('chuck', 0)]
)
eq_(sess.query(User, func.count(Address.email_address)).\
@@ -1246,8 +1271,8 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
eq_(sess.query(func.count(adalias.email_address), User).\
outerjoin(adalias, User.addresses).group_by(User).\
order_by(User.id).all(),
- [(1, User(name=u'jack',id=7)), (3, User(name=u'ed',id=8)),
- (1, User(name=u'fred',id=9)), (0, User(name=u'chuck',id=10))]
+ [(1, User(name='jack',id=7)), (3, User(name='ed',id=8)),
+ (1, User(name='fred',id=9)), (0, User(name='chuck',id=10))]
)
# select from aliasing + explicit aliasing
@@ -1257,12 +1282,12 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
from_self(User, adalias.email_address).\
order_by(User.id, adalias.id).all(),
[
- (User(name=u'jack',id=7), u'jack@bean.com'),
- (User(name=u'ed',id=8), u'ed@wood.com'),
- (User(name=u'ed',id=8), u'ed@bettyboop.com'),
- (User(name=u'ed',id=8), u'ed@lala.com'),
- (User(name=u'fred',id=9), u'fred@fred.com'),
- (User(name=u'chuck',id=10), None)
+ (User(name='jack',id=7), 'jack@bean.com'),
+ (User(name='ed',id=8), 'ed@wood.com'),
+ (User(name='ed',id=8), 'ed@bettyboop.com'),
+ (User(name='ed',id=8), 'ed@lala.com'),
+ (User(name='fred',id=9), 'fred@fred.com'),
+ (User(name='chuck',id=10), None)
]
)
@@ -1272,12 +1297,12 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
filter(Address.email_address.like('%ed%')).\
from_self().all(),
[
- User(name=u'ed',id=8),
- User(name=u'fred',id=9),
+ User(name='ed',id=8),
+ User(name='fred',id=9),
]
)
- # test eager aliasing, with/without select_from aliasing
+ # test eager aliasing, with/without select_entity_from aliasing
for q in [
sess.query(User, adalias.email_address).\
outerjoin(adalias, User.addresses).\
@@ -1293,27 +1318,27 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
q.all(),
[(User(addresses=[
- Address(user_id=7,email_address=u'jack@bean.com',id=1)],
- name=u'jack',id=7), u'jack@bean.com'),
+ Address(user_id=7,email_address='jack@bean.com',id=1)],
+ name='jack',id=7), 'jack@bean.com'),
(User(addresses=[
- Address(user_id=8,email_address=u'ed@wood.com',id=2),
- Address(user_id=8,email_address=u'ed@bettyboop.com',id=3),
- Address(user_id=8,email_address=u'ed@lala.com',id=4)],
- name=u'ed',id=8), u'ed@wood.com'),
+ Address(user_id=8,email_address='ed@wood.com',id=2),
+ Address(user_id=8,email_address='ed@bettyboop.com',id=3),
+ Address(user_id=8,email_address='ed@lala.com',id=4)],
+ name='ed',id=8), 'ed@wood.com'),
(User(addresses=[
- Address(user_id=8,email_address=u'ed@wood.com',id=2),
- Address(user_id=8,email_address=u'ed@bettyboop.com',id=3),
- Address(user_id=8,email_address=u'ed@lala.com',id=4)],name=u'ed',id=8),
- u'ed@bettyboop.com'),
+ Address(user_id=8,email_address='ed@wood.com',id=2),
+ Address(user_id=8,email_address='ed@bettyboop.com',id=3),
+ Address(user_id=8,email_address='ed@lala.com',id=4)],name='ed',id=8),
+ 'ed@bettyboop.com'),
(User(addresses=[
- Address(user_id=8,email_address=u'ed@wood.com',id=2),
- Address(user_id=8,email_address=u'ed@bettyboop.com',id=3),
- Address(user_id=8,email_address=u'ed@lala.com',id=4)],name=u'ed',id=8),
- u'ed@lala.com'),
- (User(addresses=[Address(user_id=9,email_address=u'fred@fred.com',id=5)],name=u'fred',id=9),
- u'fred@fred.com'),
+ Address(user_id=8,email_address='ed@wood.com',id=2),
+ Address(user_id=8,email_address='ed@bettyboop.com',id=3),
+ Address(user_id=8,email_address='ed@lala.com',id=4)],name='ed',id=8),
+ 'ed@lala.com'),
+ (User(addresses=[Address(user_id=9,email_address='fred@fred.com',id=5)],name='fred',id=9),
+ 'fred@fred.com'),
- (User(addresses=[],name=u'chuck',id=10), None)]
+ (User(addresses=[],name='chuck',id=10), None)]
)
def test_column_from_limited_joinedload(self):
@@ -1367,19 +1392,19 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
eq_(
q.all(),
[
- (Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3),
- Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1)),
- (Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5),
- Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1)),
- (Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5),
- Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3))
+ (Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3),
+ Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1)),
+ (Order(address_id=None,description='order 5',isopen=0,user_id=7,id=5),
+ Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1)),
+ (Order(address_id=None,description='order 5',isopen=0,user_id=7,id=5),
+ Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3))
]
)
# ensure column expressions are taken from inside the subquery, not restated at the top
q = sess.query(Order.id, Order.description, literal_column("'q'").label('foo')).\
- filter(Order.description == u'order 3').from_self()
+ filter(Order.description == 'order 3').from_self()
self.assert_compile(q,
"SELECT anon_1.orders_id AS "
"anon_1_orders_id, anon_1.orders_descriptio"
@@ -1392,7 +1417,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
"anon_1")
eq_(
q.all(),
- [(3, u'order 3', 'q')]
+ [(3, 'order 3', 'q')]
)
@@ -1466,14 +1491,14 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
q = sess.query(User)
adalias = addresses.alias('adalias')
- q = q.add_entity(Address, alias=adalias).select_from(users.outerjoin(adalias))
+ q = q.add_entity(Address, alias=adalias).select_entity_from(users.outerjoin(adalias))
l = q.order_by(User.id, adalias.c.id).all()
assert l == expected
sess.expunge_all()
q = sess.query(User).add_entity(Address, alias=adalias)
- l = q.select_from(users.outerjoin(adalias)).filter(adalias.c.email_address=='ed@bettyboop.com').all()
+ l = q.select_entity_from(users.outerjoin(adalias)).filter(adalias.c.email_address=='ed@bettyboop.com').all()
assert l == [(user8, address3)]
def test_with_entities(self):
@@ -1518,7 +1543,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
eq_(
sess.query(User.id).add_column(users).all(),
- [(7, 7, u'jack'), (8, 8, u'ed'), (9, 9, u'fred'), (10, 10, u'chuck')]
+ [(7, 7, 'jack'), (8, 8, 'ed'), (9, 9, 'fred'), (10, 10, 'chuck')]
)
def test_multi_columns_2(self):
@@ -1591,9 +1616,9 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
sess.expunge_all()
- # test with select_from()
+ # test with select_entity_from()
q = create_session().query(User).add_column(func.count(addresses.c.id))\
- .add_column(("Name:" + users.c.name)).select_from(users.outerjoin(addresses))\
+ .add_column(("Name:" + users.c.name)).select_entity_from(users.outerjoin(addresses))\
.group_by(users).order_by(users.c.id)
assert q.all() == expected
@@ -1718,19 +1743,19 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
sel = users.select(users.c.id.in_([7, 8])).alias()
sess = create_session()
- eq_(sess.query(User).select_from(sel).all(), [User(id=7), User(id=8)])
+ eq_(sess.query(User).select_entity_from(sel).all(), [User(id=7), User(id=8)])
- eq_(sess.query(User).select_from(sel).filter(User.id==8).all(), [User(id=8)])
+ eq_(sess.query(User).select_entity_from(sel).filter(User.id==8).all(), [User(id=8)])
- eq_(sess.query(User).select_from(sel).order_by(desc(User.name)).all(), [
+ eq_(sess.query(User).select_entity_from(sel).order_by(desc(User.name)).all(), [
User(name='jack',id=7), User(name='ed',id=8)
])
- eq_(sess.query(User).select_from(sel).order_by(asc(User.name)).all(), [
+ eq_(sess.query(User).select_entity_from(sel).order_by(asc(User.name)).all(), [
User(name='ed',id=8), User(name='jack',id=7)
])
- eq_(sess.query(User).select_from(sel).options(joinedload('addresses')).first(),
+ eq_(sess.query(User).select_entity_from(sel).options(joinedload('addresses')).first(),
User(name='jack', addresses=[Address(id=1)])
)
@@ -1745,7 +1770,7 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
sel = users.select(users.c.id.in_([7, 8]))
sess = create_session()
- eq_(sess.query(User).select_from(sel).all(),
+ eq_(sess.query(User).select_entity_from(sel).all(),
[
User(name='jack',id=7), User(name='ed',id=8)
]
@@ -1772,14 +1797,14 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
)
self.assert_compile(
- sess.query(ualias).select_from(sel).filter(ualias.id>sel.c.id),
+ sess.query(ualias).select_entity_from(sel).filter(ualias.id>sel.c.id),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM "
"users AS users_1, (SELECT users.id AS id, users.name AS name FROM "
"users WHERE users.id IN (:id_1, :id_2)) AS anon_1 WHERE users_1.id > anon_1.id",
)
self.assert_compile(
- sess.query(ualias).select_from(sel).join(ualias, ualias.id>sel.c.id),
+ sess.query(ualias).select_entity_from(sel).join(ualias, ualias.id>sel.c.id),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM (SELECT users.id AS id, users.name AS name "
"FROM users WHERE users.id IN (:id_1, :id_2)) AS anon_1 "
@@ -1787,7 +1812,7 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
)
self.assert_compile(
- sess.query(ualias).select_from(sel).join(ualias, ualias.id>User.id),
+ sess.query(ualias).select_entity_from(sel).join(ualias, ualias.id>User.id),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM (SELECT users.id AS id, users.name AS name FROM "
"users WHERE users.id IN (:id_1, :id_2)) AS anon_1 "
@@ -1805,7 +1830,7 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
# this one uses an explicit join(left, right, onclause) so works
self.assert_compile(
- sess.query(ualias).select_from(join(sel, ualias, ualias.id>sel.c.id)),
+ sess.query(ualias).select_entity_from(join(sel, ualias, ualias.id>sel.c.id)),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM "
"(SELECT users.id AS id, users.name AS name FROM users WHERE users.id "
"IN (:id_1, :id_2)) AS anon_1 JOIN users AS users_1 ON users_1.id > anon_1.id",
@@ -1848,7 +1873,7 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
# here for comparison
self.assert_compile(
sess.query(User.name).\
- select_from(users.select().where(users.c.id > 5)),
+ select_entity_from(users.select().where(users.c.id > 5)),
"SELECT anon_1.name AS anon_1_name FROM (SELECT users.id AS id, "
"users.name AS name FROM users WHERE users.id > :id_1) AS anon_1"
)
@@ -1861,7 +1886,7 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
sel = users.select(users.c.id.in_([7, 8]))
sess = create_session()
- eq_(sess.query(User).select_from(sel).all(),
+ eq_(sess.query(User).select_entity_from(sel).all(),
[
User(name='jack',id=7), User(name='ed',id=8)
]
@@ -1881,7 +1906,7 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
sel = users.select(users.c.id.in_([7, 8]))
sess = create_session()
- eq_(sess.query(User).select_from(sel).join('addresses').
+ eq_(sess.query(User).select_entity_from(sel).join('addresses').
add_entity(Address).order_by(User.id).order_by(Address.id).all(),
[
(User(name='jack',id=7), Address(user_id=7,email_address='jack@bean.com',id=1)),
@@ -1892,7 +1917,7 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
)
adalias = aliased(Address)
- eq_(sess.query(User).select_from(sel).join(adalias, 'addresses').
+ eq_(sess.query(User).select_entity_from(sel).join(adalias, 'addresses').
add_entity(adalias).order_by(User.id).order_by(adalias.id).all(),
[
(User(name='jack',id=7), Address(user_id=7,email_address='jack@bean.com',id=1)),
@@ -1931,82 +1956,82 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
sess = create_session()
sel = users.select(users.c.id.in_([7, 8]))
- eq_(sess.query(User).select_from(sel).\
+ eq_(sess.query(User).select_entity_from(sel).\
join('orders', 'items', 'keywords').\
filter(Keyword.name.in_(['red', 'big', 'round'])).\
all(),
[
- User(name=u'jack',id=7)
+ User(name='jack',id=7)
])
- eq_(sess.query(User).select_from(sel).\
+ eq_(sess.query(User).select_entity_from(sel).\
join('orders', 'items', 'keywords', aliased=True).\
filter(Keyword.name.in_(['red', 'big', 'round'])).\
all(),
[
- User(name=u'jack',id=7)
+ User(name='jack',id=7)
])
def go():
eq_(
- sess.query(User).select_from(sel).
+ sess.query(User).select_entity_from(sel).
options(joinedload_all('orders.items.keywords')).
join('orders', 'items', 'keywords', aliased=True).
filter(Keyword.name.in_(['red', 'big', 'round'])).\
all(),
[
- User(name=u'jack',orders=[
- Order(description=u'order 1',items=[
- Item(description=u'item 1',
+ User(name='jack',orders=[
+ Order(description='order 1',items=[
+ Item(description='item 1',
keywords=[
- Keyword(name=u'red'),
- Keyword(name=u'big'),
- Keyword(name=u'round')
+ Keyword(name='red'),
+ Keyword(name='big'),
+ Keyword(name='round')
]),
- Item(description=u'item 2',
+ Item(description='item 2',
keywords=[
- Keyword(name=u'red',id=2),
- Keyword(name=u'small',id=5),
- Keyword(name=u'square')
+ Keyword(name='red',id=2),
+ Keyword(name='small',id=5),
+ Keyword(name='square')
]),
- Item(description=u'item 3',
+ Item(description='item 3',
keywords=[
- Keyword(name=u'green',id=3),
- Keyword(name=u'big',id=4),
- Keyword(name=u'round',id=6)])
+ Keyword(name='green',id=3),
+ Keyword(name='big',id=4),
+ Keyword(name='round',id=6)])
]),
- Order(description=u'order 3',items=[
- Item(description=u'item 3',
+ Order(description='order 3',items=[
+ Item(description='item 3',
keywords=[
- Keyword(name=u'green',id=3),
- Keyword(name=u'big',id=4),
- Keyword(name=u'round',id=6)
+ Keyword(name='green',id=3),
+ Keyword(name='big',id=4),
+ Keyword(name='round',id=6)
]),
- Item(description=u'item 4',keywords=[],id=4),
- Item(description=u'item 5',keywords=[],id=5)
+ Item(description='item 4',keywords=[],id=4),
+ Item(description='item 5',keywords=[],id=5)
]),
- Order(description=u'order 5',
+ Order(description='order 5',
items=[
- Item(description=u'item 5',keywords=[])])
+ Item(description='item 5',keywords=[])])
])
])
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
sel2 = orders.select(orders.c.id.in_([1,2,3]))
- eq_(sess.query(Order).select_from(sel2).\
+ eq_(sess.query(Order).select_entity_from(sel2).\
join('items', 'keywords').\
filter(Keyword.name == 'red').\
order_by(Order.id).all(), [
- Order(description=u'order 1',id=1),
- Order(description=u'order 2',id=2),
+ Order(description='order 1',id=1),
+ Order(description='order 2',id=2),
])
- eq_(sess.query(Order).select_from(sel2).\
+ eq_(sess.query(Order).select_entity_from(sel2).\
join('items', 'keywords', aliased=True).\
filter(Keyword.name == 'red').\
order_by(Order.id).all(), [
- Order(description=u'order 1',id=1),
- Order(description=u'order 2',id=2),
+ Order(description='order 1',id=1),
+ Order(description='order 2',id=2),
])
@@ -2025,7 +2050,9 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
sess = create_session()
def go():
- eq_(sess.query(User).options(joinedload('addresses')).select_from(sel).order_by(User.id).all(),
+ eq_(sess.query(User).options(
+ joinedload('addresses')
+ ).select_entity_from(sel).order_by(User.id).all(),
[
User(id=7, addresses=[Address(id=1)]),
User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)])
@@ -2035,14 +2062,19 @@ class SelectFromTest(QueryTest, AssertsCompiledSQL):
sess.expunge_all()
def go():
- eq_(sess.query(User).options(joinedload('addresses')).select_from(sel).filter(User.id==8).order_by(User.id).all(),
+ eq_(sess.query(User).options(
+ joinedload('addresses')
+ ).select_entity_from(sel).filter(User.id==8).order_by(User.id).all(),
[User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)])]
)
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
- eq_(sess.query(User).options(joinedload('addresses')).select_from(sel).order_by(User.id)[1], User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)]))
+ eq_(sess.query(User).options(
+ joinedload('addresses')
+ ).select_entity_from(sel).order_by(User.id)[1],
+ User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)]))
self.assert_sql_count(testing.db, go, 1)
class CustomJoinTest(QueryTest):
@@ -2180,7 +2212,7 @@ class ExternalColumnsTest(QueryTest):
ua = aliased(User)
eq_(sess.query(Address, ua.concat, ua.count).
- select_from(join(Address, ua, 'user')).
+ select_entity_from(join(Address, ua, 'user')).
options(joinedload(Address.user)).order_by(Address.id).all(),
[
(Address(id=1, user=User(id=7, concat=14, count=1)), 14, 1),
@@ -2195,7 +2227,7 @@ class ExternalColumnsTest(QueryTest):
[(1, 7, 14, 1), (2, 8, 16, 3), (3, 8, 16, 3), (4, 8, 16, 3), (5, 9, 18, 1)]
)
- eq_(list(sess.query(Address, ua).select_from(join(Address,ua, 'user')).values(Address.id, ua.id, ua.concat, ua.count)),
+ eq_(list(sess.query(Address, ua).select_entity_from(join(Address,ua, 'user')).values(Address.id, ua.id, ua.concat, ua.count)),
[(1, 7, 14, 1), (2, 8, 16, 3), (3, 8, 16, 3), (4, 8, 16, 3), (5, 9, 18, 1)]
)
diff --git a/test/orm/test_generative.py b/test/orm/test_generative.py
index 3f5da69c3..52858cc26 100644
--- a/test/orm/test_generative.py
+++ b/test/orm/test_generative.py
@@ -78,13 +78,8 @@ class GenerativeQueryTest(fixtures.MappedTest):
assert sess.query(func.min(foo.c.bar)).filter(foo.c.bar<30).one() == (0,)
assert sess.query(func.max(foo.c.bar)).filter(foo.c.bar<30).one() == (29,)
- # Py3K
- #assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).__next__()[0] == 29
- #assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).__next__()[0] == 29
- # Py2K
- assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).next()[0] == 29
- assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).next()[0] == 29
- # end Py2K
+ assert next(query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)))[0] == 29
+ assert next(query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)))[0] == 29
@testing.fails_if(lambda:testing.against('mysql+mysqldb') and
testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma'),
@@ -111,18 +106,10 @@ class GenerativeQueryTest(fixtures.MappedTest):
query = create_session().query(Foo)
- # Py3K
- #avg_f = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).__next__()[0]
- # Py2K
- avg_f = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).next()[0]
- # end Py2K
+ avg_f = next(query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)))[0]
assert float(round(avg_f, 1)) == 14.5
- # Py3K
- #avg_o = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).__next__()[0]
- # Py2K
- avg_o = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).next()[0]
- # end Py2K
+ avg_o = next(query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)))[0]
assert float(round(avg_o, 1)) == 14.5
def test_filter(self):
diff --git a/test/orm/test_inspect.py b/test/orm/test_inspect.py
index 2a401f91d..78d84f505 100644
--- a/test/orm/test_inspect.py
+++ b/test/orm/test_inspect.py
@@ -142,7 +142,7 @@ class TestORMInspection(_fixtures.FixtureTest):
[insp.get_property('id'), insp.get_property('name')]
)
eq_(
- insp.column_attrs.keys(),
+ list(insp.column_attrs.keys()),
['id', 'name']
)
is_(
@@ -274,7 +274,7 @@ class TestORMInspection(_fixtures.FixtureTest):
insp = inspect(SomeSubClass)
eq_(
dict((k, v.extension_type)
- for k, v in insp.all_orm_descriptors.items()
+ for k, v in list(insp.all_orm_descriptors.items())
),
{
'id': NOT_EXTENSION,
diff --git a/test/orm/test_instrumentation.py b/test/orm/test_instrumentation.py
index 1a1a70e8b..c3d24ebe7 100644
--- a/test/orm/test_instrumentation.py
+++ b/test/orm/test_instrumentation.py
@@ -540,30 +540,6 @@ class NativeInstrumentationTest(fixtures.ORMTest):
class Py3KFunctionInstTest(fixtures.ORMTest):
__requires__ = ("python3", )
- # Py3K
- #def _kw_only_fixture(self):
- # class A(object):
- # def __init__(self, a, *, b, c):
- # self.a = a
- # self.b = b
- # self.c = c
- # return self._instrument(A)
- #
- #def _kw_plus_posn_fixture(self):
- # class A(object):
- # def __init__(self, a, *args, b, c):
- # self.a = a
- # self.b = b
- # self.c = c
- # return self._instrument(A)
- #
- #def _kw_opt_fixture(self):
- # class A(object):
- # def __init__(self, a, *, b, c="c"):
- # self.a = a
- # self.b = b
- # self.c = c
- # return self._instrument(A)
def _instrument(self, cls):
manager = instrumentation.register_class(cls)
@@ -614,6 +590,36 @@ class Py3KFunctionInstTest(fixtures.ORMTest):
cls, "a", "b", c="c"
)
+if util.py3k:
+ _locals = {}
+ exec("""
+def _kw_only_fixture(self):
+ class A(object):
+ def __init__(self, a, *, b, c):
+ self.a = a
+ self.b = b
+ self.c = c
+ return self._instrument(A)
+
+def _kw_plus_posn_fixture(self):
+ class A(object):
+ def __init__(self, a, *args, b, c):
+ self.a = a
+ self.b = b
+ self.c = c
+ return self._instrument(A)
+
+def _kw_opt_fixture(self):
+ class A(object):
+ def __init__(self, a, *, b, c="c"):
+ self.a = a
+ self.b = b
+ self.c = c
+ return self._instrument(A)
+""", _locals)
+ for k in _locals:
+ setattr(Py3KFunctionInstTest, k, _locals[k])
+
class MiscTest(fixtures.ORMTest):
"""Seems basic, but not directly covered elsewhere!"""
diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py
index 2bf0d8d92..2dac59150 100644
--- a/test/orm/test_joins.py
+++ b/test/orm/test_joins.py
@@ -774,14 +774,14 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
eq_(
sess.query(User).join(Address.user).\
filter(Address.email_address=='ed@wood.com').all(),
- [User(id=8,name=u'ed')]
+ [User(id=8,name='ed')]
)
# its actually not so controversial if you view it in terms
# of multiple entities.
eq_(
sess.query(User, Address).join(Address.user).filter(Address.email_address=='ed@wood.com').all(),
- [(User(id=8,name=u'ed'), Address(email_address='ed@wood.com'))]
+ [(User(id=8,name='ed'), Address(email_address='ed@wood.com'))]
)
# this was the controversial part. now, raise an error if the feature is abused.
@@ -1066,7 +1066,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
# same with an explicit select_from()
eq_(
- sess.query(User).select_from(select([users]).
+ sess.query(User).select_entity_from(select([users]).
order_by(User.id).offset(2).alias()).
join(Order, User.id==Order.user_id).
all(),
@@ -1162,9 +1162,9 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
sess.query(OrderAlias).join('items').filter_by(description='item 3').\
order_by(OrderAlias.id).all(),
[
- Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1),
- Order(address_id=4,description=u'order 2',isopen=0,user_id=9,id=2),
- Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3)
+ Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1),
+ Order(address_id=4,description='order 2',isopen=0,user_id=9,id=2),
+ Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3)
]
)
@@ -1175,9 +1175,9 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
filter_by(description='item 3').\
order_by(User.id, OrderAlias.id).all(),
[
- (User(name=u'jack',id=7), Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1), u'item 3'),
- (User(name=u'jack',id=7), Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3), u'item 3'),
- (User(name=u'fred',id=9), Order(address_id=4,description=u'order 2',isopen=0,user_id=9,id=2), u'item 3')
+ (User(name='jack',id=7), Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1), 'item 3'),
+ (User(name='jack',id=7), Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3), 'item 3'),
+ (User(name='fred',id=9), Order(address_id=4,description='order 2',isopen=0,user_id=9,id=2), 'item 3')
]
)
@@ -1334,12 +1334,12 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
eq_(
sess.query(User, ualias).filter(User.id > ualias.id).order_by(desc(ualias.id), User.name).all(),
[
- (User(id=10,name=u'chuck'), User(id=9,name=u'fred')),
- (User(id=10,name=u'chuck'), User(id=8,name=u'ed')),
- (User(id=9,name=u'fred'), User(id=8,name=u'ed')),
- (User(id=10,name=u'chuck'), User(id=7,name=u'jack')),
- (User(id=8,name=u'ed'), User(id=7,name=u'jack')),
- (User(id=9,name=u'fred'), User(id=7,name=u'jack'))
+ (User(id=10,name='chuck'), User(id=9,name='fred')),
+ (User(id=10,name='chuck'), User(id=8,name='ed')),
+ (User(id=9,name='fred'), User(id=8,name='ed')),
+ (User(id=10,name='chuck'), User(id=7,name='jack')),
+ (User(id=8,name='ed'), User(id=7,name='jack')),
+ (User(id=9,name='fred'), User(id=7,name='jack'))
]
)
@@ -1351,7 +1351,7 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
eq_(
sess.query(User.name).join(addresses, User.id==addresses.c.user_id).order_by(User.id).all(),
- [(u'jack',), (u'ed',), (u'ed',), (u'ed',), (u'fred',)]
+ [('jack',), ('ed',), ('ed',), ('ed',), ('fred',)]
)
def test_no_joinpoint_expr(self):
@@ -2066,13 +2066,13 @@ class SelfReferentialTest(fixtures.MappedTest, AssertsCompiledSQL):
# using 'n1.parent' implicitly joins to unaliased Node
eq_(
sess.query(n1).join(n1.parent).filter(Node.data=='n1').all(),
- [Node(parent_id=1,data=u'n11',id=2), Node(parent_id=1,data=u'n12',id=3), Node(parent_id=1,data=u'n13',id=4)]
+ [Node(parent_id=1,data='n11',id=2), Node(parent_id=1,data='n12',id=3), Node(parent_id=1,data='n13',id=4)]
)
# explicit (new syntax)
eq_(
sess.query(n1).join(Node, n1.parent).filter(Node.data=='n1').all(),
- [Node(parent_id=1,data=u'n11',id=2), Node(parent_id=1,data=u'n12',id=3), Node(parent_id=1,data=u'n13',id=4)]
+ [Node(parent_id=1,data='n11',id=2), Node(parent_id=1,data='n12',id=3), Node(parent_id=1,data='n13',id=4)]
)
diff --git a/test/orm/test_loading.py b/test/orm/test_loading.py
index 20dc082c5..97c08ea29 100644
--- a/test/orm/test_loading.py
+++ b/test/orm/test_loading.py
@@ -73,7 +73,7 @@ class MergeResultTest(_fixtures.FixtureTest):
[(x.id, y) for x, y in it],
[(1, 1), (2, 2), (7, 7), (8, 8)]
)
- eq_(it[0].keys(), ['User', 'id'])
+ eq_(list(it[0].keys()), ['User', 'id'])
def test_entity_col_mix_keyed_tuple(self):
s, (u1, u2, u3, u4) = self._fixture()
@@ -91,7 +91,7 @@ class MergeResultTest(_fixtures.FixtureTest):
[(x.id, y) for x, y in it],
[(1, 1), (2, 2), (7, 7), (8, 8)]
)
- eq_(it[0].keys(), ['User', 'id'])
+ eq_(list(it[0].keys()), ['User', 'id'])
def test_none_entity(self):
s, (u1, u2, u3, u4) = self._fixture()
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index 6b97fb135..19ff78004 100644
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -272,13 +272,14 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
)
m = mapper(Foo, foo_t)
class DontCompareMeToString(int):
- # Py3K
- # pass
- # Py2K
- def __lt__(self, other):
- assert not isinstance(other, basestring)
- return int(self) < other
- # end Py2K
+# start Py3K
+ pass
+# end Py3K
+# start Py2K
+# def __lt__(self, other):
+# assert not isinstance(other, basestring)
+# return int(self) < other
+# end Py2K
foos = [Foo(id='f%d' % i) for i in range(5)]
states = [attributes.instance_state(f) for f in foos]
@@ -847,7 +848,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_we_dont_call_bool(self):
class NoBoolAllowed(object):
- def __nonzero__(self):
+ def __bool__(self):
raise Exception("nope")
mapper(NoBoolAllowed, self.tables.users)
u1 = NoBoolAllowed()
@@ -1057,12 +1058,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
eq_(
create_session().query(User).all(),
- [User(id=7, name=u'jack'), User(id=9, name=u'fred'), User(id=8, name=u'ed'), User(id=10, name=u'chuck')]
+ [User(id=7, name='jack'), User(id=9, name='fred'), User(id=8, name='ed'), User(id=10, name='chuck')]
)
eq_(
create_session().query(User).order_by(User.name).all(),
- [User(id=10, name=u'chuck'), User(id=8, name=u'ed'), User(id=9, name=u'fred'), User(id=7, name=u'jack')]
+ [User(id=10, name='chuck'), User(id=8, name='ed'), User(id=9, name='fred'), User(id=7, name='jack')]
)
# 'Raises a "expression evaluation not supported" error at prepare time
@@ -2123,7 +2124,7 @@ class ValidatorTest(_fixtures.FixtureTest):
mapper(Address, addresses)
eq_(
- dict((k, v[0].__name__) for k, v in u_m.validators.items()),
+ dict((k, v[0].__name__) for k, v in list(u_m.validators.items())),
{'name':'validate_name',
'addresses':'validate_address'}
)
@@ -2992,29 +2993,29 @@ class RequirementsTest(fixtures.MappedTest):
Column('ht1b_id', Integer, ForeignKey('ht1.id'), primary_key=True),
Column('value', String(10)))
- # Py2K
- def test_baseclass(self):
- ht1 = self.tables.ht1
-
- class OldStyle:
- pass
-
- assert_raises(sa.exc.ArgumentError, mapper, OldStyle, ht1)
-
- assert_raises(sa.exc.ArgumentError, mapper, 123)
-
- class NoWeakrefSupport(str):
- pass
-
- # TODO: is weakref support detectable without an instance?
- #self.assertRaises(sa.exc.ArgumentError, mapper, NoWeakrefSupport, t2)
- # end Py2K
+# start Py2K
+# def test_baseclass(self):
+# ht1 = self.tables.ht1
+#
+# class OldStyle:
+# pass
+#
+# assert_raises(sa.exc.ArgumentError, mapper, OldStyle, ht1)
+#
+# assert_raises(sa.exc.ArgumentError, mapper, 123)
+#
+# class NoWeakrefSupport(str):
+# pass
+#
+# # TODO: is weakref support detectable without an instance?
+# #self.assertRaises(sa.exc.ArgumentError, mapper, NoWeakrefSupport, t2)
+# end Py2K
class _ValueBase(object):
def __init__(self, value='abc', id=None):
self.id = id
self.value = value
- def __nonzero__(self):
+ def __bool__(self):
return False
def __hash__(self):
return hash(self.value)
@@ -3173,7 +3174,7 @@ class RequirementsTest(fixtures.MappedTest):
return self.value
class H2(object):
- def __nonzero__(self):
+ def __bool__(self):
return bool(self.get_value())
def get_value(self):
@@ -3224,7 +3225,7 @@ class IsUserlandTest(fixtures.MappedTest):
self._test("someprop")
def test_unicode(self):
- self._test(u"someprop")
+ self._test("someprop")
def test_int(self):
self._test(5)
diff --git a/test/orm/test_merge.py b/test/orm/test_merge.py
index e1474f39b..494f5e349 100644
--- a/test/orm/test_merge.py
+++ b/test/orm/test_merge.py
@@ -346,7 +346,7 @@ class MergeTest(_fixtures.FixtureTest):
sess = create_session()
sess.merge(u1)
sess.flush()
- assert u1.addresses.keys() == ['foo@bar.com']
+ assert list(u1.addresses.keys()) == ['foo@bar.com']
def test_attribute_cascade(self):
"""Merge of a persistent entity with two child
@@ -803,7 +803,7 @@ class MergeTest(_fixtures.FixtureTest):
try:
sess2.merge(u, load=False)
assert False
- except sa.exc.InvalidRequestError, e:
+ except sa.exc.InvalidRequestError as e:
assert "merge() with load=False option does not support "\
"objects marked as 'dirty'. flush() all changes on "\
"mapped instances before merging with load=False." \
@@ -924,7 +924,7 @@ class MergeTest(_fixtures.FixtureTest):
sess2.expunge_all()
eq_(sess2.query(User).get(u2.id).addresses[0].email_address,
'somenewaddress')
- except sa.exc.InvalidRequestError, e:
+ except sa.exc.InvalidRequestError as e:
assert "load=False option does not support" in str(e)
def test_synonym_comparable(self):
diff --git a/test/orm/test_naturalpks.py b/test/orm/test_naturalpks.py
index d30cdc598..173408b82 100644
--- a/test/orm/test_naturalpks.py
+++ b/test/orm/test_naturalpks.py
@@ -2,7 +2,7 @@
Primary key changing capabilities and passive/non-passive cascading updates.
"""
-from __future__ import with_statement
+
from sqlalchemy.testing import eq_, ne_, \
assert_raises, assert_raises_message
import sqlalchemy as sa
@@ -499,7 +499,7 @@ class ReversePKsTest(fixtures.MappedTest):
'user', metadata,
Column('code', Integer, primary_key=True),
Column('status', Integer, primary_key=True),
- Column('username', Unicode(50), nullable=False),
+ Column('username', String(50), nullable=False),
)
@classmethod
@@ -519,11 +519,11 @@ class ReversePKsTest(fixtures.MappedTest):
session = sa.orm.sessionmaker()()
- a_published = User(1, PUBLISHED, u'a')
+ a_published = User(1, PUBLISHED, 'a')
session.add(a_published)
session.commit()
- a_editable = User(1, EDITABLE, u'a')
+ a_editable = User(1, EDITABLE, 'a')
session.add(a_editable)
session.commit()
diff --git a/test/orm/test_pickled.py b/test/orm/test_pickled.py
index 5ac34f914..b54af93f2 100644
--- a/test/orm/test_pickled.py
+++ b/test/orm/test_pickled.py
@@ -443,21 +443,21 @@ class TupleLabelTest(_fixtures.FixtureTest):
if pickled is not False:
row = pickle.loads(pickle.dumps(row, pickled))
- eq_(row.keys(), ['User', 'Address'])
+ eq_(list(row.keys()), ['User', 'Address'])
eq_(row.User, row[0])
eq_(row.Address, row[1])
for row in sess.query(User.name, User.id.label('foobar')):
if pickled is not False:
row = pickle.loads(pickle.dumps(row, pickled))
- eq_(row.keys(), ['name', 'foobar'])
+ eq_(list(row.keys()), ['name', 'foobar'])
eq_(row.name, row[0])
eq_(row.foobar, row[1])
for row in sess.query(User).values(User.name, User.id.label('foobar')):
if pickled is not False:
row = pickle.loads(pickle.dumps(row, pickled))
- eq_(row.keys(), ['name', 'foobar'])
+ eq_(list(row.keys()), ['name', 'foobar'])
eq_(row.name, row[0])
eq_(row.foobar, row[1])
@@ -465,21 +465,21 @@ class TupleLabelTest(_fixtures.FixtureTest):
for row in sess.query(User, oalias).join(User.orders).all():
if pickled is not False:
row = pickle.loads(pickle.dumps(row, pickled))
- eq_(row.keys(), ['User'])
+ eq_(list(row.keys()), ['User'])
eq_(row.User, row[0])
oalias = aliased(Order, name='orders')
for row in sess.query(User, oalias).join(oalias, User.orders).all():
if pickled is not False:
row = pickle.loads(pickle.dumps(row, pickled))
- eq_(row.keys(), ['User', 'orders'])
+ eq_(list(row.keys()), ['User', 'orders'])
eq_(row.User, row[0])
eq_(row.orders, row[1])
# test here that first col is not labeled, only
# one name in keys, matches correctly
for row in sess.query(User.name + 'hoho', User.name):
- eq_(row.keys(), ['name'])
+ eq_(list(row.keys()), ['name'])
eq_(row[0], row.name + 'hoho')
if pickled is not False:
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 3882ec4b5..2e0355e63 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -407,11 +407,12 @@ class GetTest(QueryTest):
Column('data', Unicode(40)))
try:
metadata.create_all()
- # Py3K
- #ustring = b'petit voix m\xe2\x80\x99a'.decode('utf-8')
- # Py2K
- ustring = 'petit voix m\xe2\x80\x99a'.decode('utf-8')
- # end Py2K
+# start Py3K
+ ustring = b'petit voix m\xe2\x80\x99a'.decode('utf-8')
+# end Py3K
+# start Py2K
+# ustring = 'petit voix m\xe2\x80\x99a'.decode('utf-8')
+# end Py2K
table.insert().execute(id=ustring, data=ustring)
class LocalFoo(self.classes.Base):
@@ -637,9 +638,9 @@ class OperatorTest(QueryTest, AssertsCompiledSQL):
(operator.sub, '-'),
# Py3k
#(operator.truediv, '/'),
- # Py2K
- (operator.div, '/'),
- # end Py2K
+# start Py2K
+# (operator.div, '/'),
+# end Py2K
):
for (lhs, rhs, res) in (
(5, User.id, ':id_1 %s users.id'),
@@ -1047,7 +1048,7 @@ class ExpressionTest(QueryTest, AssertsCompiledSQL):
adalias = aliased(Address, q1.subquery())
eq_(
s.query(User, adalias).join(adalias, User.id==adalias.user_id).all(),
- [(User(id=7,name=u'jack'), Address(email_address=u'jack@bean.com',user_id=7,id=1))]
+ [(User(id=7,name='jack'), Address(email_address='jack@bean.com',user_id=7,id=1))]
)
# more slice tests are available in test/orm/generative.py
@@ -1196,7 +1197,7 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
"users.name AS users_name FROM users WHERE users.id = :param_1 "
"UNION SELECT users.id AS users_id, users.name AS users_name "
"FROM users WHERE users.id = :param_2) AS anon_1",
- checkparams = {u'param_1': 7, u'param_2': 8}
+ checkparams = {'param_1': 7, 'param_2': 8}
)
def test_any(self):
@@ -1319,7 +1320,7 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
"FROM users JOIN addresses ON users.id = addresses.user_id "
"WHERE users.name = :name_1 AND "
"addresses.email_address = :email_address_1",
- checkparams={u'email_address_1': 'ed@ed.com', u'name_1': 'ed'}
+ checkparams={'email_address_1': 'ed@ed.com', 'name_1': 'ed'}
)
def test_filter_by_no_property(self):
@@ -1468,14 +1469,14 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL):
for q in (q3.order_by(User.id, "anon_1_param_1"), q6.order_by(User.id, "foo")):
eq_(q.all(),
[
- (User(id=7, name=u'jack'), u'x'),
- (User(id=7, name=u'jack'), u'y'),
- (User(id=8, name=u'ed'), u'x'),
- (User(id=8, name=u'ed'), u'y'),
- (User(id=9, name=u'fred'), u'x'),
- (User(id=9, name=u'fred'), u'y'),
- (User(id=10, name=u'chuck'), u'x'),
- (User(id=10, name=u'chuck'), u'y')
+ (User(id=7, name='jack'), 'x'),
+ (User(id=7, name='jack'), 'y'),
+ (User(id=8, name='ed'), 'x'),
+ (User(id=8, name='ed'), 'y'),
+ (User(id=9, name='fred'), 'x'),
+ (User(id=9, name='fred'), 'y'),
+ (User(id=10, name='chuck'), 'x'),
+ (User(id=10, name='chuck'), 'y')
]
)
@@ -1603,7 +1604,7 @@ class AggregateTest(QueryTest):
sess = create_session()
orders = sess.query(Order).filter(Order.id.in_([2, 3, 4]))
- eq_(orders.values(func.sum(Order.user_id * Order.address_id)).next(), (79,))
+ eq_(next(orders.values(func.sum(Order.user_id * Order.address_id))), (79,))
eq_(orders.value(func.sum(Order.user_id * Order.address_id)), 79)
def test_apply(self):
@@ -1616,9 +1617,9 @@ class AggregateTest(QueryTest):
User, Address = self.classes.User, self.classes.Address
sess = create_session()
- assert [User(name=u'ed',id=8)] == sess.query(User).order_by(User.id).group_by(User).join('addresses').having(func.count(Address.id)> 2).all()
+ assert [User(name='ed',id=8)] == sess.query(User).order_by(User.id).group_by(User).join('addresses').having(func.count(Address.id)> 2).all()
- assert [User(name=u'jack',id=7), User(name=u'fred',id=9)] == sess.query(User).order_by(User.id).group_by(User).join('addresses').having(func.count(Address.id)< 2).all()
+ assert [User(name='jack',id=7), User(name='fred',id=9)] == sess.query(User).order_by(User.id).group_by(User).join('addresses').having(func.count(Address.id)< 2).all()
class ExistsTest(QueryTest, AssertsCompiledSQL):
@@ -1798,14 +1799,14 @@ class YieldTest(QueryTest):
ret = []
eq_(len(sess.identity_map), 0)
- ret.append(q.next())
- ret.append(q.next())
+ ret.append(next(q))
+ ret.append(next(q))
eq_(len(sess.identity_map), 2)
- ret.append(q.next())
- ret.append(q.next())
+ ret.append(next(q))
+ ret.append(next(q))
eq_(len(sess.identity_map), 4)
try:
- q.next()
+ next(q)
assert False
except StopIteration:
pass
@@ -1915,7 +1916,7 @@ class TextTest(QueryTest):
User.id, text("users.name"))
eq_(s.query(User.id, "name").order_by(User.id).all(),
- [(7, u'jack'), (8, u'ed'), (9, u'fred'), (10, u'chuck')])
+ [(7, 'jack'), (8, 'ed'), (9, 'fred'), (10, 'chuck')])
def test_via_select(self):
User = self.classes.User
@@ -1973,7 +1974,7 @@ class ParentTest(QueryTest, AssertsCompiledSQL):
try:
q = sess.query(Item).with_parent(u1)
assert False
- except sa_exc.InvalidRequestError, e:
+ except sa_exc.InvalidRequestError as e:
assert str(e) \
== "Could not locate a property which relates "\
"instances of class 'Item' to instances of class 'User'"
@@ -2058,7 +2059,7 @@ class ParentTest(QueryTest, AssertsCompiledSQL):
"addresses.id AS addresses_id, addresses.user_id AS "
"addresses_user_id, addresses.email_address AS addresses_email_address "
"FROM addresses WHERE :param_2 = addresses.user_id) AS anon_1",
- checkparams={u'param_1': 7, u'param_2': 8},
+ checkparams={'param_1': 7, 'param_2': 8},
)
def test_unique_binds_or(self):
@@ -2075,7 +2076,7 @@ class ParentTest(QueryTest, AssertsCompiledSQL):
"addresses_user_id, addresses.email_address AS "
"addresses_email_address FROM addresses WHERE "
":param_1 = addresses.user_id OR :param_2 = addresses.user_id",
- checkparams={u'param_1': 7, u'param_2': 8},
+ checkparams={'param_1': 7, 'param_2': 8},
)
class SynonymTest(QueryTest):
@@ -2117,9 +2118,9 @@ class SynonymTest(QueryTest):
options(joinedload(User.orders_syn)).all()
eq_(result, [
User(id=7, name='jack', orders=[
- Order(description=u'order 1'),
- Order(description=u'order 3'),
- Order(description=u'order 5')
+ Order(description='order 1'),
+ Order(description='order 3'),
+ Order(description='order 5')
])
])
self.assert_sql_count(testing.db, go, 1)
@@ -2133,9 +2134,9 @@ class SynonymTest(QueryTest):
options(joinedload(User.orders_syn_2)).all()
eq_(result, [
User(id=7, name='jack', orders=[
- Order(description=u'order 1'),
- Order(description=u'order 3'),
- Order(description=u'order 5')
+ Order(description='order 1'),
+ Order(description='order 3'),
+ Order(description='order 5')
])
])
self.assert_sql_count(testing.db, go, 1)
@@ -2149,9 +2150,9 @@ class SynonymTest(QueryTest):
options(joinedload('orders_syn_2')).all()
eq_(result, [
User(id=7, name='jack', orders=[
- Order(description=u'order 1'),
- Order(description=u'order 3'),
- Order(description=u'order 5')
+ Order(description='order 1'),
+ Order(description='order 3'),
+ Order(description='order 5')
])
])
self.assert_sql_count(testing.db, go, 1)
@@ -2355,7 +2356,7 @@ class OptionsTest(QueryTest):
if isinstance(item, type):
item = class_mapper(item)
else:
- if isinstance(item, basestring):
+ if isinstance(item, str):
item = inspect(r[-1]).mapper.attrs[item]
r.append(item)
return tuple(r)
diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py
index e5789eb7c..124202dcc 100644
--- a/test/orm/test_relationships.py
+++ b/test/orm/test_relationships.py
@@ -426,13 +426,13 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
c1 = Company('c1')
c2 = Company('c2')
- e1 = Employee(u'emp1', c1, 1)
- e2 = Employee(u'emp2', c1, 2, e1)
- e3 = Employee(u'emp3', c1, 3, e1)
- e4 = Employee(u'emp4', c1, 4, e3)
- e5 = Employee(u'emp5', c2, 1)
- e6 = Employee(u'emp6', c2, 2, e5)
- e7 = Employee(u'emp7', c2, 3, e5)
+ e1 = Employee('emp1', c1, 1)
+ e2 = Employee('emp2', c1, 2, e1)
+ e3 = Employee('emp3', c1, 3, e1)
+ e4 = Employee('emp4', c1, 4, e3)
+ e5 = Employee('emp5', c2, 1)
+ e6 = Employee('emp6', c2, 2, e5)
+ e7 = Employee('emp7', c2, 3, e5)
sess.add_all((c1, c2))
sess.commit()
@@ -642,7 +642,7 @@ class FKsAsPksTest(fixtures.MappedTest):
try:
sess.flush()
assert False
- except AssertionError, e:
+ except AssertionError as e:
startswith_(str(e),
"Dependency rule tried to blank-out "
"primary key column 'tableB.id' on instance ")
@@ -667,7 +667,7 @@ class FKsAsPksTest(fixtures.MappedTest):
try:
sess.flush()
assert False
- except AssertionError, e:
+ except AssertionError as e:
startswith_(str(e),
"Dependency rule tried to blank-out "
"primary key column 'tableB.id' on instance ")
@@ -1106,9 +1106,9 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
eq_(
sess.query(Subscriber).order_by(Subscriber.type).all(),
[
- Subscriber(id=1, type=u'A'),
- Subscriber(id=2, type=u'B'),
- Subscriber(id=2, type=u'C')
+ Subscriber(id=1, type='A'),
+ Subscriber(id=2, type='B'),
+ Subscriber(id=2, type='C')
]
)
@@ -1365,7 +1365,7 @@ class TypeMatchTest(fixtures.MappedTest):
try:
sess.add(a1)
assert False
- except AssertionError, err:
+ except AssertionError as err:
eq_(str(err),
"Attribute 'bs' on class '%s' doesn't handle "
"objects of type '%s'" % (A, C))
diff --git a/test/orm/test_session.py b/test/orm/test_session.py
index 7c2e8a3b8..45164483b 100644
--- a/test/orm/test_session.py
+++ b/test/orm/test_session.py
@@ -232,7 +232,7 @@ class ExecutionTest(_fixtures.FixtureTest):
# use :bindparam style
eq_(sess.execute("select * from users where id=:id",
{'id': 7}).fetchall(),
- [(7, u'jack')])
+ [(7, 'jack')])
# use :bindparam style
@@ -733,7 +733,7 @@ class SessionStateTest(_fixtures.FixtureTest):
# withstand a change? should this be
# more directly attempting to manipulate the identity_map ?
u1, u2, u3 = sess.query(User).all()
- for i, (key, value) in enumerate(sess.identity_map.iteritems()):
+ for i, (key, value) in enumerate(iter(sess.identity_map.items())):
if i == 2:
del u3
gc_collect()
@@ -747,7 +747,7 @@ class SessionStateTest(_fixtures.FixtureTest):
@event.listens_for(m, "after_update")
def e(mapper, conn, target):
sess = object_session(target)
- for entry in sess.identity_map.values():
+ for entry in list(sess.identity_map.values()):
entry.name = "5"
a1, a2 = User(name="1"), User(name="2")
@@ -845,7 +845,7 @@ class SessionStateWFixtureTest(_fixtures.FixtureTest):
u = session.query(User).filter_by(id=7).one()
# get everything to load in both directions
- print [a.user for a in u.addresses]
+ print([a.user for a in u.addresses])
# then see if expunge fails
session.expunge(u)
@@ -1187,7 +1187,7 @@ class StrongIdentityMapTest(_fixtures.FixtureTest):
s.flush()
user = s.query(User).one()
user = None
- print s.identity_map
+ print(s.identity_map)
gc_collect()
assert len(s.identity_map) == 1
@@ -1207,7 +1207,7 @@ class StrongIdentityMapTest(_fixtures.FixtureTest):
s = create_session(weak_identity_map=False)
mapper(User, users)
- for o in [User(name='u%s' % x) for x in xrange(10)]:
+ for o in [User(name='u%s' % x) for x in range(10)]:
s.add(o)
# o is still live after this loop...
diff --git a/test/orm/test_subquery_relations.py b/test/orm/test_subquery_relations.py
index 3ee94cae9..d493e0b7e 100644
--- a/test/orm/test_subquery_relations.py
+++ b/test/orm/test_subquery_relations.py
@@ -1552,9 +1552,9 @@ class CyclicalInheritingEagerTestTwo(fixtures.DeclarativeMappedTest,
Movie = self.classes.Movie
session = Session(testing.db)
- rscott = Director(name=u"Ridley Scott")
- alien = Movie(title=u"Alien")
- brunner = Movie(title=u"Blade Runner")
+ rscott = Director(name="Ridley Scott")
+ alien = Movie(title="Alien")
+ brunner = Movie(title="Blade Runner")
rscott.movies.append(brunner)
rscott.movies.append(alien)
session.add_all([rscott, alien, brunner])
diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py
index 6be1672e1..4b9799d47 100644
--- a/test/orm/test_unitofwork.py
+++ b/test/orm/test_unitofwork.py
@@ -6,6 +6,7 @@ import datetime
from sqlalchemy.orm import mapper as orm_mapper
import sqlalchemy as sa
+from sqlalchemy.util import u, ue, b
from sqlalchemy import Integer, String, ForeignKey, literal_column, event
from sqlalchemy.testing import engines
from sqlalchemy import testing
@@ -87,7 +88,7 @@ class UnicodeTest(fixtures.MappedTest):
mapper(Test, uni_t1)
- txt = u"\u0160\u0110\u0106\u010c\u017d"
+ txt = ue("\u0160\u0110\u0106\u010c\u017d")
t1 = Test(id=1, txt=txt)
self.assert_(t1.txt == txt)
@@ -107,7 +108,7 @@ class UnicodeTest(fixtures.MappedTest):
't2s': relationship(Test2)})
mapper(Test2, uni_t2)
- txt = u"\u0160\u0110\u0106\u010c\u017d"
+ txt = ue("\u0160\u0110\u0106\u010c\u017d")
t1 = Test(txt=txt)
t1.t2s.append(Test2())
t1.t2s.append(Test2())
@@ -132,16 +133,16 @@ class UnicodeSchemaTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
t1 = Table('unitable1', metadata,
- Column(u'méil', Integer, primary_key=True, key='a', test_needs_autoincrement=True),
- Column(u'\u6e2c\u8a66', Integer, key='b'),
+ Column(u('méil'), Integer, primary_key=True, key='a', test_needs_autoincrement=True),
+ Column(ue('\u6e2c\u8a66'), Integer, key='b'),
Column('type', String(20)),
test_needs_fk=True,
test_needs_autoincrement=True)
- t2 = Table(u'Unitéble2', metadata,
- Column(u'méil', Integer, primary_key=True, key="cc", test_needs_autoincrement=True),
- Column(u'\u6e2c\u8a66', Integer,
- ForeignKey(u'unitable1.a'), key="d"),
- Column(u'\u6e2c\u8a66_2', Integer, key="e"),
+ t2 = Table(u('Unitéble2'), metadata,
+ Column(u('méil'), Integer, primary_key=True, key="cc", test_needs_autoincrement=True),
+ Column(ue('\u6e2c\u8a66'), Integer,
+ ForeignKey('unitable1.a'), key="d"),
+ Column(ue('\u6e2c\u8a66_2'), Integer, key="e"),
test_needs_fk=True,
test_needs_autoincrement=True)
@@ -237,12 +238,7 @@ class BinaryHistTest(fixtures.MappedTest, testing.AssertsExecutionResults):
def test_binary_equality(self):
Foo, t1 = self.classes.Foo, self.tables.t1
-
- # Py3K
- #data = b"this is some data"
- # Py2K
- data = "this is some data"
- # end Py2K
+ data = b("this is some data")
mapper(Foo, t1)
@@ -1054,13 +1050,13 @@ class OneToManyTest(_fixtures.FixtureTest):
session.flush()
user_rows = users.select(users.c.id.in_([u.id])).execute().fetchall()
- eq_(user_rows[0].values(), [u.id, 'one2manytester'])
+ eq_(list(user_rows[0].values()), [u.id, 'one2manytester'])
address_rows = addresses.select(
addresses.c.id.in_([a.id, a2.id]),
order_by=[addresses.c.email_address]).execute().fetchall()
- eq_(address_rows[0].values(), [a2.id, u.id, 'lala@test.org'])
- eq_(address_rows[1].values(), [a.id, u.id, 'one2many@test.org'])
+ eq_(list(address_rows[0].values()), [a2.id, u.id, 'lala@test.org'])
+ eq_(list(address_rows[1].values()), [a.id, u.id, 'one2many@test.org'])
userid = u.id
addressid = a2.id
@@ -1071,7 +1067,7 @@ class OneToManyTest(_fixtures.FixtureTest):
address_rows = addresses.select(
addresses.c.id == addressid).execute().fetchall()
- eq_(address_rows[0].values(),
+ eq_(list(address_rows[0].values()),
[addressid, userid, 'somethingnew@foo.com'])
self.assert_(u.id == userid and a2.id == addressid)
@@ -1501,18 +1497,18 @@ class SaveTest(_fixtures.FixtureTest):
assert u.name == 'multitester'
user_rows = users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
- eq_(user_rows[0].values(), [u.foo_id, 'multitester'])
+ eq_(list(user_rows[0].values()), [u.foo_id, 'multitester'])
address_rows = addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
- eq_(address_rows[0].values(), [u.id, u.foo_id, 'multi@test.org'])
+ eq_(list(address_rows[0].values()), [u.id, u.foo_id, 'multi@test.org'])
u.email = 'lala@hey.com'
u.name = 'imnew'
session.flush()
user_rows = users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
- eq_(user_rows[0].values(), [u.foo_id, 'imnew'])
+ eq_(list(user_rows[0].values()), [u.foo_id, 'imnew'])
address_rows = addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
- eq_(address_rows[0].values(), [u.id, u.foo_id, 'lala@hey.com'])
+ eq_(list(address_rows[0].values()), [u.id, u.foo_id, 'lala@hey.com'])
session.expunge_all()
u = session.query(User).get(id)
@@ -1650,7 +1646,7 @@ class ManyToOneTest(_fixtures.FixtureTest):
l = sa.select([users, addresses],
sa.and_(users.c.id==addresses.c.user_id,
addresses.c.id==a.id)).execute()
- eq_(l.first().values(),
+ eq_(list(l.first().values()),
[a.user.id, 'asdf8d', a.id, a.user_id, 'theater@foo.com'])
def test_many_to_one_1(self):
@@ -2127,7 +2123,6 @@ class SaveTest3(fixtures.MappedTest):
assert assoc.count().scalar() == 2
i.keywords = []
- print i.keywords
session.flush()
assert assoc.count().scalar() == 0
diff --git a/test/orm/test_unitofworkv2.py b/test/orm/test_unitofworkv2.py
index 7c7337c62..34648c256 100644
--- a/test/orm/test_unitofworkv2.py
+++ b/test/orm/test_unitofworkv2.py
@@ -26,7 +26,7 @@ class AssertsUOW(object):
def _assert_uow_size(self, session, expected ):
uow = self._get_test_uow(session)
postsort_actions = uow._generate_actions()
- print postsort_actions
+ print(postsort_actions)
eq_(len(postsort_actions), expected, postsort_actions)
class UOWTest(_fixtures.FixtureTest,
@@ -125,12 +125,12 @@ class RudimentaryFlushTest(UOWTest):
CompiledSQL(
"UPDATE addresses SET user_id=:user_id WHERE "
"addresses.id = :addresses_id",
- lambda ctx: [{u'addresses_id': a1.id, 'user_id': None}]
+ lambda ctx: [{'addresses_id': a1.id, 'user_id': None}]
),
CompiledSQL(
"UPDATE addresses SET user_id=:user_id WHERE "
"addresses.id = :addresses_id",
- lambda ctx: [{u'addresses_id': a2.id, 'user_id': None}]
+ lambda ctx: [{'addresses_id': a2.id, 'user_id': None}]
),
CompiledSQL(
"DELETE FROM users WHERE users.id = :id",
@@ -235,12 +235,12 @@ class RudimentaryFlushTest(UOWTest):
CompiledSQL(
"UPDATE addresses SET user_id=:user_id WHERE "
"addresses.id = :addresses_id",
- lambda ctx: [{u'addresses_id': a1.id, 'user_id': None}]
+ lambda ctx: [{'addresses_id': a1.id, 'user_id': None}]
),
CompiledSQL(
"UPDATE addresses SET user_id=:user_id WHERE "
"addresses.id = :addresses_id",
- lambda ctx: [{u'addresses_id': a2.id, 'user_id': None}]
+ lambda ctx: [{'addresses_id': a2.id, 'user_id': None}]
),
CompiledSQL(
"DELETE FROM users WHERE users.id = :id",
@@ -1149,7 +1149,7 @@ class SingleCycleM2MTest(fixtures.MappedTest,
"nodes, node_to_nodes WHERE :param_1 = "
"node_to_nodes.right_node_id AND nodes.id = "
"node_to_nodes.left_node_id" ,
- lambda ctx:{u'param_1': n1.id},
+ lambda ctx:{'param_1': n1.id},
),
CompiledSQL(
"DELETE FROM node_to_nodes WHERE "
diff --git a/test/orm/test_update_delete.py b/test/orm/test_update_delete.py
index 65d69538e..6915ac8a2 100644
--- a/test/orm/test_update_delete.py
+++ b/test/orm/test_update_delete.py
@@ -188,22 +188,22 @@ class UpdateDeleteTest(fixtures.MappedTest):
update({'age': User.age - 10}, synchronize_session='evaluate')
eq_([john.age, jack.age, jill.age, jane.age], [25,37,29,27])
- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,37,29,27]))
+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,37,29,27])))
sess.query(User).filter(User.age > 29).\
update({User.age: User.age - 10}, synchronize_session='evaluate')
eq_([john.age, jack.age, jill.age, jane.age], [25,27,29,27])
- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,27,29,27]))
+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,27,29,27])))
sess.query(User).filter(User.age > 27).\
update({users.c.age: User.age - 10}, synchronize_session='evaluate')
eq_([john.age, jack.age, jill.age, jane.age], [25,27,19,27])
- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,27,19,27]))
+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,27,19,27])))
sess.query(User).filter(User.age == 25).\
update({User.age: User.age - 10}, synchronize_session='fetch')
eq_([john.age, jack.age, jill.age, jane.age], [15,27,19,27])
- eq_(sess.query(User.age).order_by(User.id).all(), zip([15,27,19,27]))
+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([15,27,19,27])))
def test_update_against_metadata(self):
User, users = self.classes.User, self.tables.users
@@ -211,7 +211,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess = Session()
sess.query(users).update({users.c.age: 29}, synchronize_session=False)
- eq_(sess.query(User.age).order_by(User.id).all(), zip([29,29,29,29]))
+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([29,29,29,29])))
def test_update_with_bindparams(self):
User = self.classes.User
@@ -224,7 +224,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
update({'age': User.age - 10}, synchronize_session='fetch')
eq_([john.age, jack.age, jill.age, jane.age], [25,37,29,27])
- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,37,29,27]))
+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,37,29,27])))
def test_update_without_load(self):
User = self.classes.User
@@ -233,7 +233,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter(User.id == 3).\
update({'age': 44}, synchronize_session='fetch')
- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,47,44,37]))
+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,47,44,37])))
def test_update_changes_resets_dirty(self):
User = self.classes.User
@@ -300,7 +300,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
update({'age': User.age - 10}, synchronize_session='fetch')
eq_([john.age, jack.age, jill.age, jane.age], [25,37,29,27])
- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,37,29,27]))
+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,37,29,27])))
@testing.fails_if(lambda: not testing.db.dialect.supports_sane_rowcount)
def test_update_returns_rowcount(self):
@@ -334,7 +334,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).update({'age': 42}, synchronize_session='evaluate')
eq_([john.age, jack.age, jill.age, jane.age], [42,42,42,42])
- eq_(sess.query(User.age).order_by(User.id).all(), zip([42,42,42,42]))
+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([42,42,42,42])))
def test_delete_all(self):
User = self.classes.User
@@ -516,7 +516,7 @@ class UpdateDeleteIgnoresLoadersTest(fixtures.MappedTest):
eq_([foo.title, bar.title, baz.title], ['foofoo','barbar', 'baz'])
eq_(sess.query(Document.title).order_by(Document.id).all(),
- zip(['foofoo','barbar', 'baz']))
+ list(zip(['foofoo','barbar', 'baz'])))
def test_update_with_explicit_joinedload(self):
User = self.classes.User
@@ -528,7 +528,7 @@ class UpdateDeleteIgnoresLoadersTest(fixtures.MappedTest):
update({'age': User.age - 10}, synchronize_session='fetch')
eq_([john.age, jack.age, jill.age, jane.age], [25,37,29,27])
- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,37,29,27]))
+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,37,29,27])))
def test_delete_with_eager_relationships(self):
Document = self.classes.Document
@@ -538,7 +538,7 @@ class UpdateDeleteIgnoresLoadersTest(fixtures.MappedTest):
sess.query(Document).filter(Document.user_id == 1).\
delete(synchronize_session=False)
- eq_(sess.query(Document.title).all(), zip(['baz']))
+ eq_(sess.query(Document.title).all(), list(zip(['baz'])))
class UpdateDeleteFromTest(fixtures.MappedTest):
@classmethod
diff --git a/test/orm/test_utils.py b/test/orm/test_utils.py
index b2853a8b8..49ff61ea0 100644
--- a/test/orm/test_utils.py
+++ b/test/orm/test_utils.py
@@ -52,10 +52,10 @@ class AliasedClassTest(fixtures.TestBase):
alias = aliased(Point)
assert Point.zero
- # Py2K
- # TODO: what is this testing ??
- assert not getattr(alias, 'zero')
- # end Py2K
+# start Py2K
+# # TODO: what is this testing ??
+# assert not getattr(alias, 'zero')
+# end Py2K
def test_classmethods(self):
class Point(object):
@@ -123,17 +123,19 @@ class AliasedClassTest(fixtures.TestBase):
self.func = func
def __get__(self, instance, owner):
if instance is None:
- # Py3K
- #args = (self.func, owner)
- # Py2K
- args = (self.func, owner, owner.__class__)
- # end Py2K
+# start Py3K
+ args = (self.func, owner)
+# end Py3K
+# start Py2K
+# args = (self.func, owner, owner.__class__)
+# end Py2K
else:
- # Py3K
- #args = (self.func, instance)
- # Py2K
- args = (self.func, instance, owner)
- # end Py2K
+# start Py3K
+ args = (self.func, instance)
+# end Py3K
+# start Py2K
+# args = (self.func, instance, owner)
+# end Py2K
return types.MethodType(*args)
class PropertyDescriptor(object):
@@ -368,13 +370,6 @@ class PathRegistryTest(_fixtures.FixtureTest):
assert p1.contains_mapper(umapper)
assert not p1.contains_mapper(amapper)
- def _registry(self):
- class Reg(dict):
- @property
- def _attributes(self):
- return self
- return Reg()
-
def test_path(self):
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
@@ -394,7 +389,7 @@ class PathRegistryTest(_fixtures.FixtureTest):
)
def test_registry_set(self):
- reg = self._registry()
+ reg = {}
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
@@ -415,7 +410,7 @@ class PathRegistryTest(_fixtures.FixtureTest):
)
def test_registry_get(self):
- reg = self._registry()
+ reg = {}
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
@@ -437,7 +432,7 @@ class PathRegistryTest(_fixtures.FixtureTest):
eq_(p3.get(reg, "p1key"), None)
def test_registry_contains(self):
- reg = self._registry()
+ reg = {}
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
@@ -457,7 +452,7 @@ class PathRegistryTest(_fixtures.FixtureTest):
assert not p2.contains(reg, "fake")
def test_registry_setdefault(self):
- reg = self._registry()
+ reg = {}
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
diff --git a/test/perf/README b/test/perf/README
deleted file mode 100644
index 1d03cb5ce..000000000
--- a/test/perf/README
+++ /dev/null
@@ -1,17 +0,0 @@
-This directory contains informal scripts used to stress test various
-library subsections over the years, including testing of memory usage,
-function call count, threading behavior.
-
-The scripts here are *not* part of the automated test suite, and instead
-were used at the time of development for particular features or
-performance enhancements in an ad-hoc fashion. Ideally
-the various functionality tested within would be brought under the
-umbrella of controlled, automated tests. Many of the scripts here
-are out of date and are possibly covered by formal performance tests
-elsewhere.
-
-Current automated stress and performance tests are in test/aaa_profiling/,
-which test either for expected function call count, or flat growth in memory
-usage over time. These tests are part of the automated test suite
-and are maintained for 100% success rate along Python versions from 2.4 through
-current 3 versions. \ No newline at end of file
diff --git a/test/perf/insertspeed.py b/test/perf/insertspeed.py
deleted file mode 100644
index 03d2c4144..000000000
--- a/test/perf/insertspeed.py
+++ /dev/null
@@ -1,109 +0,0 @@
-import sys, time
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from sqlalchemy.testing import profiling
-
-db = create_engine('sqlite://')
-metadata = MetaData(db)
-Person_table = Table('Person', metadata,
- Column('name', String(40)),
- Column('sex', Integer),
- Column('age', Integer))
-
-
-def sa_unprofiled_insertmany(n):
- i = Person_table.insert()
- i.execute([{'name':'John Doe','sex':1,'age':35} for j in xrange(n)])
-
-def sqlite_unprofiled_insertmany(n):
- conn = db.connect().connection
- c = conn.cursor()
- persons = [('john doe', 1, 35) for i in xrange(n)]
- c.executemany("insert into Person(name, sex, age) values (?,?,?)", persons)
-
-@profiling.profiled('sa_profiled_insert_many', always=True)
-def sa_profiled_insert_many(n):
- i = Person_table.insert()
- i.execute([{'name':'John Doe','sex':1,'age':35} for j in xrange(n)])
- s = Person_table.select()
- r = s.execute()
- res = [[value for value in row] for row in r.fetchall()]
-
-def sqlite_unprofiled_insert(n):
- conn = db.connect().connection
- c = conn.cursor()
- for j in xrange(n):
- c.execute("insert into Person(name, sex, age) values (?,?,?)",
- ('john doe', 1, 35))
-
-def sa_unprofiled_insert(n):
- # Another option is to build Person_table.insert() outside of the
- # loop. But it doesn't make much of a difference, so might as well
- # use the worst-case/naive version here.
- for j in xrange(n):
- Person_table.insert().execute({'name':'John Doe','sex':1,'age':35})
-
-@profiling.profiled('sa_profiled_insert', always=True)
-def sa_profiled_insert(n):
- i = Person_table.insert()
- for j in xrange(n):
- i.execute({'name':'John Doe','sex':1,'age':35})
- s = Person_table.select()
- r = s.execute()
- res = [[value for value in row] for row in r.fetchall()]
-
-def run_timed(fn, label, *args, **kw):
- metadata.drop_all()
- metadata.create_all()
-
- sys.stdout.write("%s (%s): " % (label, ', '.join([str(a) for a in args])))
- sys.stdout.flush()
-
- t = time.clock()
- fn(*args, **kw)
- t2 = time.clock()
-
- sys.stdout.write("%0.2f seconds\n" % (t2 - t))
-
-def run_profiled(fn, label, *args, **kw):
- metadata.drop_all()
- metadata.create_all()
-
- print "%s (%s)" % (label, ', '.join([str(a) for a in args]))
- fn(*args, **kw)
-
-def all():
- try:
- print "Bulk INSERTS via executemany():\n"
-
- run_timed(sqlite_unprofiled_insertmany,
- 'pysqlite bulk insert',
- 50000)
-
- run_timed(sa_unprofiled_insertmany,
- 'SQLAlchemy bulk insert',
- 50000)
-
- run_profiled(sa_profiled_insert_many,
- 'SQLAlchemy bulk insert/select, profiled',
- 50000)
-
- print "\nIndividual INSERTS via execute():\n"
-
- run_timed(sqlite_unprofiled_insert,
- "pysqlite individual insert",
- 50000)
-
- run_timed(sa_unprofiled_insert,
- "SQLAlchemy individual insert",
- 50000)
-
- run_profiled(sa_profiled_insert,
- 'SQLAlchemy individual insert/select, profiled',
- 50000)
-
- finally:
- metadata.drop_all()
-
-if __name__ == '__main__':
- all()
diff --git a/test/perf/large_flush.py b/test/perf/large_flush.py
deleted file mode 100644
index b1ecce852..000000000
--- a/test/perf/large_flush.py
+++ /dev/null
@@ -1,84 +0,0 @@
-import sqlalchemy as sa
-from sqlalchemy import create_engine, MetaData, orm
-from sqlalchemy import Column, ForeignKey
-from sqlalchemy import Integer, String
-from sqlalchemy.orm import mapper
-from sqlalchemy.testing import profiling
-
-class Object(object):
- pass
-
-class Q(Object):
- pass
-
-class A(Object):
- pass
-
-class C(Object):
- pass
-
-class WC(C):
- pass
-
-engine = create_engine('sqlite:///:memory:', echo=True)
-
-sm = orm.sessionmaker(bind=engine)
-
-SA_Session = orm.scoped_session(sm)
-
-SA_Metadata = MetaData()
-
-object_table = sa.Table('Object',
- SA_Metadata,
- Column('ObjectID', Integer,primary_key=True),
- Column('Type', String(1), nullable=False))
-
-q_table = sa.Table('Q',
- SA_Metadata,
- Column('QID', Integer, ForeignKey('Object.ObjectID'),primary_key=True))
-
-c_table = sa.Table('C',
- SA_Metadata,
- Column('CID', Integer, ForeignKey('Object.ObjectID'),primary_key=True))
-
-wc_table = sa.Table('WC',
- SA_Metadata,
- Column('WCID', Integer, ForeignKey('C.CID'), primary_key=True))
-
-a_table = sa.Table('A',
- SA_Metadata,
- Column('AID', Integer, ForeignKey('Object.ObjectID'),primary_key=True),
- Column('QID', Integer, ForeignKey('Q.QID')),
- Column('CID', Integer, ForeignKey('C.CID')))
-
-mapper(Object, object_table, polymorphic_on=object_table.c.Type, polymorphic_identity='O')
-
-mapper(Q, q_table, inherits=Object, polymorphic_identity='Q')
-mapper(C, c_table, inherits=Object, polymorphic_identity='C')
-mapper(WC, wc_table, inherits=C, polymorphic_identity='W')
-
-mapper(A, a_table, inherits=Object, polymorphic_identity='A',
- properties = {
- 'Q' : orm.relation(Q,primaryjoin=a_table.c.QID==q_table.c.QID,
- backref='As'
- ),
- 'C' : orm.relation(C,primaryjoin=a_table.c.CID==c_table.c.CID,
- backref='A',
- uselist=False)
- }
- )
-
-SA_Metadata.create_all(engine)
-
-@profiling.profiled('large_flush', always=True, sort=['file'])
-def generate_error():
- q = Q()
- for j in range(100): #at 306 the error does not pop out (depending on recursion depth)
- a = A()
- a.Q = q
- a.C = WC()
-
- SA_Session.add(q)
- SA_Session.commit() #here the error pops out
-
-generate_error() \ No newline at end of file
diff --git a/test/perf/objselectspeed.py b/test/perf/objselectspeed.py
deleted file mode 100644
index c0ed88444..000000000
--- a/test/perf/objselectspeed.py
+++ /dev/null
@@ -1,146 +0,0 @@
-import time, resource
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from sqlalchemy.testing.util import gc_collect
-from sqlalchemy.testing import profiling
-
-db = create_engine('sqlite://')
-metadata = MetaData(db)
-Person_table = Table('Person', metadata,
- Column('id', Integer, primary_key=True),
- Column('type', String(10)),
- Column('name', String(40)),
- Column('sex', Integer),
- Column('age', Integer))
-
-
-Employee_table = Table('Employee', metadata,
- Column('id', Integer, ForeignKey('Person.id'), primary_key=True),
- Column('foo', String(40)),
- Column('bar', Integer),
- Column('bat', Integer))
-
-class RawPerson(object): pass
-class Person(object): pass
-mapper(Person, Person_table)
-
-class JoinedPerson(object):pass
-class Employee(JoinedPerson):pass
-mapper(JoinedPerson, Person_table, \
- polymorphic_on=Person_table.c.type, polymorphic_identity='person')
-mapper(Employee, Employee_table, \
- inherits=JoinedPerson, polymorphic_identity='employee')
-compile_mappers()
-
-def setup():
- metadata.create_all()
- i = Person_table.insert()
- data = [{'name':'John Doe','sex':1,'age':35, 'type':'employee'}] * 100
- for j in xrange(500):
- i.execute(data)
-
- # note we arent fetching from employee_table,
- # so we can leave it empty even though its "incorrect"
- #i = Employee_table.insert()
- #data = [{'foo':'foo', 'bar':'bar':'bat':'bat'}] * 100
- #for j in xrange(500):
- # i.execute(data)
-
- print "Inserted 50,000 rows"
-
-def sqlite_select(entity_cls):
- conn = db.connect().connection
- cr = conn.cursor()
- cr.execute("SELECT id, name, sex, age FROM Person")
- people = []
- for row in cr.fetchall():
- person = entity_cls()
- person.id = row[0]
- person.name = row[1]
- person.sex = row[2]
- person.age = row[3]
- people.append(person)
- cr.close()
- conn.close()
-
-def sql_select(entity_cls):
- people = []
- for row in Person_table.select().execute().fetchall():
- person = entity_cls()
- person.id = row['id']
- person.name = row['name']
- person.sex = row['sex']
- person.age = row['age']
- people.append(person)
-
-#@profiling.profiled(report=True, always=True)
-def orm_select():
- session = create_session()
- people = session.query(Person).all()
-
-#@profiling.profiled(report=True, always=True)
-def joined_orm_select():
- session = create_session()
- people = session.query(JoinedPerson).all()
-
-def all():
- setup()
- try:
- t, t2 = 0, 0
- def usage(label):
- now = resource.getrusage(resource.RUSAGE_SELF)
- print "%s: %0.3fs real, %0.3fs user, %0.3fs sys" % (
- label, t2 - t,
- now.ru_utime - usage.last.ru_utime,
- now.ru_stime - usage.last.ru_stime)
- usage.snap(now)
- usage.snap = lambda stats=None: setattr(
- usage, 'last', stats or resource.getrusage(resource.RUSAGE_SELF))
-
- gc_collect()
- usage.snap()
- t = time.clock()
- sqlite_select(RawPerson)
- t2 = time.clock()
- usage('sqlite select/native')
-
- gc_collect()
- usage.snap()
- t = time.clock()
- sqlite_select(Person)
- t2 = time.clock()
- usage('sqlite select/instrumented')
-
- gc_collect()
- usage.snap()
- t = time.clock()
- sql_select(RawPerson)
- t2 = time.clock()
- usage('sqlalchemy.sql select/native')
-
- gc_collect()
- usage.snap()
- t = time.clock()
- sql_select(Person)
- t2 = time.clock()
- usage('sqlalchemy.sql select/instrumented')
-
- gc_collect()
- usage.snap()
- t = time.clock()
- orm_select()
- t2 = time.clock()
- usage('sqlalchemy.orm fetch')
-
- gc_collect()
- usage.snap()
- t = time.clock()
- joined_orm_select()
- t2 = time.clock()
- usage('sqlalchemy.orm "joined" fetch')
- finally:
- metadata.drop_all()
-
-
-if __name__ == '__main__':
- all()
diff --git a/test/perf/objupdatespeed.py b/test/perf/objupdatespeed.py
deleted file mode 100644
index 98d10180e..000000000
--- a/test/perf/objupdatespeed.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import time, resource
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from sqlalchemy.testing import *
-from sqlalchemy.testing.util import gc_collect
-
-
-NUM = 100
-
-metadata = MetaData(testing.db)
-Person_table = Table('Person', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String(40)),
- Column('sex', Integer),
- Column('age', Integer))
-
-Email_table = Table('Email', metadata,
- Column('id', Integer, primary_key=True),
- Column('person_id', Integer, ForeignKey('Person.id')),
- Column('address', String(300)))
-
-class Person(object):
- pass
-class Email(object):
- def __repr__(self):
- return '<email %s %s>' % (getattr(self, 'id', None),
- getattr(self, 'address', None))
-
-mapper(Person, Person_table, properties={
- 'emails': relationship(Email, backref='owner', lazy='joined')
- })
-mapper(Email, Email_table)
-compile_mappers()
-
-def setup():
- metadata.create_all()
- i = Person_table.insert()
- data = [{'name':'John Doe','sex':1,'age':35}] * NUM
- i.execute(data)
-
- i = Email_table.insert()
- for j in xrange(1, NUM + 1):
- i.execute(address='foo@bar', person_id=j)
- if j % 2:
- i.execute(address='baz@quux', person_id=j)
-
- print "Inserted %d rows." % (NUM + NUM + (NUM // 2))
-
-def orm_select(session):
- return session.query(Person).all()
-
-@profiling.profiled('update_and_flush')
-def update_and_flush(session, people):
- for p in people:
- p.name = 'Exene Cervenka'
- p.sex = 2
- p.emails[0].address = 'hoho@lala'
- session.flush()
-
-def all():
- setup()
- try:
- t, t2 = 0, 0
- def usage(label):
- now = resource.getrusage(resource.RUSAGE_SELF)
- print "%s: %0.3fs real, %0.3fs user, %0.3fs sys" % (
- label, t2 - t,
- now.ru_utime - usage.last.ru_utime,
- now.ru_stime - usage.last.ru_stime)
- usage.snap(now)
- usage.snap = lambda stats=None: setattr(
- usage, 'last', stats or resource.getrusage(resource.RUSAGE_SELF))
-
- session = create_session()
-
- gc_collect()
- usage.snap()
- t = time.clock()
- people = orm_select(session)
- t2 = time.clock()
- usage('load objects')
-
- gc_collect()
- usage.snap()
- t = time.clock()
- update_and_flush(session, people)
- t2 = time.clock()
- usage('update and flush')
- finally:
- metadata.drop_all()
-
-
-if __name__ == '__main__':
- all()
diff --git a/test/perf/orm2010.py b/test/perf/orm2010.py
index 47257ba87..937e6ddff 100644
--- a/test/perf/orm2010.py
+++ b/test/perf/orm2010.py
@@ -95,7 +95,7 @@ def runit():
name="Boss %d" % i,
golf_average=Decimal(random.randint(40, 150))
)
- for i in xrange(1000)
+ for i in range(1000)
]
sess.add_all(bosses)
@@ -107,7 +107,7 @@ def runit():
name="Grunt %d" % i,
savings=Decimal(random.randint(5000000, 15000000) / 100)
)
- for i in xrange(10000)
+ for i in range(10000)
]
# Assign each Grunt a Boss. Look them up in the DB
@@ -149,15 +149,15 @@ stats = pstats.Stats(filename)
counts_by_methname = dict((key[2], stats.stats[key][0]) for key in stats.stats)
-print "SQLA Version: %s" % __version__
-print "Total calls %d" % stats.total_calls
-print "Total cpu seconds: %.2f" % stats.total_tt
-print 'Total execute calls: %d' \
+print("SQLA Version: %s" % __version__)
+print("Total calls %d" % stats.total_calls)
+print("Total cpu seconds: %.2f" % stats.total_tt)
+print('Total execute calls: %d' \
% counts_by_methname["<method 'execute' of 'sqlite3.Cursor' "
- "objects>"]
-print 'Total executemany calls: %d' \
+ "objects>"])
+print('Total executemany calls: %d' \
% counts_by_methname.get("<method 'executemany' of 'sqlite3.Cursor' "
- "objects>", 0)
+ "objects>", 0))
#stats.sort_stats('time', 'calls')
#stats.print_stats()
diff --git a/test/perf/ormsession.py b/test/perf/ormsession.py
deleted file mode 100644
index 5e38d6e80..000000000
--- a/test/perf/ormsession.py
+++ /dev/null
@@ -1,225 +0,0 @@
-import time
-from datetime import datetime
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from sqlalchemy.testing import *
-from sqlalchemy.testing.profiling import profiled
-
-class Item(object):
- def __repr__(self):
- return 'Item<#%s "%s">' % (self.id, self.name)
-class SubItem(object):
- def __repr__(self):
- return 'SubItem<#%s "%s">' % (self.id, self.name)
-class Customer(object):
- def __repr__(self):
- return 'Customer<#%s "%s">' % (self.id, self.name)
-class Purchase(object):
- def __repr__(self):
- return 'Purchase<#%s "%s">' % (self.id, self.purchase_date)
-
-items, subitems, customers, purchases, purchaseitems = \
- None, None, None, None, None
-
-metadata = MetaData()
-
-@profiled('table')
-def define_tables():
- global items, subitems, customers, purchases, purchaseitems
- items = Table('items', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String(100)),
- test_needs_acid=True)
- subitems = Table('subitems', metadata,
- Column('id', Integer, primary_key=True),
- Column('item_id', Integer, ForeignKey('items.id'),
- nullable=False),
- Column('name', String(100), server_default='no name'),
- test_needs_acid=True)
- customers = Table('customers', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String(100)),
- *[Column("col_%s" % chr(i), String(64), default=str(i))
- for i in range(97,117)],
- **dict(test_needs_acid=True))
- purchases = Table('purchases', metadata,
- Column('id', Integer, primary_key=True),
- Column('customer_id', Integer,
- ForeignKey('customers.id'), nullable=False),
- Column('purchase_date', DateTime,
- default=datetime.now),
- test_needs_acid=True)
- purchaseitems = Table('purchaseitems', metadata,
- Column('purchase_id', Integer,
- ForeignKey('purchases.id'),
- nullable=False, primary_key=True),
- Column('item_id', Integer, ForeignKey('items.id'),
- nullable=False, primary_key=True),
- test_needs_acid=True)
-
-@profiled('mapper')
-def setup_mappers():
- mapper(Item, items, properties={
- 'subitems': relationship(SubItem, backref='item', lazy='select')
- })
- mapper(SubItem, subitems)
- mapper(Customer, customers, properties={
- 'purchases': relationship(Purchase, lazy='select', backref='customer')
- })
- mapper(Purchase, purchases, properties={
- 'items': relationship(Item, lazy='select', secondary=purchaseitems)
- })
-
-@profiled('inserts')
-def insert_data():
- q_items = 1000
- q_sub_per_item = 10
- q_customers = 1000
-
- con = testing.db.connect()
-
- transaction = con.begin()
- data, subdata = [], []
- for item_id in xrange(1, q_items + 1):
- data.append({'name': "item number %s" % item_id})
- for subitem_id in xrange(1, (item_id % q_sub_per_item) + 1):
- subdata.append({'item_id': item_id,
- 'name': "subitem number %s" % subitem_id})
- if item_id % 100 == 0:
- items.insert().execute(*data)
- subitems.insert().execute(*subdata)
- del data[:]
- del subdata[:]
- if data:
- items.insert().execute(*data)
- if subdata:
- subitems.insert().execute(*subdata)
- transaction.commit()
-
- transaction = con.begin()
- data = []
- for customer_id in xrange(1, q_customers):
- data.append({'name': "customer number %s" % customer_id})
- if customer_id % 100 == 0:
- customers.insert().execute(*data)
- del data[:]
- if data:
- customers.insert().execute(*data)
- transaction.commit()
-
- transaction = con.begin()
- data, subdata = [], []
- order_t = int(time.time()) - (5000 * 5 * 60)
- current = xrange(1, q_customers)
- step, purchase_id = 1, 0
- while current:
- next = []
- for customer_id in current:
- order_t += 300
- data.append({'customer_id': customer_id,
- 'purchase_date': datetime.fromtimestamp(order_t)})
- purchase_id += 1
- for item_id in range(customer_id % 200, customer_id + 1, 200):
- if item_id != 0:
- subdata.append({'purchase_id': purchase_id,
- 'item_id': item_id})
- if customer_id % 10 > step:
- next.append(customer_id)
-
- if len(data) >= 100:
- purchases.insert().execute(*data)
- if subdata:
- purchaseitems.insert().execute(*subdata)
- del data[:]
- del subdata[:]
- step, current = step + 1, next
-
- if data:
- purchases.insert().execute(*data)
- if subdata:
- purchaseitems.insert().execute(*subdata)
- transaction.commit()
-
-@profiled('queries')
-def run_queries():
- session = create_session()
- # no explicit transaction here.
-
- # build a report of summarizing the last 50 purchases and
- # the top 20 items from all purchases
-
- q = session.query(Purchase). \
- order_by(desc(Purchase.purchase_date)). \
- limit(50).\
- options(joinedload('items'), joinedload('items.subitems'),
- joinedload('customer'))
-
- report = []
- # "write" the report. pretend it's going to a web template or something,
- # the point is to actually pull data through attributes and collections.
- for purchase in q:
- report.append(purchase.customer.name)
- report.append(purchase.customer.col_a)
- report.append(purchase.purchase_date)
- for item in purchase.items:
- report.append(item.name)
- report.extend([s.name for s in item.subitems])
-
- # mix a little low-level with orm
- # pull a report of the top 20 items of all time
- _item_id = purchaseitems.c.item_id
- top_20_q = select([func.distinct(_item_id).label('id')],
- group_by=[purchaseitems.c.purchase_id, _item_id],
- order_by=[desc(func.count(_item_id)), _item_id],
- limit=20)
- ids = [r.id for r in top_20_q.execute().fetchall()]
- q2 = session.query(Item).filter(Item.id.in_(ids))
-
- for num, item in enumerate(q2):
- report.append("number %s: %s" % (num + 1, item.name))
-
-@profiled('creating')
-def create_purchase():
- # commit a purchase
- customer_id = 100
- item_ids = (10,22,34,46,58)
-
- session = create_session()
- session.begin()
-
- customer = session.query(Customer).get(customer_id)
- items = session.query(Item).filter(Item.id.in_(item_ids))
-
- purchase = Purchase()
- purchase.customer = customer
- purchase.items.extend(items)
-
- session.flush()
- session.commit()
- session.expire(customer)
-
-def setup_db():
- metadata.drop_all()
- metadata.create_all()
-def cleanup_db():
- metadata.drop_all()
-
-@profiled('default')
-def default():
- run_queries()
- create_purchase()
-
-@profiled('all')
-def main():
- metadata.bind = testing.db
- try:
- define_tables()
- setup_mappers()
- setup_db()
- insert_data()
- default()
- finally:
- cleanup_db()
-
-main()
diff --git a/test/perf/sessions.py b/test/perf/sessions.py
deleted file mode 100644
index 80553fe48..000000000
--- a/test/perf/sessions.py
+++ /dev/null
@@ -1,95 +0,0 @@
-from sqlalchemy import *
-from sqlalchemy.orm import *
-
-from sqlalchemy.testing.compat import gc_collect
-from sqlalchemy.testing import AssertsExecutionResults, profiling, testing
-from test.orm import _fixtures
-
-# in this test we are specifically looking for time spent in the attributes.InstanceState.__cleanup() method.
-
-ITERATIONS = 100
-
-class SessionTest(fixtures.TestBase, AssertsExecutionResults):
- @classmethod
- def setup_class(cls):
- global t1, t2, metadata,T1, T2
- metadata = MetaData(testing.db)
- t1 = Table('t1', metadata,
- Column('c1', Integer, primary_key=True),
- Column('c2', String(30)))
-
- t2 = Table('t2', metadata,
- Column('c1', Integer, primary_key=True),
- Column('c2', String(30)),
- Column('t1id', Integer, ForeignKey('t1.c1'))
- )
-
- metadata.create_all()
-
- l = []
- for x in range(1,51):
- l.append({'c2':'this is t1 #%d' % x})
- t1.insert().execute(*l)
- for x in range(1, 51):
- l = []
- for y in range(1, 100):
- l.append({'c2':'this is t2 #%d' % y, 't1id':x})
- t2.insert().execute(*l)
-
- class T1(fixtures.ComparableEntity):
- pass
- class T2(fixtures.ComparableEntity):
- pass
-
- mapper(T1, t1, properties={
- 't2s':relationship(T2, backref='t1')
- })
- mapper(T2, t2)
-
- @classmethod
- def teardown_class(cls):
- metadata.drop_all()
- clear_mappers()
-
- @profiling.profiled('clean', report=True)
- def test_session_clean(self):
- for x in range(0, ITERATIONS):
- sess = create_session()
- t1s = sess.query(T1).filter(T1.c1.between(15, 48)).all()
- for index in [2, 7, 12, 15, 18, 20]:
- t1s[index].t2s
-
- sess.close()
- del sess
- gc_collect()
-
- @profiling.profiled('dirty', report=True)
- def test_session_dirty(self):
- for x in range(0, ITERATIONS):
- sess = create_session()
- t1s = sess.query(T1).filter(T1.c1.between(15, 48)).all()
-
- for index in [2, 7, 12, 15, 18, 20]:
- t1s[index].c2 = 'this is some modified text'
- for t2 in t1s[index].t2s:
- t2.c2 = 'this is some modified text'
-
- del t1s
- gc_collect()
-
- sess.close()
- del sess
- gc_collect()
-
- @profiling.profiled('noclose', report=True)
- def test_session_noclose(self):
- for x in range(0, ITERATIONS):
- sess = create_session()
- t1s = sess.query(T1).filter(T1.c1.between(15, 48)).all()
- for index in [2, 7, 12, 15, 18, 20]:
- t1s[index].t2s
-
- del sess
- gc_collect()
-
-
diff --git a/test/perf/stress_all.py b/test/perf/stress_all.py
deleted file mode 100644
index 890ef24a3..000000000
--- a/test/perf/stress_all.py
+++ /dev/null
@@ -1,226 +0,0 @@
-# -*- encoding: utf8 -*-
-from datetime import *
-import decimal
-#from fastdec import mpd as Decimal
-from cPickle import dumps, loads
-
-#from sqlalchemy.dialects.postgresql.base import ARRAY
-
-from stresstest import *
-
-# ---
-test_types = False
-test_methods = True
-test_pickle = False
-test_orm = False
-# ---
-verbose = True
-
-def values_results(raw_results):
- return [tuple(r.values()) for r in raw_results]
-
-def getitem_str_results(raw_results):
- return [
- (r['id'],
- r['field0'], r['field1'], r['field2'], r['field3'], r['field4'],
- r['field5'], r['field6'], r['field7'], r['field8'], r['field9'])
- for r in raw_results]
-
-def getitem_fallback_results(raw_results):
- return [
- (r['ID'],
- r['FIELD0'], r['FIELD1'], r['FIELD2'], r['FIELD3'], r['FIELD4'],
- r['FIELD5'], r['FIELD6'], r['FIELD7'], r['FIELD8'], r['FIELD9'])
- for r in raw_results]
-
-def getitem_int_results(raw_results):
- return [
- (r[0],
- r[1], r[2], r[3], r[4], r[5],
- r[6], r[7], r[8], r[9], r[10])
- for r in raw_results]
-
-def getitem_long_results(raw_results):
- return [
- (r[0L],
- r[1L], r[2L], r[3L], r[4L], r[5L],
- r[6L], r[7L], r[8L], r[9L], r[10L])
- for r in raw_results]
-
-def getitem_obj_results(raw_results):
- c = test_table.c
- fid, f0, f1, f2, f3, f4, f5, f6, f7, f8, f9 = (
- c.id, c.field0, c.field1, c.field2, c.field3, c.field4,
- c.field5, c.field6, c.field7, c.field8, c.field9)
- return [
- (r[fid],
- r[f0], r[f1], r[f2], r[f3], r[f4],
- r[f5], r[f6], r[f7], r[f8], r[f9])
- for r in raw_results]
-
-def slice_results(raw_results):
- return [row[0:6] + row[6:11] for row in raw_results]
-
-# ---------- #
-# Test types #
-# ---------- #
-
-# Array
-#def genarrayvalue(rnum, fnum):
-# return [fnum, fnum + 1, fnum + 2]
-#arraytest = (ARRAY(Integer), genarrayvalue,
-# dict(num_fields=100, num_records=1000,
-# engineurl='postgresql:///test'))
-
-# Boolean
-def genbooleanvalue(rnum, fnum):
- if rnum % 4:
- return bool(fnum % 2)
- else:
- return None
-booleantest = (Boolean, genbooleanvalue, dict(num_records=100000))
-
-# Datetime
-def gendatetimevalue(rnum, fnum):
- return (rnum % 4) and datetime(2005, 3, 3) or None
-datetimetest = (DateTime, gendatetimevalue, dict(num_records=10000))
-
-# Decimal
-def gendecimalvalue(rnum, fnum):
- if rnum % 4:
- return Decimal(str(0.25 * fnum))
- else:
- return None
-decimaltest = (Numeric(10, 2), gendecimalvalue, dict(num_records=10000))
-
-# Interval
-
-# no microseconds because Postgres does not seem to support it
-from_epoch = timedelta(14643, 70235)
-def genintervalvalue(rnum, fnum):
- return from_epoch
-intervaltest = (Interval, genintervalvalue,
- dict(num_fields=2, num_records=100000))
-
-# PickleType
-def genpicklevalue(rnum, fnum):
- return (rnum % 4) and {'str': "value%d" % fnum, 'int': rnum} or None
-pickletypetest = (PickleType, genpicklevalue,
- dict(num_fields=1, num_records=100000))
-
-# TypeDecorator
-class MyIntType(TypeDecorator):
- impl = Integer
-
- def process_bind_param(self, value, dialect):
- return value * 10
-
- def process_result_value(self, value, dialect):
- return value / 10
-
- def copy(self):
- return MyIntType()
-
-def genmyintvalue(rnum, fnum):
- return rnum + fnum
-typedecoratortest = (MyIntType, genmyintvalue,
- dict(num_records=100000))
-
-# Unicode
-def genunicodevalue(rnum, fnum):
- return (rnum % 4) and (u"value%d" % fnum) or None
-unicodetest = (Unicode(20, ), genunicodevalue,
- dict(num_records=100000))
-# dict(engineurl='mysql:///test', freshdata=False))
-
-# do the tests
-if test_types:
- tests = [booleantest, datetimetest, decimaltest, intervaltest,
- pickletypetest, typedecoratortest, unicodetest]
- for engineurl in ('postgresql://scott:tiger@localhost/test',
- 'sqlite://', 'mysql://scott:tiger@localhost/test'):
- print "\n%s\n" % engineurl
- for datatype, genvalue, kwargs in tests:
- print "%s:" % getattr(datatype, '__name__',
- datatype.__class__.__name__),
- profile_and_time_dbfunc(iter_results, datatype, genvalue,
- profile=False, engineurl=engineurl,
- verbose=verbose, **kwargs)
-
-# ---------------------- #
-# test row proxy methods #
-# ---------------------- #
-
-if test_methods:
- methods = [iter_results, values_results, getattr_results,
- getitem_str_results, getitem_fallback_results,
- getitem_int_results, getitem_long_results, getitem_obj_results,
- slice_results]
- for engineurl in ('postgresql://scott:tiger@localhost/test',
- 'sqlite://', 'mysql://scott:tiger@localhost/test'):
- print "\n%s\n" % engineurl
- test_table = prepare(Unicode(20,),
- genunicodevalue,
- num_fields=10, num_records=100000,
- verbose=verbose, engineurl=engineurl)
- for method in methods:
- print "%s:" % method.__name__,
- time_dbfunc(test_table, method, genunicodevalue,
- num_fields=10, num_records=100000, profile=False,
- verbose=verbose)
-
-# --------------------------------
-# test pickling Rowproxy instances
-# --------------------------------
-
-def pickletofile_results(raw_results):
- from cPickle import dump, load
- for protocol in (0, 1, 2):
- print "dumping protocol %d..." % protocol
- f = file('noext.pickle%d' % protocol, 'wb')
- dump(raw_results, f, protocol)
- f.close()
- return raw_results
-
-def pickle_results(raw_results):
- return loads(dumps(raw_results, 2))
-
-def pickle_meta(raw_results):
- pickled = dumps(raw_results[0]._parent, 2)
- metadata = loads(pickled)
- return raw_results
-
-def pickle_rows(raw_results):
- return [loads(dumps(row, 2)) for row in raw_results]
-
-if test_pickle:
- test_table = prepare(Unicode, genunicodevalue,
- num_fields=10, num_records=10000)
- funcs = [pickle_rows, pickle_results]
- for func in funcs:
- print "%s:" % func.__name__,
- time_dbfunc(test_table, func, genunicodevalue,
- num_records=10000, profile=False, verbose=verbose)
-
-# --------------------------------
-# test ORM
-# --------------------------------
-
-if test_orm:
- from sqlalchemy.orm import *
-
- class Test(object):
- pass
-
- Session = sessionmaker()
- session = Session()
-
- def get_results():
- return session.query(Test).all()
- print "ORM:",
- for engineurl in ('postgresql:///test', 'sqlite://', 'mysql:///test'):
- print "\n%s\n" % engineurl
- profile_and_time_dbfunc(getattr_results, Unicode(20), genunicodevalue,
- class_=Test, getresults_func=get_results,
- engineurl=engineurl, #freshdata=False,
- num_records=10000, verbose=verbose)
diff --git a/test/perf/stresstest.py b/test/perf/stresstest.py
deleted file mode 100644
index cf9404f53..000000000
--- a/test/perf/stresstest.py
+++ /dev/null
@@ -1,174 +0,0 @@
-import gc
-import sys
-import timeit
-import cProfile
-
-from sqlalchemy import MetaData, Table, Column
-from sqlalchemy.types import *
-from sqlalchemy.orm import mapper, clear_mappers
-
-metadata = MetaData()
-
-def gen_table(num_fields, field_type, metadata):
- return Table('test', metadata,
- Column('id', Integer, primary_key=True),
- *[Column("field%d" % fnum, field_type)
- for fnum in range(num_fields)])
-
-def insert(test_table, num_fields, num_records, genvalue, verbose=True):
- if verbose:
- print "building insert values...",
- sys.stdout.flush()
- values = [dict(("field%d" % fnum, genvalue(rnum, fnum))
- for fnum in range(num_fields))
- for rnum in range(num_records)]
- if verbose:
- print "inserting...",
- sys.stdout.flush()
- def db_insert():
- test_table.insert().execute(values)
- sys.modules['__main__'].db_insert = db_insert
- timing = timeit.timeit("db_insert()",
- "from __main__ import db_insert",
- number=1)
- if verbose:
- print "%s" % round(timing, 3)
-
-def check_result(results, num_fields, genvalue, verbose=True):
- if verbose:
- print "checking...",
- sys.stdout.flush()
- for rnum, row in enumerate(results):
- expected = tuple([rnum + 1] +
- [genvalue(rnum, fnum) for fnum in range(num_fields)])
- assert row == expected, "got: %s\nexpected: %s" % (row, expected)
- return True
-
-def avgdev(values, comparison):
- return sum(value - comparison for value in values) / len(values)
-
-def nicer_res(values, printvalues=False):
- if printvalues:
- print values
- min_time = min(values)
- return round(min_time, 3), round(avgdev(values, min_time), 2)
-
-def profile_func(func_name, verbose=True):
- if verbose:
- print "profiling...",
- sys.stdout.flush()
- cProfile.run('%s()' % func_name, 'prof')
-
-def time_func(func_name, num_tests=1, verbose=True):
- if verbose:
- print "timing...",
- sys.stdout.flush()
- timings = timeit.repeat('%s()' % func_name,
- "from __main__ import %s" % func_name,
- number=num_tests, repeat=5)
- avg, dev = nicer_res(timings)
- if verbose:
- print "%s (%s)" % (avg, dev)
- else:
- print avg
-
-def profile_and_time(func_name, num_tests=1):
- profile_func(func_name)
- time_func(func_name, num_tests)
-
-def iter_results(raw_results):
- return [tuple(row) for row in raw_results]
-
-def getattr_results(raw_results):
- return [
- (r.id,
- r.field0, r.field1, r.field2, r.field3, r.field4,
- r.field5, r.field6, r.field7, r.field8, r.field9)
- for r in raw_results]
-
-def fetchall(test_table):
- def results():
- return test_table.select().order_by(test_table.c.id).execute() \
- .fetchall()
- return results
-
-def hashable_set(l):
- hashables = []
- for o in l:
- try:
- hash(o)
- hashables.append(o)
- except:
- pass
- return set(hashables)
-
-def prepare(field_type, genvalue, engineurl='sqlite://',
- num_fields=10, num_records=1000, freshdata=True, verbose=True):
- global metadata
- metadata.clear()
- metadata.bind = engineurl
- test_table = gen_table(num_fields, field_type, metadata)
- if freshdata:
- metadata.drop_all()
- metadata.create_all()
- insert(test_table, num_fields, num_records, genvalue, verbose)
- return test_table
-
-def time_dbfunc(test_table, test_func, genvalue,
- class_=None,
- getresults_func=None,
- num_fields=10, num_records=1000, num_tests=1,
- check_results=check_result, profile=True,
- check_leaks=True, print_leaks=False, verbose=True):
- if verbose:
- print "testing '%s'..." % test_func.__name__,
- sys.stdout.flush()
- if class_ is not None:
- clear_mappers()
- mapper(class_, test_table)
- if getresults_func is None:
- getresults_func = fetchall(test_table)
- def test():
- return test_func(getresults_func())
- sys.modules['__main__'].test = test
- if check_leaks:
- gc.collect()
- objects_before = gc.get_objects()
- num_objects_before = len(objects_before)
- hashable_objects_before = hashable_set(objects_before)
-# gc.set_debug(gc.DEBUG_LEAK)
- if check_results:
- check_results(test(), num_fields, genvalue, verbose)
- if check_leaks:
- gc.collect()
- objects_after = gc.get_objects()
- num_objects_after = len(objects_after)
- num_leaks = num_objects_after - num_objects_before
- hashable_objects_after = hashable_set(objects_after)
- diff = hashable_objects_after - hashable_objects_before
- ldiff = len(diff)
- if print_leaks and ldiff < num_records:
- print "\n*** hashable objects leaked (%d) ***" % ldiff
- print '\n'.join(map(str, diff))
- print "***\n"
-
- if num_leaks > num_records:
- print "(leaked: %d !)" % num_leaks,
- if profile:
- profile_func('test', verbose)
- time_func('test', num_tests, verbose)
-
-def profile_and_time_dbfunc(test_func, field_type, genvalue,
- class_=None,
- getresults_func=None,
- engineurl='sqlite://', freshdata=True,
- num_fields=10, num_records=1000, num_tests=1,
- check_results=check_result, profile=True,
- check_leaks=True, print_leaks=False, verbose=True):
- test_table = prepare(field_type, genvalue, engineurl,
- num_fields, num_records, freshdata, verbose)
- time_dbfunc(test_table, test_func, genvalue, class_,
- getresults_func,
- num_fields, num_records, num_tests,
- check_results, profile,
- check_leaks, print_leaks, verbose)
diff --git a/test/perf/threaded_compile.py b/test/perf/threaded_compile.py
deleted file mode 100644
index 0facf0ec2..000000000
--- a/test/perf/threaded_compile.py
+++ /dev/null
@@ -1,75 +0,0 @@
-"""test that mapper compilation is threadsafe, including
-when additional mappers are created while the existing
-collection is being compiled."""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-import thread, time
-from sqlalchemy.orm import mapperlib
-
-
-meta = MetaData('sqlite:///foo.db')
-
-t1 = Table('t1', meta,
- Column('c1', Integer, primary_key=True),
- Column('c2', String(30))
- )
-
-t2 = Table('t2', meta,
- Column('c1', Integer, primary_key=True),
- Column('c2', String(30)),
- Column('t1c1', None, ForeignKey('t1.c1'))
-)
-t3 = Table('t3', meta,
- Column('c1', Integer, primary_key=True),
- Column('c2', String(30)),
-)
-meta.create_all()
-
-class T1(object):
- pass
-
-class T2(object):
- pass
-
-class FakeLock(object):
- def acquire(self):pass
- def release(self):pass
-
-# uncomment this to disable the mutex in mapper compilation;
-# should produce thread collisions
-#mapperlib._COMPILE_MUTEX = FakeLock()
-
-def run1():
- for i in range(50):
- print "T1", thread.get_ident()
- class_mapper(T1)
- time.sleep(.05)
-
-def run2():
- for i in range(50):
- print "T2", thread.get_ident()
- class_mapper(T2)
- time.sleep(.057)
-
-def run3():
- for i in range(50):
- def foo():
- print "FOO", thread.get_ident()
- class Foo(object):pass
- mapper(Foo, t3)
- class_mapper(Foo).compile()
- foo()
- time.sleep(.05)
-
-mapper(T1, t1, properties={'t2':relationship(T2, backref="t1")})
-mapper(T2, t2)
-print "START"
-for j in range(0, 5):
- thread.start_new_thread(run1, ())
- thread.start_new_thread(run2, ())
- thread.start_new_thread(run3, ())
- thread.start_new_thread(run3, ())
- thread.start_new_thread(run3, ())
-print "WAIT"
-time.sleep(5)
diff --git a/test/profiles.txt b/test/profiles.txt
index c9ba04c24..090cf5ea1 100644
--- a/test/profiles.txt
+++ b/test/profiles.txt
@@ -1,67 +1,88 @@
# /Users/classic/dev/sqlalchemy/test/profiles.txt
# This file is written out on a per-environment basis.
-# For each test in aaa_profiling, the corresponding function and
+# For each test in aaa_profiling, the corresponding function and
# environment is located within this file. If it doesn't exist,
# the test is skipped.
-# If a callcount does exist, it is compared to what we received.
+# If a callcount does exist, it is compared to what we received.
# assertions are raised if the counts do not match.
-#
-# To add a new callcount test, apply the function_call_count
-# decorator and re-run the tests using the --write-profiles
+#
+# To add a new callcount test, apply the function_call_count
+# decorator and re-run the tests using the --write-profiles
# option - this file will be rewritten including the new count.
-#
+#
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.5_sqlite_pysqlite_nocextensions 62
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.6_sqlite_pysqlite_nocextensions 62
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_cextensions 62
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_nocextensions 62
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_cextensions 62
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_nocextensions 62
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_cextensions 62
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_nocextensions 62
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.6_sqlite_pysqlite_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_cextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_nocextensions 67
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.2_postgresql_psycopg2_nocextensions 69
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.2_sqlite_pysqlite_nocextensions 69
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_nocextensions 69
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_nocextensions 69
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.5_sqlite_pysqlite_nocextensions 134
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.6_sqlite_pysqlite_nocextensions 135
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_cextensions 135
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_nocextensions 135
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 135
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 135
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 135
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 135
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.6_sqlite_pysqlite_nocextensions 141
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_cextensions 141
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_nocextensions 141
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 141
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 141
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 141
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 141
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.2_postgresql_psycopg2_nocextensions 151
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.2_sqlite_pysqlite_nocextensions 151
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 151
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_nocextensions 151
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 177
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.6_sqlite_pysqlite_nocextensions 175
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_cextensions 175
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_nocextensions 175
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 175
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_nocextensions 175
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_cextensions 175
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 175
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.2_postgresql_psycopg2_nocextensions 185
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.2_sqlite_pysqlite_nocextensions 185
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 185
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_nocextensions 185
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.5_sqlite_pysqlite_nocextensions 65
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.6_sqlite_pysqlite_nocextensions 65
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_cextensions 65
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_nocextensions 65
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_cextensions 65
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_nocextensions 65
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_cextensions 65
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_nocextensions 65
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.6_sqlite_pysqlite_nocextensions 70
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_cextensions 70
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_nocextensions 70
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_cextensions 70
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_nocextensions 70
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_cextensions 70
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_nocextensions 70
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.2_postgresql_psycopg2_nocextensions 70
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.2_sqlite_pysqlite_nocextensions 70
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_nocextensions 70
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_nocextensions 70
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.5_sqlite_pysqlite_nocextensions 129
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.6_sqlite_pysqlite_nocextensions 130
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 130
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 130
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 130
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 130
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 130
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 130
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.6_sqlite_pysqlite_nocextensions 137
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 137
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 137
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 137
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 137
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 137
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 137
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.2_postgresql_psycopg2_nocextensions 136
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.2_sqlite_pysqlite_nocextensions 136
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_nocextensions 136
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_nocextensions 136
# TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.5_sqlite_pysqlite_nocextensions 17987
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.6_sqlite_pysqlite_nocextensions 17987
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_mysql_mysqldb_cextensions 17987
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_mysql_mysqldb_nocextensions 17987
@@ -69,39 +90,52 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_nocextensions 17987
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_cextensions 17987
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_nocextensions 17987
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.2_postgresql_psycopg2_nocextensions 18987
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.2_sqlite_pysqlite_nocextensions 18987
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_nocextensions 18987
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_nocextensions 18987
# TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.5_sqlite_pysqlite_nocextensions 116289
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.6_sqlite_pysqlite_nocextensions 116790
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_cextensions 122540
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_nocextensions 125290
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_cextensions 115040
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_nocextensions 117790
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_cextensions 114040
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_nocextensions 116790
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.6_sqlite_pysqlite_nocextensions 118319
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_cextensions 124069
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_nocextensions 126819
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_cextensions 116569
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_nocextensions 119319
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_cextensions 115569
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_nocextensions 118319
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.2_postgresql_psycopg2_nocextensions 121790
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.2_sqlite_pysqlite_nocextensions 121822
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_nocextensions 121822
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_nocextensions 121822
# TEST: test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.5_sqlite_pysqlite_nocextensions 19852
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.6_sqlite_pysqlite_nocextensions 19217
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_cextensions 19491
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_nocextensions 19781
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_cextensions 18878
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_nocextensions 19168
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_cextensions 18957
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 19217
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.6_sqlite_pysqlite_nocextensions 19534
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_cextensions 19838
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_nocextensions 20098
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_cextensions 19237
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_nocextensions 19467
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_cextensions 19274
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 19534
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.2_postgresql_psycopg2_nocextensions 20424
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.2_sqlite_pysqlite_nocextensions 20447
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_nocextensions 20344
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_nocextensions 20433
# TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_load
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.5_sqlite_pysqlite_nocextensions 1178
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.6_sqlite_pysqlite_nocextensions 1174
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_cextensions 1341
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_nocextensions 1366
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_cextensions 1275
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_nocextensions 1307
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cextensions 1149
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1174
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.6_sqlite_pysqlite_nocextensions 1221
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_cextensions 1388
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_nocextensions 1413
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_cextensions 1296
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_nocextensions 1321
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cextensions 1196
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1221
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.2_postgresql_psycopg2_nocextensions 1332
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.2_sqlite_pysqlite_nocextensions 1243
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_nocextensions 1357
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_nocextensions 1243
# TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_no_load
@@ -112,6 +146,10 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_nocextensions 122,18
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_cextensions 122,18
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_nocextensions 122,18
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.2_postgresql_psycopg2_nocextensions 127,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.2_sqlite_pysqlite_nocextensions 127,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_nocextensions 127,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_nocextensions 127,19
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect
@@ -122,10 +160,13 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psy
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_nocextensions 82
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_cextensions 82
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_nocextensions 82
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.2_postgresql_psycopg2_nocextensions 70
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.2_sqlite_pysqlite_nocextensions 70
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_nocextensions 69
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_nocextensions 69
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.5_sqlite_pysqlite_nocextensions 32
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.6_sqlite_pysqlite_nocextensions 29
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_mysql_mysqldb_cextensions 29
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_mysql_mysqldb_nocextensions 29
@@ -133,10 +174,13 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_ps
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_nocextensions 29
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_cextensions 29
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_nocextensions 29
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.2_postgresql_psycopg2_nocextensions 23
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.2_sqlite_pysqlite_nocextensions 23
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_nocextensions 22
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_nocextensions 22
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.5_sqlite_pysqlite_nocextensions 6
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.6_sqlite_pysqlite_nocextensions 6
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_mysql_mysqldb_cextensions 6
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_mysql_mysqldb_nocextensions 6
@@ -144,32 +188,41 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_po
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_nocextensions 6
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_cextensions 6
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_nocextensions 6
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.2_postgresql_psycopg2_nocextensions 7
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.2_sqlite_pysqlite_nocextensions 7
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_nocextensions 7
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_nocextensions 7
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.5_sqlite_pysqlite_nocextensions 41
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.6_sqlite_pysqlite_nocextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_cextensions 40
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_nocextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_cextensions 40
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_nocextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_cextensions 40
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_nocextensions 42
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.6_sqlite_pysqlite_nocextensions 43
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_cextensions 41
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_nocextensions 43
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_cextensions 41
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_nocextensions 43
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_cextensions 41
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_nocextensions 43
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.2_postgresql_psycopg2_nocextensions 41
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.2_sqlite_pysqlite_nocextensions 41
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_nocextensions 41
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_nocextensions 41
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.5_sqlite_pysqlite_nocextensions 64
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.6_sqlite_pysqlite_nocextensions 65
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_cextensions 63
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_nocextensions 65
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_cextensions 63
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_nocextensions 65
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_cextensions 63
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_nocextensions 65
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.6_sqlite_pysqlite_nocextensions 68
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_cextensions 66
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_nocextensions 68
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_cextensions 66
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_nocextensions 68
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_cextensions 66
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_nocextensions 68
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.2_postgresql_psycopg2_nocextensions 66
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.2_sqlite_pysqlite_nocextensions 66
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_nocextensions 66
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_nocextensions 66
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.5_sqlite_pysqlite_nocextensions 14
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.6_sqlite_pysqlite_nocextensions 14
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_cextensions 14
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_nocextensions 14
@@ -177,90 +230,125 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_nocextensions 14
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_cextensions 14
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_nocextensions 14
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.2_postgresql_psycopg2_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.2_sqlite_pysqlite_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_nocextensions 15
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.5_sqlite_pysqlite_nocextensions 14413
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.6_sqlite_pysqlite_nocextensions 14414
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 476
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 14472
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20438
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 34458
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 416
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 14414
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.6_sqlite_pysqlite_nocextensions 15447
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 485
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 15505
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20471
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35491
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 427
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15447
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.2_postgresql_psycopg2_nocextensions 14459
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.2_sqlite_pysqlite_nocextensions 14430
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_nocextensions 14457
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_nocextensions 14430
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.5_sqlite_pysqlite_nocextensions 14413
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.6_sqlite_pysqlite_nocextensions 14414
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 476
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 44472
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20438
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 34458
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 416
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 14414
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.6_sqlite_pysqlite_nocextensions 15447
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 485
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 45505
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20471
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35491
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 427
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15447
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.2_postgresql_psycopg2_nocextensions 14459
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.2_sqlite_pysqlite_nocextensions 14430
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_nocextensions 14457
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_nocextensions 14430
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_cextensions 5044
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_nocextensions 5088
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_nocextensions 5175
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.2_postgresql_psycopg2_nocextensions 4828
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_nocextensions 4792
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_cextensions 247
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_nocextensions 247
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_cextensions 256
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_nocextensions 256
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 3.2_postgresql_psycopg2_nocextensions 239
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_nocextensions 237
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 3366
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_nocextensions 3590
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 3425
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_nocextensions 3749
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.2_postgresql_psycopg2_nocextensions 3401
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_nocextensions 3385
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_cextensions 10915
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_nocextensions 11982
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_cextensions 11045
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_nocextensions 12747
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.2_postgresql_psycopg2_nocextensions 11849
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_nocextensions 11803
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_cextensions 1005
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_nocextensions 1109
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_cextensions 1050
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_nocextensions 1167
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 3.2_postgresql_psycopg2_nocextensions 1114
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_nocextensions 1106
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_cextensions 1736
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_nocextensions 1779
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_cextensions 1811
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_nocextensions 1858
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.2_postgresql_psycopg2_nocextensions 1731
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_nocextensions 1721
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 2.7_postgresql_psycopg2_cextensions 2219
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 2.7_postgresql_psycopg2_nocextensions 2449
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 2.7_postgresql_psycopg2_cextensions 2300
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 2.7_postgresql_psycopg2_nocextensions 2559
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.2_postgresql_psycopg2_nocextensions 2483
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.3_postgresql_psycopg2_nocextensions 2473
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_cextensions 5977
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_nocextensions 6096
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_cextensions 6157
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_nocextensions 6276
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.2_postgresql_psycopg2_nocextensions 6252
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_nocextensions 6251
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_cextensions 392
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_nocextensions 399
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_cextensions 391
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_nocextensions 398
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 3.2_postgresql_psycopg2_nocextensions 395
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_nocextensions 394
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 6124
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_nocextensions 6356
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 6422
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_nocextensions 6654
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.2_postgresql_psycopg2_nocextensions 6560
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_nocextensions 6560
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_cextensions 18140
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_nocextensions 19571
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_cextensions 19145
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_nocextensions 20576
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.2_postgresql_psycopg2_nocextensions 20279
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_nocextensions 20279
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_cextensions 1018
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_nocextensions 1114
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_cextensions 1063
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_nocextensions 1171
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.2_postgresql_psycopg2_nocextensions 1120
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_nocextensions 1113
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_cextensions 2614
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_nocextensions 2677
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_cextensions 2686
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_nocextensions 2749
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 3.2_postgresql_psycopg2_nocextensions 2749
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_nocextensions 2749
diff --git a/test/requirements.py b/test/requirements.py
index c20bcbd0a..c09c0df99 100644
--- a/test/requirements.py
+++ b/test/requirements.py
@@ -556,6 +556,15 @@ class DefaultRequirements(SuiteRequirements):
"cPython interpreter needed"
)
+
+ @property
+ def non_broken_pickle(self):
+ from sqlalchemy.util import pickle
+ return only_if(
+ lambda: pickle.__name__ == 'cPickle' or sys.version_info >= (3, 2),
+ "Needs cPickle or newer Python 3 pickle"
+ )
+
@property
def predictable_gc(self):
"""target platform must remove all cycles unconditionally when
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index 83a02a49e..1ab45a295 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -20,6 +20,7 @@ from sqlalchemy import Integer, String, MetaData, Table, Column, select, \
intersect, union_all, Boolean, distinct, join, outerjoin, asc, desc,\
over, subquery, case
import decimal
+from sqlalchemy.util import u
from sqlalchemy import exc, sql, util, types, schema
from sqlalchemy.sql import table, column, label
from sqlalchemy.sql.expression import ClauseList, _literal_as_text, HasPrefixes
@@ -661,13 +662,13 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
s = select([table1.c.myid]).as_scalar()
try:
s.c.foo
- except exc.InvalidRequestError, err:
+ except exc.InvalidRequestError as err:
assert str(err) \
== 'Scalar Select expression has no columns; use this '\
'object directly within a column-level expression.'
try:
s.columns.foo
- except exc.InvalidRequestError, err:
+ except exc.InvalidRequestError as err:
assert str(err) \
== 'Scalar Select expression has no columns; use this '\
'object directly within a column-level expression.'
@@ -734,13 +735,14 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
'JOIN myothertable ON mytable.myid = '
'myothertable.otherid')
- def test_label_comparison(self):
+ def test_label_comparison_one(self):
x = func.lala(table1.c.myid).label('foo')
self.assert_compile(select([x], x == 5),
'SELECT lala(mytable.myid) AS foo FROM '
'mytable WHERE lala(mytable.myid) = '
':param_1')
+ def test_label_comparison_two(self):
self.assert_compile(
label('bar', column('foo', type_=String)) + 'foo',
'foo || :param_1')
@@ -1184,9 +1186,9 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
# test unicode
self.assert_compile(select(
- [u"foobar(a)", u"pk_foo_bar(syslaal)"],
- u"a = 12",
- from_obj=[u"foobar left outer join lala on foobar.foo = lala.foo"]
+ ["foobar(a)", "pk_foo_bar(syslaal)"],
+ "a = 12",
+ from_obj=["foobar left outer join lala on foobar.foo = lala.foo"]
),
"SELECT foobar(a), pk_foo_bar(syslaal) FROM foobar "
"left outer join lala on foobar.foo = lala.foo WHERE a = 12"
@@ -2313,7 +2315,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
func.lala(table1.c.name).label('gg')])
eq_(
- s1.c.keys(),
+ list(s1.c.keys()),
['myid', 'foobar', str(f1), 'gg']
)
@@ -2341,7 +2343,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
t = table1
s1 = select([col], from_obj=t)
- assert s1.c.keys() == [key], s1.c.keys()
+ assert list(s1.c.keys()) == [key], list(s1.c.keys())
if label:
self.assert_compile(s1,
@@ -2747,11 +2749,11 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL):
def test_reraise_of_column_spec_issue_unicode(self):
MyType = self._illegal_type_fixture()
t1 = Table('t', MetaData(),
- Column(u'méil', MyType())
+ Column(u('méil'), MyType())
)
assert_raises_message(
exc.CompileError,
- ur"\(in table 't', column 'méil'\): Couldn't compile type",
+ u(r"\(in table 't', column 'méil'\): Couldn't compile type"),
schema.CreateTable(t1).compile
)
diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py
index 79514eaf4..1508c0532 100644
--- a/test/sql/test_defaults.py
+++ b/test/sql/test_defaults.py
@@ -11,6 +11,10 @@ from sqlalchemy.types import TypeDecorator, TypeEngine
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.dialects import sqlite
from sqlalchemy.testing import fixtures
+from sqlalchemy.util import u, b
+from sqlalchemy import util
+
+t = f = f2 = ts = currenttime = metadata = default_generator = None
t = f = f2 = ts = currenttime = metadata = default_generator = None
@@ -647,7 +651,7 @@ class SequenceExecTest(fixtures.TestBase):
def _assert_seq_result(self, ret):
"""asserts return of next_value is an int"""
- assert isinstance(ret, (int, long))
+ assert isinstance(ret, util.int_types)
assert ret > 0
def test_implicit_connectionless(self):
@@ -779,7 +783,7 @@ class SequenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
]
start = seq.start or 1
inc = seq.increment or 1
- assert values == list(xrange(start, start + inc * 3, inc))
+ assert values == list(range(start, start + inc * 3, inc))
finally:
seq.drop(testing.db)
@@ -1157,20 +1161,12 @@ class UnicodeDefaultsTest(fixtures.TestBase):
Column(Unicode(32))
def test_unicode_default(self):
- # Py3K
- #default = 'foo'
- # Py2K
- default = u'foo'
- # end Py2K
+ default = u('foo')
Column(Unicode(32), default=default)
def test_nonunicode_default(self):
- # Py3K
- #default = b'foo'
- # Py2K
- default = 'foo'
- # end Py2K
+ default = b('foo')
assert_raises_message(
sa.exc.SAWarning,
"Unicode column received non-unicode default value.",
diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py
index b325b7763..ee503dbb7 100644
--- a/test/sql/test_functions.py
+++ b/test/sql/test_functions.py
@@ -381,7 +381,7 @@ class ExecuteTest(fixtures.TestBase):
assert t.select(t.c.id == id).execute().first()['value'] == 9
t.update(values={t.c.value: func.length("asdf")}).execute()
assert t.select().execute().first()['value'] == 4
- print "--------------------------"
+ print("--------------------------")
t2.insert().execute()
t2.insert(values=dict(value=func.length("one"))).execute()
t2.insert(values=dict(value=func.length("asfda") + -19)).\
@@ -409,7 +409,7 @@ class ExecuteTest(fixtures.TestBase):
t2.update(values={t2.c.value: func.length("asfdaasdf"),
t2.c.stuff: "foo"}).execute()
- print "HI", select([t2.c.value, t2.c.stuff]).execute().first()
+ print("HI", select([t2.c.value, t2.c.stuff]).execute().first())
eq_(select([t2.c.value, t2.c.stuff]).execute().first(),
(9, "foo")
)
diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py
index 8b2abef0e..09b20d8ea 100644
--- a/test/sql/test_generative.py
+++ b/test/sql/test_generative.py
@@ -176,7 +176,7 @@ class BinaryEndpointTraversalTest(fixtures.TestBase):
canary = []
def visit(binary, l, r):
canary.append((binary.operator, l, r))
- print binary.operator, l, r
+ print(binary.operator, l, r)
sql_util.visit_binary_product(visit, expr)
eq_(
canary, expected
@@ -433,7 +433,7 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
clause2 = Vis().traverse(clause)
assert c1 == str(clause)
assert str(clause2) == c1 + " SOME MODIFIER=:lala"
- assert clause.bindparams.keys() == ['bar']
+ assert list(clause.bindparams.keys()) == ['bar']
assert set(clause2.bindparams.keys()) == set(['bar', 'lala'])
def test_select(self):
@@ -446,8 +446,8 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
s3 = Vis().traverse(s2)
assert str(s3) == s3_assert
assert str(s2) == s2_assert
- print str(s2)
- print str(s3)
+ print(str(s2))
+ print(str(s3))
class Vis(ClauseVisitor):
def visit_select(self, select):
select.append_whereclause(t1.c.col2 == 7)
@@ -459,8 +459,8 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
def visit_select(self, select):
select.append_whereclause(t1.c.col3 == 9)
s4 = Vis().traverse(s3)
- print str(s3)
- print str(s4)
+ print(str(s3))
+ print(str(s4))
assert str(s4) == s4_assert
assert str(s3) == s3_assert
@@ -471,8 +471,8 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
binary.left = t1.c.col1
binary.right = bindparam("col1", unique=True)
s5 = Vis().traverse(s4)
- print str(s4)
- print str(s5)
+ print(str(s4))
+ print(str(s5))
assert str(s5) == s5_assert
assert str(s4) == s4_assert
diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py
index db2eaa4fa..c0873862d 100644
--- a/test/sql/test_metadata.py
+++ b/test/sql/test_metadata.py
@@ -89,7 +89,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
msgs.append("attach %s.%s" % (t.name, c.name))
c1 = Column('foo', String())
m = MetaData()
- for i in xrange(3):
+ for i in range(3):
cx = c1.copy()
# as of 0.7, these events no longer copy. its expected
# that listeners will be re-established from the
@@ -511,7 +511,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
def _get_key(i):
return [i.name, i.unique] + \
sorted(i.kwargs.items()) + \
- i.columns.keys()
+ list(i.columns.keys())
eq_(
sorted([_get_key(i) for i in table.indexes]),
diff --git a/test/sql/test_query.py b/test/sql/test_query.py
index 293e629c8..ae029b11c 100644
--- a/test/sql/test_query.py
+++ b/test/sql/test_query.py
@@ -367,10 +367,10 @@ class QueryTest(fixtures.TestBase):
)
if use_labels:
eq_(result[0]['query_users_user_id'], 7)
- eq_(result[0].keys(), ["query_users_user_id", "query_users_user_name"])
+ eq_(list(result[0].keys()), ["query_users_user_id", "query_users_user_name"])
else:
eq_(result[0]['user_id'], 7)
- eq_(result[0].keys(), ["user_id", "user_name"])
+ eq_(list(result[0].keys()), ["user_id", "user_name"])
eq_(result[0][0], 7)
eq_(result[0][users.c.user_id], 7)
@@ -523,13 +523,13 @@ class QueryTest(fixtures.TestBase):
def a_eq(got, wanted):
if got != wanted:
- print "Wanted %s" % wanted
- print "Received %s" % got
+ print("Wanted %s" % wanted)
+ print("Received %s" % got)
self.assert_(got == wanted, got)
a_eq(prep('select foo'), 'select foo')
a_eq(prep("time='12:30:00'"), "time='12:30:00'")
- a_eq(prep(u"time='12:30:00'"), u"time='12:30:00'")
+ a_eq(prep("time='12:30:00'"), "time='12:30:00'")
a_eq(prep(":this:that"), ":this:that")
a_eq(prep(":this :that"), "? ?")
a_eq(prep("(:this),(:that :other)"), "(?),(? ?)")
@@ -708,8 +708,6 @@ class QueryTest(fixtures.TestBase):
use_labels=labels),
[(3, 'a'), (2, 'b'), (1, None)])
- @testing.fails_on('mssql+pyodbc',
- "pyodbc result row doesn't support slicing")
def test_column_slices(self):
users.insert().execute(user_id=1, user_name='john')
users.insert().execute(user_id=2, user_name='jack')
@@ -769,7 +767,7 @@ class QueryTest(fixtures.TestBase):
).first()
eq_(r['user_id'], 1)
eq_(r['user_name'], "john")
- eq_(r.keys(), ["user_id", "user_name"])
+ eq_(list(r.keys()), ["user_id", "user_name"])
@testing.only_on("sqlite", "sqlite specific feature")
def test_column_accessor_sqlite_raw(self):
@@ -784,7 +782,7 @@ class QueryTest(fixtures.TestBase):
assert 'user_name' not in r
eq_(r['query_users.user_id'], 1)
eq_(r['query_users.user_name'], "john")
- eq_(r.keys(), ["query_users.user_id", "query_users.user_name"])
+ eq_(list(r.keys()), ["query_users.user_id", "query_users.user_name"])
@testing.only_on("sqlite", "sqlite specific feature")
def test_column_accessor_sqlite_translated(self):
@@ -799,7 +797,7 @@ class QueryTest(fixtures.TestBase):
eq_(r['user_name'], "john")
eq_(r['query_users.user_id'], 1)
eq_(r['query_users.user_name'], "john")
- eq_(r.keys(), ["user_id", "user_name"])
+ eq_(list(r.keys()), ["user_id", "user_name"])
def test_column_accessor_labels_w_dots(self):
users.insert().execute(
@@ -812,7 +810,7 @@ class QueryTest(fixtures.TestBase):
eq_(r['query_users.user_id'], 1)
eq_(r['query_users.user_name'], "john")
assert "user_name" not in r
- eq_(r.keys(), ["query_users.user_id", "query_users.user_name"])
+ eq_(list(r.keys()), ["query_users.user_id", "query_users.user_name"])
def test_column_accessor_unary(self):
users.insert().execute(
@@ -889,7 +887,7 @@ class QueryTest(fixtures.TestBase):
])
).first()
- eq_(row.keys(), ["case_insensitive", "CaseSensitive"])
+ eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
eq_(row["case_insensitive"], 1)
eq_(row["CaseSensitive"], 2)
@@ -911,7 +909,7 @@ class QueryTest(fixtures.TestBase):
])
).first()
- eq_(row.keys(), ["case_insensitive", "CaseSensitive"])
+ eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
eq_(row["case_insensitive"], 1)
eq_(row["CaseSensitive"], 2)
eq_(row["Case_insensitive"],1)
@@ -1072,14 +1070,14 @@ class QueryTest(fixtures.TestBase):
def test_keys(self):
users.insert().execute(user_id=1, user_name='foo')
r = users.select().execute()
- eq_([x.lower() for x in r.keys()], ['user_id', 'user_name'])
+ eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
r = r.first()
- eq_([x.lower() for x in r.keys()], ['user_id', 'user_name'])
+ eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
def test_items(self):
users.insert().execute(user_id=1, user_name='foo')
r = users.select().execute().first()
- eq_([(x[0].lower(), x[1]) for x in r.items()], [('user_id', 1), ('user_name', 'foo')])
+ eq_([(x[0].lower(), x[1]) for x in list(r.items())], [('user_id', 1), ('user_name', 'foo')])
def test_len(self):
users.insert().execute(user_id=1, user_name='foo')
@@ -1098,8 +1096,8 @@ class QueryTest(fixtures.TestBase):
r = users.select(users.c.user_id==1).execute().first()
eq_(r[0], 1)
eq_(r[1], 'foo')
- eq_([x.lower() for x in r.keys()], ['user_id', 'user_name'])
- eq_(r.values(), [1, 'foo'])
+ eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
+ eq_(list(r.values()), [1, 'foo'])
def test_column_order_with_text_query(self):
# should return values in query order
@@ -1107,8 +1105,8 @@ class QueryTest(fixtures.TestBase):
r = testing.db.execute('select user_name, user_id from query_users').first()
eq_(r[0], 'foo')
eq_(r[1], 1)
- eq_([x.lower() for x in r.keys()], ['user_name', 'user_id'])
- eq_(r.values(), ['foo', 1])
+ eq_([x.lower() for x in list(r.keys())], ['user_name', 'user_id'])
+ eq_(list(r.values()), ['foo', 1])
@testing.crashes('oracle', 'FIXME: unknown, varify not fails_on()')
@testing.crashes('firebird', 'An identifier must begin with a letter')
@@ -1137,7 +1135,7 @@ class QueryTest(fixtures.TestBase):
self.assert_(r['_parent'] == 'Hidden parent')
self.assert_(r['_row'] == 'Hidden row')
try:
- print r._parent, r._row
+ print(r._parent, r._row)
self.fail('Should not allow access to private attributes')
except AttributeError:
pass # expected
@@ -2334,7 +2332,7 @@ class JoinTest(fixtures.TestBase):
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
from_obj=[(t1.join(t2).outerjoin(t3, criteria))])
- print expr
+ print(expr)
self.assertRows(expr, [(10, 20, 30), (11, 21, None)])
def test_mixed_where(self):
@@ -2416,7 +2414,7 @@ class OperatorTest(fixtures.TestBase):
select([
flds.c.intcol, func.row_number().over(order_by=flds.c.strcol)
]).execute().fetchall(),
- [(13, 1L), (5, 2L)]
+ [(13, 1), (5, 2)]
)
diff --git a/test/sql/test_quote.py b/test/sql/test_quote.py
index 8b14d23a9..717f0f797 100644
--- a/test/sql/test_quote.py
+++ b/test/sql/test_quote.py
@@ -552,8 +552,8 @@ class PreparerTest(fixtures.TestBase):
def a_eq(have, want):
if have != want:
- print "Wanted %s" % want
- print "Received %s" % have
+ print("Wanted %s" % want)
+ print("Received %s" % have)
self.assert_(have == want)
a_eq(unformat('foo'), ['foo'])
@@ -584,13 +584,13 @@ class PreparerTest(fixtures.TestBase):
def a_eq(have, want):
if have != want:
- print "Wanted %s" % want
- print "Received %s" % have
+ print("Wanted %s" % want)
+ print("Received %s" % have)
self.assert_(have == want)
a_eq(unformat('foo'), ['foo'])
a_eq(unformat('`foo`'), ['foo'])
- a_eq(unformat(`'foo'`), ["'foo'"])
+ a_eq(unformat(repr('foo')), ["'foo'"])
a_eq(unformat('foo.bar'), ['foo', 'bar'])
a_eq(unformat('`foo`.`bar`'), ['foo', 'bar'])
a_eq(unformat('foo.`bar`'), ['foo', 'bar'])
diff --git a/test/sql/test_rowcount.py b/test/sql/test_rowcount.py
index f14f78989..2dbf4f3ea 100644
--- a/test/sql/test_rowcount.py
+++ b/test/sql/test_rowcount.py
@@ -53,20 +53,20 @@ class FoundRowsTest(fixtures.TestBase, AssertsExecutionResults):
# WHERE matches 3, 3 rows changed
department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='Z')
- print "expecting 3, dialect reports %s" % r.rowcount
+ print("expecting 3, dialect reports %s" % r.rowcount)
assert r.rowcount == 3
def test_update_rowcount2(self):
# WHERE matches 3, 0 rows changed
department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='C')
- print "expecting 3, dialect reports %s" % r.rowcount
+ print("expecting 3, dialect reports %s" % r.rowcount)
assert r.rowcount == 3
def test_delete_rowcount(self):
# WHERE matches 3, 3 rows deleted
department = employees_table.c.department
r = employees_table.delete(department=='C').execute()
- print "expecting 3, dialect reports %s" % r.rowcount
+ print("expecting 3, dialect reports %s" % r.rowcount)
assert r.rowcount == 3
diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py
index 183b72111..dc0b040b0 100644
--- a/test/sql/test_selectable.py
+++ b/test/sql/test_selectable.py
@@ -174,11 +174,11 @@ class SelectableTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
def test_clone_append_column(self):
sel = select([literal_column('1').label('a')])
- eq_(sel.c.keys(), ['a'])
+ eq_(list(sel.c.keys()), ['a'])
cloned = visitors.ReplacingCloningVisitor().traverse(sel)
cloned.append_column(literal_column('2').label('b'))
cloned.append_column(func.foo())
- eq_(cloned.c.keys(), ['a', 'b', 'foo()'])
+ eq_(list(cloned.c.keys()), ['a', 'b', 'foo()'])
def test_append_column_after_replace_selectable(self):
basesel = select([literal_column('1').label('a')])
@@ -362,10 +362,10 @@ class SelectableTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiled
def test_join(self):
a = join(table1, table2)
- print str(a.select(use_labels=True))
+ print(str(a.select(use_labels=True)))
b = table2.alias('b')
j = join(a, b)
- print str(j)
+ print(str(j))
criterion = a.c.table1_col1 == b.c.col2
self.assert_(criterion.compare(j.onclause))
@@ -949,7 +949,7 @@ class PrimaryKeyTest(fixtures.TestBase, AssertsExecutionResults):
primary_key=True), Column('x', Integer))
d = Table('d', meta, Column('id', Integer, ForeignKey('c.id'),
primary_key=True), Column('x', Integer))
- print list(a.join(b, a.c.x == b.c.id).primary_key)
+ print(list(a.join(b, a.c.x == b.c.id).primary_key))
assert list(a.join(b, a.c.x == b.c.id).primary_key) == [a.c.id]
assert list(b.join(c, b.c.x == c.c.id).primary_key) == [b.c.id]
assert list(a.join(b).join(c, c.c.id == b.c.x).primary_key) \
@@ -1618,7 +1618,7 @@ class WithLabelsTest(fixtures.TestBase):
def test_names_overlap_label(self):
sel = self._names_overlap().apply_labels()
eq_(
- sel.c.keys(),
+ list(sel.c.keys()),
['t1_x', 't2_x']
)
self._assert_result_keys(sel, ['t1_x', 't2_x'])
@@ -1632,7 +1632,7 @@ class WithLabelsTest(fixtures.TestBase):
def test_names_overlap_keys_dont_nolabel(self):
sel = self._names_overlap_keys_dont()
eq_(
- sel.c.keys(),
+ list(sel.c.keys()),
['a', 'b']
)
self._assert_result_keys(sel, ['x'])
@@ -1640,7 +1640,7 @@ class WithLabelsTest(fixtures.TestBase):
def test_names_overlap_keys_dont_label(self):
sel = self._names_overlap_keys_dont().apply_labels()
eq_(
- sel.c.keys(),
+ list(sel.c.keys()),
['t1_a', 't2_b']
)
self._assert_result_keys(sel, ['t1_x', 't2_x'])
@@ -1654,7 +1654,7 @@ class WithLabelsTest(fixtures.TestBase):
def test_labels_overlap_nolabel(self):
sel = self._labels_overlap()
eq_(
- sel.c.keys(),
+ list(sel.c.keys()),
['x_id', 'id']
)
self._assert_result_keys(sel, ['x_id', 'id'])
@@ -1663,7 +1663,7 @@ class WithLabelsTest(fixtures.TestBase):
sel = self._labels_overlap().apply_labels()
t2 = sel.froms[1]
eq_(
- sel.c.keys(),
+ list(sel.c.keys()),
['t_x_id', t2.c.id.anon_label]
)
self._assert_result_keys(sel, ['t_x_id', 'id_1'])
@@ -1677,12 +1677,12 @@ class WithLabelsTest(fixtures.TestBase):
def test_labels_overlap_keylabels_dont_nolabel(self):
sel = self._labels_overlap_keylabels_dont()
- eq_(sel.c.keys(), ['a', 'b'])
+ eq_(list(sel.c.keys()), ['a', 'b'])
self._assert_result_keys(sel, ['x_id', 'id'])
def test_labels_overlap_keylabels_dont_label(self):
sel = self._labels_overlap_keylabels_dont().apply_labels()
- eq_(sel.c.keys(), ['t_a', 't_x_b'])
+ eq_(list(sel.c.keys()), ['t_a', 't_x_b'])
self._assert_result_keys(sel, ['t_x_id', 'id_1'])
def _keylabels_overlap_labels_dont(self):
@@ -1693,13 +1693,13 @@ class WithLabelsTest(fixtures.TestBase):
def test_keylabels_overlap_labels_dont_nolabel(self):
sel = self._keylabels_overlap_labels_dont()
- eq_(sel.c.keys(), ['x_id', 'id'])
+ eq_(list(sel.c.keys()), ['x_id', 'id'])
self._assert_result_keys(sel, ['a', 'b'])
def test_keylabels_overlap_labels_dont_label(self):
sel = self._keylabels_overlap_labels_dont().apply_labels()
t2 = sel.froms[1]
- eq_(sel.c.keys(), ['t_x_id', t2.c.id.anon_label])
+ eq_(list(sel.c.keys()), ['t_x_id', t2.c.id.anon_label])
self._assert_result_keys(sel, ['t_a', 't_x_b'])
self._assert_subq_result_keys(sel, ['t_a', 't_x_b'])
@@ -1711,14 +1711,14 @@ class WithLabelsTest(fixtures.TestBase):
def test_keylabels_overlap_labels_overlap_nolabel(self):
sel = self._keylabels_overlap_labels_overlap()
- eq_(sel.c.keys(), ['x_a', 'a'])
+ eq_(list(sel.c.keys()), ['x_a', 'a'])
self._assert_result_keys(sel, ['x_id', 'id'])
self._assert_subq_result_keys(sel, ['x_id', 'id'])
def test_keylabels_overlap_labels_overlap_label(self):
sel = self._keylabels_overlap_labels_overlap().apply_labels()
t2 = sel.froms[1]
- eq_(sel.c.keys(), ['t_x_a', t2.c.a.anon_label])
+ eq_(list(sel.c.keys()), ['t_x_a', t2.c.a.anon_label])
self._assert_result_keys(sel, ['t_x_id', 'id_1'])
self._assert_subq_result_keys(sel, ['t_x_id', 'id_1'])
@@ -1736,7 +1736,7 @@ class WithLabelsTest(fixtures.TestBase):
def test_keys_overlap_names_dont_label(self):
sel = self._keys_overlap_names_dont().apply_labels()
eq_(
- sel.c.keys(),
+ list(sel.c.keys()),
['t1_x', 't2_x']
)
self._assert_result_keys(sel, ['t1_a', 't2_b'])
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index 64dbb6204..dbb475b98 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -141,22 +141,14 @@ class AdaptTest(fixtures.TestBase):
eq_(types.Integer().python_type, int)
eq_(types.Numeric().python_type, decimal.Decimal)
eq_(types.Numeric(asdecimal=False).python_type, float)
- # Py3K
- #eq_(types.LargeBinary().python_type, bytes)
- # Py2K
- eq_(types.LargeBinary().python_type, str)
- # end Py2K
+ eq_(types.LargeBinary().python_type, util.binary_type)
eq_(types.Float().python_type, float)
eq_(types.Interval().python_type, datetime.timedelta)
eq_(types.Date().python_type, datetime.date)
eq_(types.DateTime().python_type, datetime.datetime)
- # Py3K
- #eq_(types.String().python_type, unicode)
- # Py2K
eq_(types.String().python_type, str)
- # end Py2K
- eq_(types.Unicode().python_type, unicode)
- eq_(types.String(convert_unicode=True).python_type, unicode)
+ eq_(types.Unicode().python_type, util.text_type)
+ eq_(types.String(convert_unicode=True).python_type, util.text_type)
assert_raises(
NotImplementedError,
@@ -257,14 +249,14 @@ class UserDefinedTest(fixtures.TablesTest, AssertsCompiledSQL):
def test_processing(self):
users = self.tables.users
users.insert().execute(
- user_id=2, goofy='jack', goofy2='jack', goofy4=u'jack',
- goofy7=u'jack', goofy8=12, goofy9=12)
+ user_id=2, goofy='jack', goofy2='jack', goofy4=util.u('jack'),
+ goofy7=util.u('jack'), goofy8=12, goofy9=12)
users.insert().execute(
- user_id=3, goofy='lala', goofy2='lala', goofy4=u'lala',
- goofy7=u'lala', goofy8=15, goofy9=15)
+ user_id=3, goofy='lala', goofy2='lala', goofy4=util.u('lala'),
+ goofy7=util.u('lala'), goofy8=15, goofy9=15)
users.insert().execute(
- user_id=4, goofy='fred', goofy2='fred', goofy4=u'fred',
- goofy7=u'fred', goofy8=9, goofy9=9)
+ user_id=4, goofy='fred', goofy2='fred', goofy4=util.u('fred'),
+ goofy7=util.u('fred'), goofy8=9, goofy9=9)
l = users.select().order_by(users.c.user_id).execute().fetchall()
for assertstr, assertint, assertint2, row in zip(
@@ -278,7 +270,7 @@ class UserDefinedTest(fixtures.TablesTest, AssertsCompiledSQL):
eq_(row[5], assertint)
eq_(row[6], assertint2)
for col in row[3], row[4]:
- assert isinstance(col, unicode)
+ assert isinstance(col, util.text_type)
def test_typedecorator_impl(self):
for impl_, exp, kw in [
@@ -715,9 +707,9 @@ class UnicodeTest(fixtures.TestBase):
expected
)
- data = u"Alors vous imaginez ma surprise, au lever du jour, quand "\
- u"une drôle de petite voix m’a réveillé. "\
- u"Elle disait: « S’il vous plaît… dessine-moi un mouton! »"
+ data = util.u("Alors vous imaginez ma surprise, au lever du jour, quand "\
+ "une drôle de petite voix m’a réveillé. "\
+ "Elle disait: « S’il vous plaît… dessine-moi un mouton! »")
def test_unicode_warnings_typelevel_native_unicode(self):
@@ -726,13 +718,12 @@ class UnicodeTest(fixtures.TestBase):
dialect = default.DefaultDialect()
dialect.supports_unicode_binds = True
uni = u.dialect_impl(dialect).bind_processor(dialect)
- # Py3K
- #assert_raises(exc.SAWarning, uni, b'x')
- #assert isinstance(uni(unicodedata), str)
- # Py2K
- assert_raises(exc.SAWarning, uni, 'x')
- assert isinstance(uni(unicodedata), unicode)
- # end Py2K
+ if util.py3k:
+ assert_raises(exc.SAWarning, uni, b'x')
+ assert isinstance(uni(unicodedata), str)
+ else:
+ assert_raises(exc.SAWarning, uni, 'x')
+ assert isinstance(uni(unicodedata), unicode)
def test_unicode_warnings_typelevel_sqla_unicode(self):
unicodedata = self.data
@@ -740,13 +731,8 @@ class UnicodeTest(fixtures.TestBase):
dialect = default.DefaultDialect()
dialect.supports_unicode_binds = False
uni = u.dialect_impl(dialect).bind_processor(dialect)
- # Py3K
- #assert_raises(exc.SAWarning, uni, b'x')
- #assert isinstance(uni(unicodedata), bytes)
- # Py2K
- assert_raises(exc.SAWarning, uni, 'x')
- assert isinstance(uni(unicodedata), str)
- # end Py2K
+ assert_raises(exc.SAWarning, uni, util.b('x'))
+ assert isinstance(uni(unicodedata), util.binary_type)
eq_(uni(unicodedata), unicodedata.encode('utf-8'))
@@ -759,14 +745,9 @@ class UnicodeTest(fixtures.TestBase):
s = String()
uni = s.dialect_impl(dialect).bind_processor(dialect)
- # this is not the unicode type - no warning
- # Py3K
- #uni(b'x')
- #assert isinstance(uni(unicodedata), bytes)
- # Py2K
- uni('x')
- assert isinstance(uni(unicodedata), str)
- # end Py2K
+
+ uni(util.b('x'))
+ assert isinstance(uni(unicodedata), util.binary_type)
eq_(uni(unicodedata), unicodedata.encode('utf-8'))
@@ -1368,7 +1349,7 @@ class NumericRawSQLTest(fixtures.TestBase):
metadata = self.metadata
self._fixture(metadata, Integer, 45)
val = testing.db.execute("select val from t").scalar()
- assert isinstance(val, (int, long))
+ assert isinstance(val, util.int_types)
eq_(val, 45)
@testing.provide_metadata
diff --git a/test/sql/test_unicode.py b/test/sql/test_unicode.py
index 37e44522e..ffcef903f 100644
--- a/test/sql/test_unicode.py
+++ b/test/sql/test_unicode.py
@@ -7,6 +7,7 @@ from sqlalchemy import testing
from sqlalchemy.testing.engines import utf8_engine
from sqlalchemy.sql import column
from sqlalchemy.testing.schema import Table, Column
+from sqlalchemy.util import u, ue
class UnicodeSchemaTest(fixtures.TestBase):
__requires__ = ('unicode_ddl',)
@@ -18,14 +19,14 @@ class UnicodeSchemaTest(fixtures.TestBase):
unicode_bind = utf8_engine()
metadata = MetaData(unicode_bind)
- t1 = Table('unitable1', metadata,
- Column(u'méil', Integer, primary_key=True),
- Column(u'\u6e2c\u8a66', Integer),
+ t1 = Table(u('unitable1'), metadata,
+ Column(u('méil'), Integer, primary_key=True),
+ Column(ue('\u6e2c\u8a66'), Integer),
test_needs_fk=True,
)
- t2 = Table(u'Unitéble2', metadata,
- Column(u'méil', Integer, primary_key=True, key="a"),
- Column(u'\u6e2c\u8a66', Integer, ForeignKey(u'unitable1.méil'),
+ t2 = Table(u('Unitéble2'), metadata,
+ Column(u('méil'), Integer, primary_key=True, key="a"),
+ Column(ue('\u6e2c\u8a66'), Integer, ForeignKey(u('unitable1.méil')),
key="b"
),
test_needs_fk=True,
@@ -33,27 +34,27 @@ class UnicodeSchemaTest(fixtures.TestBase):
# Few DBs support Unicode foreign keys
if testing.against('sqlite'):
- t3 = Table(u'\u6e2c\u8a66', metadata,
- Column(u'\u6e2c\u8a66_id', Integer, primary_key=True,
+ t3 = Table(ue('\u6e2c\u8a66'), metadata,
+ Column(ue('\u6e2c\u8a66_id'), Integer, primary_key=True,
autoincrement=False),
- Column(u'unitable1_\u6e2c\u8a66', Integer,
- ForeignKey(u'unitable1.\u6e2c\u8a66')
+ Column(ue('unitable1_\u6e2c\u8a66'), Integer,
+ ForeignKey(ue('unitable1.\u6e2c\u8a66'))
),
- Column(u'Unitéble2_b', Integer,
- ForeignKey(u'Unitéble2.b')
+ Column(u('Unitéble2_b'), Integer,
+ ForeignKey(u('Unitéble2.b'))
),
- Column(u'\u6e2c\u8a66_self', Integer,
- ForeignKey(u'\u6e2c\u8a66.\u6e2c\u8a66_id')
+ Column(ue('\u6e2c\u8a66_self'), Integer,
+ ForeignKey(ue('\u6e2c\u8a66.\u6e2c\u8a66_id'))
),
test_needs_fk=True,
)
else:
- t3 = Table(u'\u6e2c\u8a66', metadata,
- Column(u'\u6e2c\u8a66_id', Integer, primary_key=True,
+ t3 = Table(ue('\u6e2c\u8a66'), metadata,
+ Column(ue('\u6e2c\u8a66_id'), Integer, primary_key=True,
autoincrement=False),
- Column(u'unitable1_\u6e2c\u8a66', Integer),
- Column(u'Unitéble2_b', Integer),
- Column(u'\u6e2c\u8a66_self', Integer),
+ Column(ue('unitable1_\u6e2c\u8a66'), Integer),
+ Column(u('Unitéble2_b'), Integer),
+ Column(ue('\u6e2c\u8a66_self'), Integer),
test_needs_fk=True,
)
metadata.create_all()
@@ -72,42 +73,42 @@ class UnicodeSchemaTest(fixtures.TestBase):
del unicode_bind
def test_insert(self):
- t1.insert().execute({u'méil':1, u'\u6e2c\u8a66':5})
- t2.insert().execute({'a':1, 'b':1})
- t3.insert().execute({u'\u6e2c\u8a66_id': 1,
- u'unitable1_\u6e2c\u8a66': 5,
- u'Unitéble2_b': 1,
- u'\u6e2c\u8a66_self': 1})
+ t1.insert().execute({u('méil'):1, ue('\u6e2c\u8a66'):5})
+ t2.insert().execute({u('a'):1, u('b'):1})
+ t3.insert().execute({ue('\u6e2c\u8a66_id'): 1,
+ ue('unitable1_\u6e2c\u8a66'): 5,
+ u('Unitéble2_b'): 1,
+ ue('\u6e2c\u8a66_self'): 1})
assert t1.select().execute().fetchall() == [(1, 5)]
assert t2.select().execute().fetchall() == [(1, 1)]
assert t3.select().execute().fetchall() == [(1, 5, 1, 1)]
def test_reflect(self):
- t1.insert().execute({u'méil':2, u'\u6e2c\u8a66':7})
- t2.insert().execute({'a':2, 'b':2})
- t3.insert().execute({u'\u6e2c\u8a66_id': 2,
- u'unitable1_\u6e2c\u8a66': 7,
- u'Unitéble2_b': 2,
- u'\u6e2c\u8a66_self': 2})
+ t1.insert().execute({u('méil'):2, ue('\u6e2c\u8a66'):7})
+ t2.insert().execute({u('a'):2, u('b'):2})
+ t3.insert().execute({ue('\u6e2c\u8a66_id'): 2,
+ ue('unitable1_\u6e2c\u8a66'): 7,
+ u('Unitéble2_b'): 2,
+ ue('\u6e2c\u8a66_self'): 2})
meta = MetaData(unicode_bind)
tt1 = Table(t1.name, meta, autoload=True)
tt2 = Table(t2.name, meta, autoload=True)
tt3 = Table(t3.name, meta, autoload=True)
- tt1.insert().execute({u'méil':1, u'\u6e2c\u8a66':5})
- tt2.insert().execute({u'méil':1, u'\u6e2c\u8a66':1})
- tt3.insert().execute({u'\u6e2c\u8a66_id': 1,
- u'unitable1_\u6e2c\u8a66': 5,
- u'Unitéble2_b': 1,
- u'\u6e2c\u8a66_self': 1})
+ tt1.insert().execute({u('méil'):1, ue('\u6e2c\u8a66'):5})
+ tt2.insert().execute({u('méil'):1, ue('\u6e2c\u8a66'):1})
+ tt3.insert().execute({ue('\u6e2c\u8a66_id'): 1,
+ ue('unitable1_\u6e2c\u8a66'): 5,
+ u('Unitéble2_b'): 1,
+ ue('\u6e2c\u8a66_self'): 1})
- self.assert_(tt1.select(order_by=desc(u'méil')).execute().fetchall() ==
+ self.assert_(tt1.select(order_by=desc(u('méil'))).execute().fetchall() ==
[(2, 7), (1, 5)])
- self.assert_(tt2.select(order_by=desc(u'méil')).execute().fetchall() ==
+ self.assert_(tt2.select(order_by=desc(u('méil'))).execute().fetchall() ==
[(2, 2), (1, 1)])
- self.assert_(tt3.select(order_by=desc(u'\u6e2c\u8a66_id')).
+ self.assert_(tt3.select(order_by=desc(ue('\u6e2c\u8a66_id'))).
execute().fetchall() ==
[(2, 7, 2, 2), (1, 5, 1, 1)])
meta.drop_all()
@@ -117,7 +118,7 @@ class EscapesDefaultsTest(fixtures.TestBase):
def test_default_exec(self):
metadata = MetaData(testing.db)
t1 = Table('t1', metadata,
- Column(u'special_col', Integer, Sequence('special_col'), primary_key=True),
+ Column('special_col', Integer, Sequence('special_col'), primary_key=True),
Column('data', String(50)) # to appease SQLite without DEFAULT VALUES
)
metadata.create_all()
@@ -128,8 +129,8 @@ class EscapesDefaultsTest(fixtures.TestBase):
# reset the identifier preparer, so that we can force it to cache
# a unicode identifier
engine.dialect.identifier_preparer = engine.dialect.preparer(engine.dialect)
- select([column(u'special_col')]).select_from(t1).execute().close()
- assert isinstance(engine.dialect.identifier_preparer.format_sequence(Sequence('special_col')), unicode)
+ select([column('special_col')]).select_from(t1).execute().close()
+ assert isinstance(engine.dialect.identifier_preparer.format_sequence(Sequence('special_col')), str)
# now execute, run the sequence. it should run in u"Special_col.nextid" or similar as
# a unicode object; cx_oracle asserts that this is None or a String (postgresql lets it pass thru).
diff --git a/test/sql/test_update.py b/test/sql/test_update.py
index a8df86cd2..8695760fb 100644
--- a/test/sql/test_update.py
+++ b/test/sql/test_update.py
@@ -242,7 +242,7 @@ class UpdateFromCompileTest(_UpdateFromTestBase, fixtures.TablesTest,
'WHERE '
'users.id = addresses.user_id AND '
'addresses.email_address = :email_address_1',
- checkparams={u'email_address_1': 'e1', 'name': 'newname'})
+ checkparams={'email_address_1': 'e1', 'name': 'newname'})
def test_render_multi_table(self):
users = self.tables.users
@@ -250,8 +250,8 @@ class UpdateFromCompileTest(_UpdateFromTestBase, fixtures.TablesTest,
dingalings = self.tables.dingalings
checkparams = {
- u'email_address_1': 'e1',
- u'id_1': 2,
+ 'email_address_1': 'e1',
+ 'id_1': 2,
'name': 'newname'
}
@@ -285,15 +285,15 @@ class UpdateFromCompileTest(_UpdateFromTestBase, fixtures.TablesTest,
'WHERE '
'users.id = addresses.user_id AND '
'addresses.email_address = %s',
- checkparams={u'email_address_1': 'e1', 'name': 'newname'},
+ checkparams={'email_address_1': 'e1', 'name': 'newname'},
dialect=mysql.dialect())
def test_render_subquery(self):
users, addresses = self.tables.users, self.tables.addresses
checkparams = {
- u'email_address_1': 'e1',
- u'id_1': 7,
+ 'email_address_1': 'e1',
+ 'id_1': 7,
'name': 'newname'
}